AssertionError: Match length mismatch during learner.show_results()

I’m not able to view my inference results via show_results(). When I call learner.show_results(), I get an AssertionError: Match length mismatch. I’d really appreciate an idea of where to look for my error!

628         if self.display:
629             print("Saved as " + self.name)
--> 630             self.learner.show_results() 
631             plt.show()
632 

/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in show_results(self, ds_idx, dl, max_n, shuffle, **kwargs)
253         b = dl.one_batch()
254         _,_,preds = self.get_preds(dl=[b], with_decoded=True)
--> 255         self.dls.show_results(b, preds, max_n=max_n, **kwargs)
256 
257     def show_training_loop(self):

/usr/local/lib/python3.6/dist-packages/fastai2/data/core.py in show_results(self, b, out, max_n, ctxs, show, **kwargs)
103         x,y,its = self.show_batch(b, max_n=max_n, show=False)
104         b_out = type(b)(b[:self.n_inp] + (tuple(out) if is_listy(out) else (out,)))
--> 105         x1,y1,outs = self.show_batch(b_out, max_n=max_n, show=False)
106         res = (x,x1,None,None) if its is None else (x, y, its, outs.itemgot(slice(self.n_inp,None)))
107         if not show: return res

/usr/local/lib/python3.6/dist-packages/fastai2/data/core.py in show_batch(self, b, max_n, ctxs, show, unique, **kwargs)
 96             self.get_idxs = lambda: Inf.zeros
 97         if b is None: b = self.one_batch()
---> 98         if not show: return self._pre_show_batch(b, max_n=max_n)
 99         show_batch(*self._pre_show_batch(b, max_n=max_n), ctxs=ctxs, max_n=max_n, **kwargs)
100         if unique: self.get_idxs = old_get_idxs

/usr/local/lib/python3.6/dist-packages/fastai2/data/core.py in _pre_show_batch(self, b, max_n)
 87         b = self.decode(b)
 88         if hasattr(b, 'show'): return b,None,None
---> 89         its = self._decode_batch(b, max_n, full=False)
 90         if not is_listy(b): b,its = [b],L((o,) for o in its)
 91         return detuplify(b[:self.n_inp]),detuplify(b[self.n_inp:]),its

/usr/local/lib/python3.6/dist-packages/fastai2/data/core.py in _decode_batch(self, b, max_n, full)
 81         f = self.after_item.decode
 82         f = compose(f, partial(getattr(self.dataset,'decode',noop), full = full))
---> 83         return L(batch_to_samples(b, max_n=max_n)).map(f)
 84 
 85     def _pre_show_batch(self, b, max_n=9):

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
375              else f.format if isinstance(f,str)
376              else f.__getitem__)
--> 377         return self._new(map(g, self))
378 
379     def filter(self, f, negate=False, **kwargs):

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
325     @property
326     def _xtra(self): return None
--> 327     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
328     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
329     def copy(self): return self._new(self.items.copy())

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
 45             return x
 46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
 48         res._newchk = 0
 49         return res

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
316         if items is None: items = []
317         if (use_list is not None) or not _is_array(items):
--> 318             items = list(items) if use_list else _listify(items)
319         if match is not None:
320             if is_coll(match): match = len(match)

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _listify(o)
252     if isinstance(o, list): return o
253     if isinstance(o, str) or _is_array(o): return [o]
--> 254     if is_iter(o): return list(o)
255     return [o]
256 

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
218             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
219         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 220         return self.fn(*fargs, **kwargs)
221 
222 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in _inner(x, *args, **kwargs)
347     if order is not None: funcs = funcs.sorted(order)
348     def _inner(x, *args, **kwargs):
--> 349         for f in L(funcs): x = f(x, *args, **kwargs)
350         return x
351     return _inner

/usr/local/lib/python3.6/dist-packages/fastai2/data/core.py in decode(self, o, full)
295     def __iter__(self): return (self[i] for i in range(len(self)))
296     def __repr__(self): return coll_repr(self)
--> 297     def decode(self, o, full=True): return tuple(tl.decode(o_, full=full) for o_,tl in zip(o,tuplify(self.tls, match=o)))
298     def subset(self, i): return type(self)(tls=L(tl.subset(i) for tl in self.tls), n_inp=self.n_inp)
299     def _new(self, items, *args, **kwargs): return super()._new(items, tfms=self.tfms, do_setup=False, **kwargs)

/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in tuplify(o, use_list, match)
133 def tuplify(o, use_list=False, match=None):
134     "Make `o` a tuple"
--> 135     return tuple(L(o, use_list=use_list, match=match))
136 
137 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
 45             return x
 46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
 48         res._newchk = 0
 49         return res

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
320             if is_coll(match): match = len(match)
321             if len(items)==1: items = items*match
--> 322             else: assert len(items)==match, 'Match length mismatch'
323         super().__init__(items)
324 

AssertionError: Match length mismatch

How did you build your learner? And your data? We need more information to know what you’re doing.

Apologies for the delay. I will try to reproduce the error on a public dataset and then provide more information here!

Right, so I’ve reproduced the error in this Kaggle notebook: https://www.kaggle.com/sanjan611/brain-mri-segmentation-fastai-v2

Some details:
My input is an MRI image (.tif), and my output is bounding box coordinates and a label (based on a mask).
blocks = (ImageBlock, BBoxBlock, BBoxLblBlock); a rough sketch of the full DataBlock is below.
dataloader.show_batch works as expected.
My Learner trained successfully for 10 epochs using fit_one_cycle.
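
For context, my DataBlock is set up roughly along these lines (the getter names and path here are placeholders, not the exact code from the notebook):

# Rough sketch of the DataBlock setup; get_bboxes, get_bbox_labels and path
# are hypothetical stand-ins for the notebook's actual getters and data path.
brain = DataBlock(
    blocks=(ImageBlock, BBoxBlock, BBoxLblBlock),
    get_items=get_image_files,            # collect the .tif images
    get_y=[get_bboxes, get_bbox_labels],  # one getter per target block
    n_inp=1,                              # the image is the only input; boxes and labels are targets
    item_tfms=Resize(256),
)
dls = brain.dataloaders(path, bs=8)
dls.show_batch()  # works as expected, as noted above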

I’ve noticed that the basic learner.predict() raises an error as well, so maybe there is something fundamentally wrong?

No, predict and a few other functions do not work natively in fastai2 with object detection, as a lot of it is architecture-specific.
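
If you want to sanity-check the model in the meantime, pulling the raw predictions with get_preds should still run in most cases; decoding those activations back into boxes and labels is architecture-specific, so this is only a sketch:

# Sketch: gather raw (undecoded) outputs for the validation set.
# Turning these activations into boxes/labels depends on your head and loss,
# which is why the generic predict()/show_results() path fails here.
preds, targs = learn.get_preds(dl=learn.dls.valid)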

Ah interesting. Could that be why show_results() is also throwing an error?

Yes, it is.


Hello, I encountered a very similar error when running inference on my test set. Any suggestions for fixing this error are highly welcome.
Traceback (most recent call last):
  File "~\FLproject\reg_train.py", line 62, in
    test_dl = learn.dls.test_dl(X_test_windows, y_test_windows, with_labels=True)
  File "~\FLproject\venv\lib\site-packages\fastai\data\core.py", line 535, in test_dl
    test_ds = test_set(self.valid_ds, test_items, rm_tfms=rm_type_tfms, with_labels=with_labels
  File "~\FLproject\venv\lib\site-packages\fastai\data\core.py", line 515, in test_set
    else: rm_tfms = tuplify(rm_tfms, match=test_tls)
  File "~\FLproject\venv\lib\site-packages\fastcore\basics.py", line 77, in tuplify
    return tuple(listify(o, use_list=use_list, match=match))
  File "~\FLproject\venv\lib\site-packages\fastcore\basics.py", line 71, in listify
    else: assert len(res)==match, 'Match length mismatch'
AssertionError: Match length mismatch

The code I’m running is as follows:
splits = RandomSplitter(valid_pct=0.2)(range_of(X_train_windows))  # random 80/20 train/valid split

tfms = [None, [TSStandardize(by_sample=True, by_var=True)]]
batch_tfms = [TSStandardize(by_sample=True, by_var=True)]

train_dls = get_ts_dls(X_train_windows, y_train_windows, splits=splits, tfms=tfms, batch_tfms=batch_tfms, bs=64)

model = TSTPlus(2, 1, seq_len=seq_length)  # c_in=2 variables, c_out=1 regression target
learn = ts_learner(train_dls, model, metrics=[mae, rmse])
learn.fit_one_cycle(5, 1e-2)

X_test_windows = X_test_windows.flatten().tolist()
y_test_windows = y_test_windows.flatten().tolist()

test_dl = learn.dls.test_dl(X_test_windows, y_test_windows, with_labels=True)  # line 62 in the traceback above
preds, targets = learn.get_preds(dl=test_dl, with_loss=True)
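
For reference, the assertion in the traceback fires while tuplify-ing rm_tfms against the test transform lists, which suggests y_test_windows may be landing in the rm_type_tfms slot of test_dl rather than being used as labels. A minimal, items-only sketch with keywords spelled out (how to pair the labels back in depends on how the tsai datasets were built, so that part is not shown here):

# Sketch only: pass just the test items so nothing positional lands in
# rm_type_tfms; with_labels=True would need the targets carried inside the
# items themselves, which is tsai-specific and omitted here.
test_dl = learn.dls.test_dl(X_test_windows)
preds, _ = learn.get_preds(dl=test_dl)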