The model trains, but `learn.show_results()` raises an `AssertionError`.
Could this be because one of the functions I supplied isn't returning what fastai expects?
(By "functions" I mean the getters and the loss function; see below.)
AssertionError

```python
learn.show_results()
```

```
AssertionError Traceback (most recent call last)
<ipython-input-22-c3b657dcc9ae> in <module>()
----> 1 learn.show_results()
15 frames
/usr/local/lib/python3.6/dist-packages/fastai/learner.py in show_results(self, ds_idx, dl, max_n, shuffle, **kwargs)
256 b = dl.one_batch()
257 _,_,preds = self.get_preds(dl=[b], with_decoded=True)
--> 258 self.dls.show_results(b, preds, max_n=max_n, **kwargs)
259
260 def show_training_loop(self):
/usr/local/lib/python3.6/dist-packages/fastai/data/core.py in show_results(self, b, out, max_n, ctxs, show, **kwargs)
106 x,y,its = self.show_batch(b, max_n=max_n, show=False)
107 b_out = type(b)(b[:self.n_inp] + (tuple(out) if is_listy(out) else (out,)))
--> 108 x1,y1,outs = self.show_batch(b_out, max_n=max_n, show=False)
109 res = (x,x1,None,None) if its is None else (x, y, its, outs.itemgot(slice(self.n_inp,None)))
110 if not show: return res
/usr/local/lib/python3.6/dist-packages/fastai/data/core.py in show_batch(self, b, max_n, ctxs, show, unique, **kwargs)
99 self.get_idxs = lambda: Inf.zeros
100 if b is None: b = self.one_batch()
--> 101 if not show: return self._pre_show_batch(b, max_n=max_n)
102 show_batch(*self._pre_show_batch(b, max_n=max_n), ctxs=ctxs, max_n=max_n, **kwargs)
103 if unique: self.get_idxs = old_get_idxs
/usr/local/lib/python3.6/dist-packages/fastai/data/core.py in _pre_show_batch(self, b, max_n)
90 b = self.decode(b)
91 if hasattr(b, 'show'): return b,None,None
---> 92 its = self._decode_batch(b, max_n, full=False)
93 if not is_listy(b): b,its = [b],L((o,) for o in its)
94 return detuplify(b[:self.n_inp]),detuplify(b[self.n_inp:]),its
/usr/local/lib/python3.6/dist-packages/fastai/data/core.py in _decode_batch(self, b, max_n, full)
84 f = self.after_item.decode
85 f = compose(f, partial(getattr(self.dataset,'decode',noop), full = full))
---> 86 return L(batch_to_samples(b, max_n=max_n)).map(f)
87
88 def _pre_show_batch(self, b, max_n=9):
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
381 else f.format if isinstance(f,str)
382 else f.__getitem__)
--> 383 return self._new(map(g, self))
384
385 def filter(self, f, negate=False, **kwargs):
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
331 @property
332 def _xtra(self): return None
--> 333 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
334 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
335 def copy(self): return self._new(self.items.copy())
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
45 return x
46
---> 47 res = super().__call__(*((x,) + args), **kwargs)
48 res._newchk = 0
49 return res
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
322 if items is None: items = []
323 if (use_list is not None) or not _is_array(items):
--> 324 items = list(items) if use_list else _listify(items)
325 if match is not None:
326 if is_coll(match): match = len(match)
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _listify(o)
258 if isinstance(o, list): return o
259 if isinstance(o, str) or _is_array(o): return [o]
--> 260 if is_iter(o): return list(o)
261 return [o]
262
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
224 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
225 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 226 return self.fn(*fargs, **kwargs)
227
228 # Cell
/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in _inner(x, *args, **kwargs)
371 if order is not None: funcs = funcs.sorted(order)
372 def _inner(x, *args, **kwargs):
--> 373 for f in L(funcs): x = f(x, *args, **kwargs)
374 return x
375 return _inner
/usr/local/lib/python3.6/dist-packages/fastai/data/core.py in decode(self, o, full)
299 def __iter__(self): return (self[i] for i in range(len(self)))
300 def __repr__(self): return coll_repr(self)
--> 301 def decode(self, o, full=True): return tuple(tl.decode(o_, full=full) for o_,tl in zip(o,tuplify(self.tls, match=o)))
302 def subset(self, i): return type(self)(tls=L(tl.subset(i) for tl in self.tls), n_inp=self.n_inp)
303 def _new(self, items, *args, **kwargs): return super()._new(items, tfms=self.tfms, do_setup=False, **kwargs)
/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in tuplify(o, use_list, match)
145 def tuplify(o, use_list=False, match=None):
146 "Make `o` a tuple"
--> 147 return tuple(L(o, use_list=use_list, match=match))
148
149 # Cell
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
45 return x
46
---> 47 res = super().__call__(*((x,) + args), **kwargs)
48 res._newchk = 0
49 return res
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
326 if is_coll(match): match = len(match)
327 if len(items)==1: items = items*match
--> 328 else: assert len(items)==match, 'Match length mismatch'
329 super().__init__(items)
330
AssertionError: Match length mismatch
```
datablock and dataloaders
FYI: I want the model to predict a bounding box (BBoxBlock). I'm not interested in any labels, but I understood that a BBoxLblBlock needs to be added when using BBoxBlock.
```python
# DataBlock
# TODO: where to seed the random number generator?
datablock = DataBlock(
    blocks     = (ImageBlock, BBoxBlock, BBoxLblBlock),
    n_inp      = 1,
    get_items  = get_image_paths,
    get_y      = get_y,
    splitter   = RandomSplitter(),
    item_tfms  = None,
    batch_tfms = None,
)

dls = datablock.dataloaders(Path(project_folder), path=Path(project_folder), bs=16)
```
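To see whether the getters produce what fastai expects before training, the DataBlock can be inspected directly; a diagnostic sketch using the same `project_folder`:

```python
# Sketch: show how fastai builds one sample / one batch from the getters,
# and visually check that the decoded boxes land on the right images.
datablock.summary(Path(project_folder), bs=4)
dls.show_batch(max_n=4)
```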
getters
```python
# getters for BBoxBlock AND BBoxLblBlock
def get_bounding_box(x):
    result = ParsePath(x)   # ParsePath (defined elsewhere) returns one box as a list of four floats
    return [result]

def get_label(x):
    result = ["bla"]        # dummy values obviously
    # print("get_label(x)", x, "result", result)
    return result

# combined 'getter'
get_y = [get_bounding_box, get_label]

# getter for [x]
def get_image_paths(path):
    return get_image_files(project_folder)   # note: ignores `path` and uses the global project_folder

# test
print(get_bounding_box("/content/data/FastAIDeDup03/TwitterImage_637308704744216343_BBOX(21,16,105,169).jpg"))
print(get_label("/content/data/FastAIDeDup03/TwitterImage_637308704744216343_BBOX(21,16,105,169).jpg"))
```

Output:

```
[[21.0, 16.0, 125.0, 184.0]]
['bla']
```
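Because the traceback ends in `Match length mismatch`, one cheap sanity check (a diagnostic sketch, not a confirmed cause) is that the two target getters always return equally long lists for every image:

```python
# Diagnostic sketch: every image should get exactly as many labels as boxes.
for f in get_image_paths(Path(project_folder)):
    boxes, labels = get_bounding_box(f), get_label(f)
    if len(boxes) != len(labels):
        print(f, len(boxes), len(labels))
```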
loss function
I'm only interested in how "close" the predicted bounding boxes are to the targets (the labels don't matter). This is my first attempt; does its return value match what fastai expects?
```python
def my_loss(output, target, extra):
    # `extra` receives the (dummy) label target and is ignored; only the boxes matter
    loss = torch.sum((output - target) ** 2)
    return loss
```
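If a plain squared error is what you want, one small optional variant (a sketch keeping your `(output, target, extra)` signature) is to average instead of sum, so the value doesn't grow with the batch size; the training loop only needs a scalar tensor it can call `backward()` on:

```python
import torch

def my_loss_mean(output, target, extra):
    # same squared-error idea, but averaged so the value is independent of batch size;
    # `extra` (the dummy labels) is still ignored
    return torch.mean((output - target) ** 2)
```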
learner
```python
learn = cnn_learner(dls, resnet50, loss_func=my_loss)
```
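One way to check whether the model's raw output lines up with the bounding-box target (a hedged diagnostic, assuming the batch unpacks as image, bbox, label since `n_inp=1`):

```python
# Diagnostic sketch (hypothetical): compare the model's raw output shape with the
# bounding-box target it is subtracted from in my_loss.
xb, bbox_t, lbl_t = dls.one_batch()      # n_inp=1, so a batch is (image, bbox, label)
model = learn.model.to(dls.device)       # make sure model and batch share a device
with torch.no_grad():
    preds = model(xb)
print("model output:", preds.shape, "bbox target:", bbox_t.shape)
```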
train
```python
lr = slice(1e-5, 1e-3)
learn.fit_one_cycle(lr_max=lr, n_epoch=8)
```
epoch | train_loss | valid_loss | time |
---|---|---|---|
0 | 3179.236084 | 1161.608032 | 02:48 |
1 | 1091.451172 | 267.312775 | 02:48 |
2 | 272.627411 | 99.748787 | 02:47 |
3 | 122.156326 | 76.042305 | 02:47 |
4 | 93.195999 | 71.138870 | 02:47 |
5 | 84.190552 | 68.116417 | 02:48 |
6 | 79.838127 | 65.860466 | 02:48 |
7 | 76.278267 | 65.580963 | 02:48 |