Learn.fine_tune causes AssertionError

I tried to build a network in Colab for multi-label classification. fastai v1 works well with my data, and learn.lr_find() from fastai v2 ran fine. But learn.fine_tune in v2 fails after one epoch:

```python
learn.fine_tune(4, 0.005301)
```

```
epoch   train_loss   valid_loss   accuracy_multi   time
0       4.341522     3.269754     None             00:24
```

```
AssertionError                            Traceback (most recent call last)
<ipython-input> in <module>()
----> 1 learn.fine_tune(4, 0.0030199517495930195)

30 frames

/usr/local/lib/python3.6/dist-packages/fastcore/test.py in test(a, b, cmp, cname)
     22     "assert that `cmp(a,b)`; display inputs and `cname or cmp.__name__` if it fails"
     23     if cname is None: cname=cmp.__name__
---> 24     assert cmp(a,b),f"{cname}:\n{a}\n{b}"
     25 
     26 # Cell

AssertionError: ==:
2944
64
```

you need to expand that “30 frames” and post the call stack within a code block, with triple ``` backticks either side,

```
like
this
```

it’s failing some kind of sanity check on two values, but we can’t see where it’s going wrong without the call stack.
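for reference, the check that fails is fastcore’s `test_eq`, which just asserts its two arguments compare equal and prints both when they don’t. a minimal sketch of that behaviour (assuming fastcore is installed):

```python
# minimal sketch of the failing sanity check in fastcore.test
from fastcore.test import test_eq

test_eq(64, 64)    # passes silently
test_eq(2944, 64)  # raises AssertionError with "==:", 2944 and 64 in the message
```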

```
AssertionError                            Traceback (most recent call last)
<ipython-input> in <module>()
----> 1 learn.fine_tune(4, 0.0030199517495930195)

30 frames

/usr/local/lib/python3.6/dist-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

/usr/local/lib/python3.6/dist-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    159     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    160     self.freeze()
--> 161     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    162     base_lr /= 2
    163     self.unfreeze()

/usr/local/lib/python3.6/dist-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

/usr/local/lib/python3.6/dist-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    114 
    115 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    205             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    206             self.n_epoch = n_epoch
--> 207             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    208 
    209     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _do_fit(self)
    195         for epoch in range(self.n_epoch):
    196             self.epoch=epoch
--> 197             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    198 
    199     @log_args(but='cbs')

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _do_epoch(self)
    190     def _do_epoch(self):
    191         self._do_epoch_train()
--> 192         self._do_epoch_validate()
    193 
    194     def _do_fit(self):

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    186         if dl is None: dl = self.dls[ds_idx]
    187         self.dl = dl
--> 188         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    189 
    190     def _do_epoch(self):

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in all_batches(self)
    159     def all_batches(self):
    160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
    162 
    163     def _do_one_batch(self):

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in one_batch(self, i, b)
    177         self.iter = i
    178         self._split(b)
--> 179         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    180 
    181     def _do_epoch_train(self):

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
--> 157         finally:   self(f'after_{event_type}')        ;final()
    158 
    159     def all_batches(self):

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in __call__(self, event_name)
    131     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    132 
--> 133     def __call__(self, event_name): L(event_name).map(self._call_one)
    134 
    135     def _call_one(self, event_name):

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    270              else f.format if isinstance(f,str)
    271              else f.__getitem__)
--> 272         return self._new(map(g, self))
    273 
    274     def filter(self, f, negate=False, **kwargs):

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    216     @property
    217     def _xtra(self): return None
--> 218     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    219     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    220     def copy(self): return self._new(self.items.copy())

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
    197     def __call__(cls, x=None, *args, **kwargs):
    198         if not args and not kwargs and x is not None and isinstance(x,cls): return x
--> 199         return super().__call__(x, *args, **kwargs)
    200 
    201 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    207         if items is None: items = []
    208         if (use_list is not None) or not _is_array(items):
--> 209             items = list(items) if use_list else _listify(items)
    210         if match is not None:
    211             if is_coll(match): match = len(match)

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _listify(o)
    114     if isinstance(o, list): return o
    115     if isinstance(o, str) or _is_array(o): return [o]
--> 116     if is_iter(o): return list(o)
    117     return [o]
    118 

/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    177         if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    178         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 179         return self.fn(*fargs, **kwargs)
    180 
    181 # Cell

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in _call_one(self, event_name)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in <listcomp>(.0)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/usr/local/lib/python3.6/dist-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in after_batch(self)
    448         if len(self.yb) == 0: return
    449         mets = self._train_mets if self.training else self._valid_mets
--> 450         for met in mets: met.accumulate(self.learn)
    451         if not self.training: return
    452         self.lrs.append(self.opt.hypers[-1]['lr'])

/usr/local/lib/python3.6/dist-packages/fastai/learner.py in accumulate(self, learn)
    372     def accumulate(self, learn):
    373         bs = find_bs(learn.yb)
--> 374         self.total += learn.to_detach(self.func(learn.pred, *learn.yb))*bs
    375         self.count += bs
    376     @property

/usr/local/lib/python3.6/dist-packages/fastai/metrics.py in accuracy_multi(inp, targ, thresh, sigmoid)
    205 def accuracy_multi(inp, targ, thresh=0.5, sigmoid=True):
    206     "Compute accuracy when `inp` and `targ` are the same size."
--> 207     inp,targ = flatten_check(inp,targ)
    208     if sigmoid: inp = inp.sigmoid()
    209     return ((inp>thresh)==targ.bool()).float().mean()

/usr/local/lib/python3.6/dist-packages/fastai/torch_core.py in flatten_check(inp, targ)
    751     "Check that `out` and `targ` have the same number of elements and flatten them."
    752     inp,targ = inp.contiguous().view(-1),targ.contiguous().view(-1)
--> 753     test_eq(len(inp), len(targ))
    754     return inp,targ

/usr/local/lib/python3.6/dist-packages/fastcore/test.py in test_eq(a, b)
     32 def test_eq(a,b):
     33     "`test` that `a==b`"
---> 34     test(a,b,equals, '==')
     35 
     36 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/test.py in test(a, b, cmp, cname)
     22     "assert that `cmp(a,b)`; display inputs and `cname or cmp.__name__` if it fails"
     23     if cname is None: cname=cmp.__name__
---> 24     assert cmp(a,b),f"{cname}:\n{a}\n{b}"
     25 
     26 # Cell

AssertionError: ==:
2944
64
```

that’s the problem. i assume from what you were saying originally that you’ve taken a single-label classifier and tried to turn it multi-label?
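the numbers in the error fit that too: if 64 is your batch size, then 2944 / 64 = 46, so the model looks like it’s putting out 46 activations per image while the targets are single integer labels. a rough sketch of the shape mismatch (the 46-class count is inferred from the error, not from anything you’ve posted):

```python
import torch

bs, n_classes = 64, 46                     # 46 inferred from 2944 / 64
inp  = torch.randn(bs, n_classes)          # model output: 2944 elements when flattened
targ = torch.randint(0, n_classes, (bs,))  # single-label targets: 64 elements

# accuracy_multi flattens both and checks the lengths match:
print(inp.reshape(-1).shape, targ.reshape(-1).shape)  # [2944] vs [64] -> AssertionError
# it expects multi-hot targets of shape (bs, n_classes) instead
```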

i haven’t played with fastai2 yet (i’m still doing part 2 2019), so you may need to post your model setup code and wait for someone else to tell you what’s gone wrong, but i’m pretty sure i’ve seen other threads on the forum about making v2 classifiers multi-label.

[edit] or possibly just use a different metric?
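something like this, assuming the usual tutorial-style learner setup (untested, i haven’t run v2; `accuracy` is fastai’s standard single-label metric and expects integer class targets):

```python
# untested sketch: swap accuracy_multi for plain accuracy, which matches
# single-label targets (one integer class index per item)
learn = cnn_learner(data, resnet50, metrics=accuracy)
learn.fine_tune(4, 0.005301)
```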

I used the parameters from the tutorial and tried to change them when I got the exception.
My code:

```python
data = ImageDataLoaders.from_lists(path_img, fnames, classes,
                                   item_tfms=Resize(460),
                                   batch_tfms=aug_transforms(size=224), bs=64)
learn = cnn_learner(data, resnet50, metrics=partial(accuracy_multi, thresh=0.5))
learn.lr_find()
learn.fine_tune(4, 0.005301)
```

You are right. But it seems weird to me that I can’t use a multi-label classifier on a single-label bunch; for me it’s just a special case)
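fwiw, if you do want to treat single-label data as a special case of multi-label so that accuracy_multi applies, one untested sketch is to build the DataLoaders with the DataBlock API instead of from_lists, since MultiCategoryBlock multi-hot encodes the targets to shape (bs, n_classes); the lambdas below assume fnames and classes pair up item by item, as in the from_lists call above:

```python
# untested sketch: wrap each single label in a one-element list so
# MultiCategoryBlock multi-hot encodes it; accuracy_multi then sees
# targets of shape (bs, n_classes), matching the model output
dblock = DataBlock(
    blocks=(ImageBlock, MultiCategoryBlock),
    get_x=lambda o: o[0],    # o is a (fname, label) pair; prepend path_img if paths are relative
    get_y=lambda o: [o[1]],  # the single label, wrapped in a list
    item_tfms=Resize(460),
    batch_tfms=aug_transforms(size=224))
data = dblock.dataloaders(list(zip(fnames, classes)), bs=64)
learn = cnn_learner(data, resnet50, metrics=partial(accuracy_multi, thresh=0.5))
```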