Assertion Error

I'm trying a new dataset of spectrograms I generated in MATLAB.

I did not hit this issue with prior MATLAB-generated images, even after changing the batch_size in the DataLoader.

I believe this is related to my batch_size, because the AssertionError changes as I change it:

AssertionError: ==:
(batch_size number)
64

In the traceback below you will see 12 above 64. That first number becomes 8 if I use batch_size = 8, and so on.

I don’t know if it helps, but with this most recent error I am also running out of GPU memory (not shown here)…
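For what it’s worth, here is a sketch of the quick shape check I could run first, assuming the `dls` and `learn` objects from the code in the traceback. The failing `flatten_check` at the bottom asserts that predictions and targets flatten to the same number of elements, so mismatched shapes here would explain the error:

```python
import torch

# Sketch of a sanity check (assumes `dls` and `learn` from the traceback below).
# Pull one validation batch and compare model output shape to target shape.
xb, yb = dls.valid.one_batch()
with torch.no_grad():
    preds = learn.model(xb)
print(preds.shape, yb.shape)  # expect [bs, n_classes] and [bs] for single-label data
```

The full traceback: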

AssertionError Traceback (most recent call last)
<ipython-input-…> in <module>
1 learn = cnn_learner(dls, resnet18, metrics=error_rate)
----> 2 learn.fine_tune(4)

~/anaconda3/lib/python3.8/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
54 init_args.update(log)
55 setattr(inst, 'init_args', init_args)
---> 56 return inst if to_return else f(*args, **kwargs)
57 return _f

~/anaconda3/lib/python3.8/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
159 "Fine tune with freeze for freeze_epochs then with unfreeze from epochs using discriminative LR"
160 self.freeze()
---> 161 self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
162 base_lr /= 2
163 self.unfreeze()

~/anaconda3/lib/python3.8/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
54 init_args.update(log)
55 setattr(inst, 'init_args', init_args)
---> 56 return inst if to_return else f(*args, **kwargs)
57 return _f

~/anaconda3/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
111 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
112 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
---> 113 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
114
115 # Cell

~/anaconda3/lib/python3.8/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
54 init_args.update(log)
55 setattr(inst, 'init_args', init_args)
---> 56 return inst if to_return else f(*args, **kwargs)
57 return _f

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
205 self.opt.set_hypers(lr=self.lr if lr is None else lr)
206 self.n_epoch = n_epoch
---> 207 self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
208
209 def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153
154 def _with_events(self, f, event_type, ex, final=noop):
---> 155 try: self(f'before_{event_type}') ;f()
156 except ex: self(f'after_cancel_{event_type}')
157 finally: self(f'after_{event_type}') ;final()

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _do_fit(self)
195 for epoch in range(self.n_epoch):
196 self.epoch=epoch
---> 197 self._with_events(self._do_epoch, 'epoch', CancelEpochException)
198
199 @log_args(but='cbs')

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153
154 def _with_events(self, f, event_type, ex, final=noop):
---> 155 try: self(f'before_{event_type}') ;f()
156 except ex: self(f'after_cancel_{event_type}')
157 finally: self(f'after_{event_type}') ;final()

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _do_epoch(self)
190 def _do_epoch(self):
191 self._do_epoch_train()
---> 192 self._do_epoch_validate()
193
194 def _do_fit(self):

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
186 if dl is None: dl = self.dls[ds_idx]
187 self.dl = dl
---> 188 with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
189
190 def _do_epoch(self):

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153
154 def _with_events(self, f, event_type, ex, final=noop):
---> 155 try: self(f'before_{event_type}') ;f()
156 except ex: self(f'after_cancel_{event_type}')
157 finally: self(f'after_{event_type}') ;final()

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in all_batches(self)
159 def all_batches(self):
160 self.n_iter = len(self.dl)
---> 161 for o in enumerate(self.dl): self.one_batch(*o)
162
163 def _do_one_batch(self):

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in one_batch(self, i, b)
177 self.iter = i
178 self._split(b)
---> 179 self._with_events(self._do_one_batch, 'batch', CancelBatchException)
180
181 def _do_epoch_train(self):

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
155 try: self(f'before_{event_type}') ;f()
156 except ex: self(f'after_cancel_{event_type}')
---> 157 finally: self(f'after_{event_type}') ;final()
158
159 def all_batches(self):

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in __call__(self, event_name)
131 def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
132
---> 133 def __call__(self, event_name): L(event_name).map(self._call_one)
134
135 def _call_one(self, event_name):

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
270 else f.format if isinstance(f,str)
271 else f.__getitem__)
---> 272 return self._new(map(g, self))
273
274 def filter(self, f, negate=False, **kwargs):

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
216 @property
217 def _xtra(self): return None
---> 218 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
219 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
220 def copy(self): return self._new(self.items.copy())

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
197 def __call__(cls, x=None, *args, **kwargs):
198 if not args and not kwargs and x is not None and isinstance(x,cls): return x
---> 199 return super().__call__(x, *args, **kwargs)
200
201 # Cell

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
207 if items is None: items = []
208 if (use_list is not None) or not _is_array(items):
---> 209 items = list(items) if use_list else _listify(items)
210 if match is not None:
211 if is_coll(match): match = len(match)

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in _listify(o)
114 if isinstance(o, list): return o
115 if isinstance(o, str) or _is_array(o): return [o]
---> 116 if is_iter(o): return list(o)
117 return [o]
118

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
177 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
178 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
---> 179 return self.fn(*fargs, **kwargs)
180
181 # Cell

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in _call_one(self, event_name)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name), event_name
---> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in <listcomp>(.0)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name), event_name
---> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/lib/python3.8/site-packages/fastai/callback/core.py in __call__(self, event_name)
42 (self.run_valid and not getattr(self, 'training', False)))
43 res = None
---> 44 if self.run and _run: res = getattr(self, event_name, noop)()
45 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
46 return res

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in after_batch(self)
448 if len(self.yb) == 0: return
449 mets = self._train_mets if self.training else self._valid_mets
---> 450 for met in mets: met.accumulate(self.learn)
451 if not self.training: return
452 self.lrs.append(self.opt.hypers[-1]['lr'])

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in accumulate(self, learn)
372 def accumulate(self, learn):
373 bs = find_bs(learn.yb)
---> 374 self.total += learn.to_detach(self.func(learn.pred, *learn.yb))*bs
375 self.count += bs
376 @property

~/anaconda3/lib/python3.8/site-packages/fastai/metrics.py in error_rate(inp, targ, axis)
105 def error_rate(inp, targ, axis=-1):
106 "1 - accuracy"
---> 107 return 1 - accuracy(inp, targ, axis=axis)
108
109 # Cell

~/anaconda3/lib/python3.8/site-packages/fastai/metrics.py in accuracy(inp, targ, axis)
99 def accuracy(inp, targ, axis=-1):
100 "Compute accuracy with targ when pred is bs * n_classes"
---> 101 pred,targ = flatten_check(inp.argmax(dim=axis), targ)
102 return (pred == targ).float().mean()
103

~/anaconda3/lib/python3.8/site-packages/fastai/torch_core.py in flatten_check(inp, targ)
751 "Check that out and targ have the same number of elements and flatten them."
752 inp,targ = inp.contiguous().view(-1),targ.contiguous().view(-1)
---> 753 test_eq(len(inp), len(targ))
754 return inp,targ

~/anaconda3/lib/python3.8/site-packages/fastcore/test.py in test_eq(a, b)
32 def test_eq(a,b):
33 "test that a==b"
---> 34 test(a,b,equals, '==')
35
36 # Cell

~/anaconda3/lib/python3.8/site-packages/fastcore/test.py in test(a, b, cmp, cname)
22 "assert that cmp(a,b); display inputs and cname or cmp.__name__ if it fails"
23 if cname is None: cname=cmp.__name__
---> 24 assert cmp(a,b),f"{cname}:\n{a}\n{b}"
25
26 # Cell

AssertionError: ==:
12
64
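For reference, the failing check at the very bottom can be reproduced in isolation. This is only a sketch with made-up tensors using the element counts from my error:

```python
import torch
from fastai.torch_core import flatten_check

# Made-up tensors reproducing the element-count mismatch from the traceback:
# flatten_check flattens both inputs and asserts they have equal lengths.
inp = torch.randn(12)     # e.g. 12 flattened predictions
targ = torch.randn(64)    # e.g. 64 flattened targets
flatten_check(inp, targ)  # raises AssertionError: ==: 12 vs 64
```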

I had a similar issue and realised it was my metric: I was using accuracy_multi instead of accuracy. Do check that.
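Roughly the difference, as a sketch with made-up tensors (not your actual data):

```python
import torch
from fastai.metrics import accuracy, accuracy_multi

preds = torch.randn(8, 5)                         # batch of 8, 5 classes
targ_single = torch.randint(0, 5, (8,))           # single-label targets, shape [8]
targ_multi = torch.randint(0, 2, (8, 5)).float()  # multi-label targets, shape [8, 5]

print(accuracy(preds, targ_single))       # fine: 8 == 8 after flattening
print(accuracy_multi(preds, targ_multi))  # fine for multi-label targets
accuracy(preds, targ_multi)               # AssertionError: ==: 8 vs 40 in flatten_check
```

Using the wrong metric for your target shape fails in exactly this flatten_check.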


Thanks for responding, Arshath. That wasn’t the issue in my case. I updated the git repository and all my dependencies again some time later, and it started working. I’m still not sure what the issue was.


Hey marshath, thanks. It worked for me.
