MOOC Lesson 3 / 02_production.ipynb F1Score metric error

These are probably two very basic questions.

  1. I am playing with 02_production.ipynb after running it successfully all the way through. I tried to add a second metric, F1Score, in addition to error_rate, and got the unexpected error shown below. This is a single-label vision learner. Am I not using the function correctly?

  2. If I want to compare two models' performance over a test set during inference, is there an easy way to calculate the F1Score for each model using learn.get_preds? (A rough sketch of what I had in mind is below.)
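
For question 2, this is roughly what I had in mind (untested, and I'm not sure it is the right way to use get_preds; test_items and the two learners are just placeholders for my own data and models):

Code:
from sklearn.metrics import f1_score

# Build one test DataLoader with labels and score both trained learners on it
test_dl = learn_a.dls.test_dl(test_items, with_labels=True)
preds_a, targs_a = learn_a.get_preds(dl=test_dl)
preds_b, targs_b = learn_b.get_preds(dl=test_dl)

# Turn probabilities into predicted classes, then compare F1 scores
# (average='macro' assumed for a multi-class problem; sklearn's default is binary)
print(f1_score(targs_a, preds_a.argmax(dim=1), average='macro'))
print(f1_score(targs_b, preds_b.argmax(dim=1), average='macro'))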

Thanks in advance for your help.

Jeff

Code:
learn = cnn_learner(dls, resnet18, metrics=[error_rate,F1Score])
learn.fine_tune(4)

epoch  train_loss  valid_loss  error_rate  F1Score  time
0      1.418459    0.149099    0.078125    None     00:06


TypeError                                 Traceback (most recent call last)
<ipython-input-...> in <module>
      1 # learn = cnn_learner(dls, resnet18, metrics=error_rate)
      2 learn = cnn_learner(dls, resnet18, metrics=[error_rate,F1Score])
----> 3 learn.fine_tune(4)

~/miniconda3/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

~/miniconda3/lib/python3.7/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    159     "Fine tune with freeze for freeze_epochs then with unfreeze from epochs using discriminative LR"
    160     self.freeze()
--> 161     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    162     base_lr /= 2
    163     self.unfreeze()

~/miniconda3/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

~/miniconda3/lib/python3.7/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    114
    115 # Cell

~/miniconda3/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
     54         init_args.update(log)
     55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
     57     return _f

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    205         self.opt.set_hypers(lr=self.lr if lr is None else lr)
    206         self.n_epoch = n_epoch
--> 207         self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    208
    209     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _do_fit(self)
    195         for epoch in range(self.n_epoch):
    196             self.epoch=epoch
--> 197             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    198
    199     @log_args(but='cbs')

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _do_epoch(self)
    190     def _do_epoch(self):
    191         self._do_epoch_train()
--> 192         self._do_epoch_validate()
    193
    194     def _do_fit(self):

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    186         if dl is None: dl = self.dls[ds_idx]
    187         self.dl = dl
--> 188         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    189
    190     def _do_epoch(self):

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in all_batches(self)
    159     def all_batches(self):
    160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
    162
    163     def _do_one_batch(self):

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in one_batch(self, i, b)
    177         self.iter = i
    178         self._split(b)
--> 179         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    180
    181     def _do_epoch_train(self):

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
--> 157         finally:   self(f'after_{event_type}')        ;final()
    158
    159     def all_batches(self):

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in __call__(self, event_name)
    131     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    132
--> 133     def __call__(self, event_name): L(event_name).map(self._call_one)
    134
    135     def _call_one(self, event_name):

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    270              else f.format if isinstance(f,str)
    271              else f.__getitem__)
--> 272         return self._new(map(g, self))
    273
    274     def filter(self, f, negate=False, **kwargs):

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    216     @property
    217     def _xtra(self): return None
--> 218     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    219     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    220     def copy(self): return self._new(self.items.copy())

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
    197     def __call__(cls, x=None, *args, **kwargs):
    198         if not args and not kwargs and x is not None and isinstance(x,cls): return x
--> 199         return super().__call__(x, *args, **kwargs)
    200
    201 # Cell

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    207         if items is None: items = []
    208         if (use_list is not None) or not _is_array(items):
--> 209             items = list(items) if use_list else _listify(items)
    210         if match is not None:
    211             if is_coll(match): match = len(match)

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in _listify(o)
    114     if isinstance(o, list): return o
    115     if isinstance(o, str) or _is_array(o): return [o]
--> 116     if is_iter(o): return list(o)
    117     return [o]
    118

~/miniconda3/lib/python3.7/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    177         if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    178         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 179         return self.fn(*fargs, **kwargs)
    180
    181 # Cell

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in _call_one(self, event_name)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in <listcomp>(.0)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/miniconda3/lib/python3.7/site-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in after_batch(self)
    448         if len(self.yb) == 0: return
    449         mets = self._train_mets if self.training else self._valid_mets
--> 450         for met in mets: met.accumulate(self.learn)
    451         if not self.training: return
    452         self.lrs.append(self.opt.hypers[-1]['lr'])

~/miniconda3/lib/python3.7/site-packages/fastai/learner.py in accumulate(self, learn)
    372     def accumulate(self, learn):
    373         bs = find_bs(learn.yb)
--> 374         self.total += learn.to_detach(self.func(learn.pred, *learn.yb))*bs
    375         self.count += bs
    376     @property

TypeError: unsupported operand type(s) for *: 'AccumMetric' and 'int'

Since it's a class, you should call F1Score() when passing it to your metrics, IIRC.
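
Something like this should work (untested on my end; if your dataset has more than two classes you'll probably also want to pass average='macro', since the sklearn default behind F1Score is binary):

# Instantiate the metric class instead of passing the class itself
learn = cnn_learner(dls, resnet18, metrics=[error_rate, F1Score(average='macro')])
learn.fine_tune(4)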


Thanks again, Zachary. That fixed the problem.