F1Score metric problem on the Kaggle NLP Getting Started competition

I have just enrolled in the Kaggle competition as practice for part 1 of the course. Although I would eventually like to build a more complicated model, I wanted to start simple, so I tried the most basic fast.ai NLP classifier. However, I have run into an issue that I find hard to understand. I have written:

dls = TextDataLoaders.from_csv(path=path,
                               csv_fname='train.csv',
                               text_col='text',
                               label_col='target',
                               valid_pct=0.2)

This loads just fine. Then, replicating the NLP model:

learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=F1Score)
learn.model_dir = '/kaggle/working/'
learn.fine_tune(2, 1e-2)

However, when it tries to calculate the F1Score, it throws an error:

TypeError: unsupported operand type(s) for *: 'AccumMetric' and 'int'

Does anyone have any intuition about why this happens and how one would go about solving it?


TypeError                                 Traceback (most recent call last)
<ipython-input> in <module>
      1 learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=F1Score)
      2 learn.model_dir = '/kaggle/working/'
----> 3 learn.fine_tune(2, 1e-2)

/opt/conda/lib/python3.7/site-packages/fastcore/utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456

/opt/conda/lib/python3.7/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    159     "Fine tune with freeze for freeze_epochs then with unfreeze from epochs using discriminative LR"
    160     self.freeze()
--> 161     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    162     base_lr /= 2
    163     self.unfreeze()

/opt/conda/lib/python3.7/site-packages/fastcore/utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456

/opt/conda/lib/python3.7/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    114
    115 # Cell

/opt/conda/lib/python3.7/site-packages/fastcore/utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    202         self.opt.set_hypers(lr=self.lr if lr is None else lr)
    203         self.n_epoch,self.loss = n_epoch,tensor(0.)
--> 204         self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    205
    206     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_fit(self)
    192         for epoch in range(self.n_epoch):
    193             self.epoch=epoch
--> 194             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    195
    196     @log_args(but='cbs')

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch(self)
    187     def _do_epoch(self):
    188         self._do_epoch_train()
--> 189         self._do_epoch_validate()
    190
    191     def _do_fit(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    183         if dl is None: dl = self.dls[ds_idx]
    184         self.dl = dl;
--> 185         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    186
    187     def _do_epoch(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    153
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in all_batches(self)
    159     def all_batches(self):
    160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
    162
    163     def _do_one_batch(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in one_batch(self, i, b)
    174         self.iter = i
    175         self._split(b)
--> 176         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    177
    178     def _do_epoch_train(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
--> 157         finally:   self(f'after_{event_type}')        ;final()
    158
    159     def all_batches(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in __call__(self, event_name)
    131     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    132
--> 133     def __call__(self, event_name): L(event_name).map(self._call_one)
    134
    135     def _call_one(self, event_name):

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    381                else f.format if isinstance(f,str)
    382                else f.__getitem__)
--> 383         return self._new(map(g, self))
    384
    385     def filter(self, f, negate=False, **kwargs):

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    331     @property
    332     def _xtra(self): return None
--> 333     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    334     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    335     def copy(self): return self._new(self.items.copy())

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    322         if items is None: items = []
    323         if (use_list is not None) or not _is_array(items):
--> 324             items = list(items) if use_list else _listify(items)
    325         if match is not None:
    326             if is_coll(match): match = len(match)

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _listify(o)
    258     if isinstance(o, list): return o
    259     if isinstance(o, str) or _is_array(o): return [o]
--> 260     if is_iter(o): return list(o)
    261     return [o]
    262

/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    224         if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    225         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 226         return self.fn(*fargs, **kwargs)
    227
    228 # Cell

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _call_one(self, event_name)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in <listcomp>(.0)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/lib/python3.7/site-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in after_batch(self)
    436         if len(self.yb) == 0: return
    437         mets = self._train_mets if self.training else self._valid_mets
--> 438         for met in mets: met.accumulate(self.learn)
    439         if not self.training: return
    440         self.lrs.append(self.opt.hypers[-1]['lr'])

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in accumulate(self, learn)
    361     def accumulate(self, learn):
    362         bs = find_bs(learn.yb)
--> 363         self.total += to_detach(self.func(learn.pred, *learn.yb))*bs
    364         self.count += bs
    365     @property

TypeError: unsupported operand type(s) for *: 'AccumMetric' and 'int'


learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=F1Score)

I suspect you need to instantiate the metric in this case, so try passing F1Score() instead.
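
If I am reading the traceback right, the uninstantiated F1Score gets treated as the metric function itself, so fastai calls it on (pred, targ) and gets back an AccumMetric object instead of a number, which is what then blows up on the * bs multiplication in accumulate. A minimal sketch of the fix, keeping everything else from your code:

learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5,
                                metrics=F1Score())  # note the parentheses
learn.model_dir = '/kaggle/working/'
learn.fine_tune(2, 1e-2)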


It works, thanks!

@msivanes In which cases do we need to instantiate the metric, and in which cases don't we? E.g. accuracy_multi doesn't need one.

Short answer: metrics that follow the class naming convention (name starting with a capital letter) need to be instantiated; the rest are plain functions that you pass directly.
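
For example (a quick sketch, assuming the usual fastai imports and the dls from above; the average='macro' argument is only for illustration):

# function-style metric: pass the function itself
learn = text_classifier_learner(dls, AWD_LSTM, metrics=accuracy)

# class-style metric: instantiate it, optionally with arguments
learn = text_classifier_learner(dls, AWD_LSTM, metrics=F1Score(average='macro'))

# both styles can be mixed in a list
learn = text_classifier_learner(dls, AWD_LSTM, metrics=[accuracy, F1Score()])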

See this thread for more details: Understanding metrics and callbacks
