OK, I did this:
from sklearn.metrics import confusion_matrix

def fp_tn(inp, targ):
    # ratio of false positives to true negatives
    tn, fp, fn, tp = confusion_matrix(targ.cpu().numpy(), inp.detach().cpu().numpy()).ravel()
    return fp/tn

fptn_metric = AccumMetric(fp_tn)
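For reference, the function itself does what I'd expect when called directly on hard 0/1 predictions (a minimal sanity check, assuming one class label per element rather than raw activations — the values below are just made-up examples):

import torch

preds = torch.tensor([1, 0, 1, 1, 0, 0])  # hypothetical hard predictions
targs = torch.tensor([1, 0, 0, 1, 0, 1])  # hypothetical targets
fp_tn(preds, targs)  # fp=1, tn=2 -> 0.5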
But I got an error after the first epoch:
epoch train_loss valid_loss accuracy f1_score fp_tn time
0 0.392312 1.395006 0.796875 0.480000 None 00:51
/usr/local/lib/python3.6/dist-packages/torch/nn/functional.py:2854: UserWarning: The default behavior for interpolate/upsample with float scale_factor will change in 1.6.0 to align with other frameworks/libraries, and use scale_factor directly, instead of relying on the computed output size. If you wish to keep the old behavior, please set recompute_scale_factor=True. See the documentation of nn.Upsample for details.
warnings.warn("The default behavior for interpolate/upsample with float scale_factor will change "
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
<ipython-input-63-dfea2fe8c944> in <module>()
----> 1 learn.fine_tune(1, 3e-2)
/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
429 init_args.update(log)
430 setattr(inst, 'init_args', init_args)
--> 431 return inst if to_return else f(*args, **kwargs)
432 return _f
433
/usr/local/lib/python3.6/dist-packages/fastai2/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
159 "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
160 self.freeze()
--> 161 self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
162 base_lr /= 2
163 self.unfreeze()
/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
429 init_args.update(log)
430 setattr(inst, 'init_args', init_args)
--> 431 return inst if to_return else f(*args, **kwargs)
432 return _f
433
/usr/local/lib/python3.6/dist-packages/fastai2/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
111 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
112 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
114
115 # Cell
/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in _f(*args, **kwargs)
429 init_args.update(log)
430 setattr(inst, 'init_args', init_args)
--> 431 return inst if to_return else f(*args, **kwargs)
432 return _f
433
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
202 self.epoch=epoch; self('begin_epoch')
203 self._do_epoch_train()
--> 204 self._do_epoch_validate()
205 except CancelEpochException: self('after_cancel_epoch')
206 finally: self('after_epoch')
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in _do_epoch_validate(self, ds_idx, dl)
181 try:
182 self.dl = dl; self('begin_validate')
--> 183 with torch.no_grad(): self.all_batches()
184 except CancelValidException: self('after_cancel_validate')
185 finally: self('after_validate')
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in all_batches(self)
151 def all_batches(self):
152 self.n_iter = len(self.dl)
--> 153 for o in enumerate(self.dl): self.one_batch(*o)
154
155 def one_batch(self, i, b):
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in one_batch(self, i, b)
165 self.opt.zero_grad()
166 except CancelBatchException: self('after_cancel_batch')
--> 167 finally: self('after_batch')
168
169 def _do_begin_fit(self, n_epoch):
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in __call__(self, event_name)
132 def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
133
--> 134 def __call__(self, event_name): L(event_name).map(self._call_one)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
375 else f.format if isinstance(f,str)
376 else f.__getitem__)
--> 377 return self._new(map(g, self))
378
379 def filter(self, f, negate=False, **kwargs):
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
325 @property
326 def _xtra(self): return None
--> 327 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
328 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
329 def copy(self): return self._new(self.items.copy())
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
45 return x
46
---> 47 res = super().__call__(*((x,) + args), **kwargs)
48 res._newchk = 0
49 return res
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
316 if items is None: items = []
317 if (use_list is not None) or not _is_array(items):
--> 318 items = list(items) if use_list else _listify(items)
319 if match is not None:
320 if is_coll(match): match = len(match)
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in _listify(o)
252 if isinstance(o, list): return o
253 if isinstance(o, str) or _is_array(o): return [o]
--> 254 if is_iter(o): return list(o)
255 return [o]
256
/usr/local/lib/python3.6/dist-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
218 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
219 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 220 return self.fn(*fargs, **kwargs)
221
222 # Cell
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in _call_one(self, event_name)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in <listcomp>(.0)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
/usr/local/lib/python3.6/dist-packages/fastai2/callback/core.py in __call__(self, event_name)
22 _run = (event_name not in _inner_loop or (self.run_train and getattr(self, 'training', True)) or
23 (self.run_valid and not getattr(self, 'training', False)))
---> 24 if self.run and _run: getattr(self, event_name, noop)()
25 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
26
/usr/local/lib/python3.6/dist-packages/fastai2/learner.py in after_batch(self)
427 if len(self.yb) == 0: return
428 mets = self._train_mets if self.training else self._valid_mets
--> 429 for met in mets: met.accumulate(self.learn)
430 if not self.training: return
431 self.lrs.append(self.opt.hypers[-1]['lr'])
/usr/local/lib/python3.6/dist-packages/fastai2/metrics.py in accumulate(self, learn)
44 targ = learn.y
45 pred,targ = to_detach(pred),to_detach(targ)
---> 46 if self.flatten: pred,targ = flatten_check(pred,targ)
47 self.preds.append(pred)
48 self.targs.append(targ)
/usr/local/lib/python3.6/dist-packages/fastai2/torch_core.py in flatten_check(inp, targ)
778 "Check that `out` and `targ` have the same number of elements and flatten them."
779 inp,targ = inp.contiguous().view(-1),targ.contiguous().view(-1)
--> 780 test_eq(len(inp), len(targ))
781 return inp,targ
/usr/local/lib/python3.6/dist-packages/fastcore/test.py in test_eq(a, b)
30 def test_eq(a,b):
31 "`test` that `a==b`"
---> 32 test(a,b,equals, '==')
33
34 # Cell
/usr/local/lib/python3.6/dist-packages/fastcore/test.py in test(a, b, cmp, cname)
20 "`assert` that `cmp(a,b)`; display inputs and `cname or cmp.__name__` if it fails"
21 if cname is None: cname=cmp.__name__
---> 22 assert cmp(a,b),f"{cname}:\n{a}\n{b}"
23
24 # Cell
AssertionError: ==:
128
64
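If I'm reading the assertion right, the flattened predictions have 128 elements while the targets have 64, i.e. exactly twice as many, so flatten_check seems to be receiving the raw two-activation output rather than one prediction per image. A minimal sketch of what I think it's checking (the batch size of 64 and the two-class output shape are my guesses):

import torch
from fastai2.torch_core import flatten_check

preds = torch.randn(64, 2)          # raw model output: one row per image, one activation per class
targs = torch.randint(0, 2, (64,))  # one label per image
flatten_check(preds, targs)         # AssertionError: ==: 128 vs 64, same mismatch as above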