Stuck with cross validation due to 'float' object is not callable

Hi Community,
As part of my learning journey I wanted to implement KFold cross validation based on the chapter 1 dogs vs cats example. However, I'm stuck on an issue that occurs quite randomly: TypeError: 'float' object is not callable.

Here’s my code. First I put all the image paths into a data frame to compute the folds using StratifiedKFold.

from fastai.vision.all import *
from sklearn.model_selection import StratifiedKFold

path = untar_data(URLs.PETS)/'images'
imdf = pd.Series(get_image_files(path), name='path').to_frame()
imdf.head()

k = 3  # try 3 folds
strat_kfold = StratifiedKFold(n_splits=k, shuffle=True)

imdf['fold'] = -1
for i, (_, test_index) in enumerate(strat_kfold.split(imdf.path.values, imdf.fold.values)):
    imdf.iloc[test_index, -1] = i
imdf.head()
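
Note that at this point the fold column is still all -1, so passing it as the label argument to strat_kfold.split doesn't actually stratify anything (every row belongs to the same class). A minimal sketch, assuming the goal is folds balanced by the cat/dog label and reusing the imdf and strat_kfold objects from above:

# Derive the label the same way as is_cat below: an uppercase first letter means cat
labels = imdf.path.map(lambda p: Path(p).name[0].isupper())

imdf['fold'] = -1
for i, (_, test_index) in enumerate(strat_kfold.split(imdf.path.values, labels.values)):
    imdf.iloc[test_index, -1] = i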

Files are distributed across the folds.

The bar plot shows the files are distributed evenly across folds:

imdf.fold.value_counts().plot.bar();


My training loop looks like this:

def is_cat(fpath): 
    return Path(fpath).name[0].isupper()

error_rates = []

for i in range(k):
    print(f'Fold ({i})')
    dls_fold = DataBlock(
                    blocks=(ImageBlock, CategoryBlock),
                    get_x=ColReader('path'),
                    get_y=compose(ColReader('path'), is_cat),
                    splitter=IndexSplitter(imdf[imdf.fold == i].index),
                    item_tfms=Resize(224)).dataloaders(imdf)
    
    learn = cnn_learner(dls_fold, resnet34, metrics=error_rate)
    learn.fine_tune(1)
    loss, error_rate = learn.validate()
    error_rates.append(error_rate)

I’m not even trying to average the fold error_rates at this stage, because I’m getting the error below. The first time it happened on the second fold, but in most cases when I rerun it, it fails on the first fold.

I checked the datasets in the folds and they seem to be fine.

Does anyone have an idea what is wrong with it?

The error I’m getting is:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-8-1ffe3e8bc2b8> in <module>
     11 
     12     learn = cnn_learner(dls_fold, resnet34, metrics=error_rate)
---> 13     learn.fine_tune(1)
     14     loss, error_rate = learn.validate()
     15     error_rates.append(error_rate)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    155     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    156     self.freeze()
--> 157     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    158     base_lr /= 2
    159     self.unfreeze()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    110     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    111               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 112     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    113 
    114 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    203             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    204             self.n_epoch = n_epoch
--> 205             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    206 
    207     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_fit(self)
    194         for epoch in range(self.n_epoch):
    195             self.epoch=epoch
--> 196             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    197 
    198     def fit(self, n_epoch, lr=None, wd=None, cbs=None, reset_opt=False):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch(self)
    189     def _do_epoch(self):
    190         self._do_epoch_train()
--> 191         self._do_epoch_validate()
    192 
    193     def _do_fit(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    185         if dl is None: dl = self.dls[ds_idx]
    186         self.dl = dl
--> 187         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    188 
    189     def _do_epoch(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in all_batches(self)
    158     def all_batches(self):
    159         self.n_iter = len(self.dl)
--> 160         for o in enumerate(self.dl): self.one_batch(*o)
    161 
    162     def _do_one_batch(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in one_batch(self, i, b)
    176         self.iter = i
    177         self._split(b)
--> 178         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    179 
    180     def _do_epoch_train(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
--> 156         finally:   self(f'after_{event_type}')        ;final()
    157 
    158     def all_batches(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in __call__(self, event_name)
    130     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    131 
--> 132     def __call__(self, event_name): L(event_name).map(self._call_one)
    133 
    134     def _call_one(self, event_name):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, gen, *args, **kwargs)
    177     def range(cls, a, b=None, step=None): return cls(range_of(a, b=b, step=step))
    178 
--> 179     def map(self, f, *args, gen=False, **kwargs): return self._new(map_ex(self, f, *args, gen=gen, **kwargs))
    180     def argwhere(self, f, negate=False, **kwargs): return self._new(argwhere(self, f, negate, **kwargs))
    181     def filter(self, f=noop, negate=False, gen=False, **kwargs):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/basics.py in map_ex(iterable, f, gen, *args, **kwargs)
    605     res = map(g, iterable)
    606     if gen: return res
--> 607     return list(res)
    608 
    609 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/basics.py in __call__(self, *args, **kwargs)
    595             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    596         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 597         return self.func(*fargs, **kwargs)
    598 
    599 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _call_one(self, event_name)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in <listcomp>(.0)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in after_batch(self)
    455         if len(self.yb) == 0: return
    456         mets = self._train_mets if self.training else self._valid_mets
--> 457         for met in mets: met.accumulate(self.learn)
    458         if not self.training: return
    459         self.lrs.append(self.opt.hypers[-1]['lr'])

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in accumulate(self, learn)
    377     def accumulate(self, learn):
    378         bs = find_bs(learn.yb)
--> 379         self.total += learn.to_detach(self.func(learn.pred, *learn.yb))*bs
    380         self.count += bs
    381     @property

TypeError: 'float' object is not callable

Omg. I made a stupid mistake in the line where I capture the metrics from the validate() function:

loss, error_rate = learn.validate()

That error_rate then obviously becomes a float, and in the next iteration of the for loop that float value is used here instead of the error_rate() metric function:

learn = cnn_learner(dls_fold, resnet34, metrics=error_rate)
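
In other words, it's plain name shadowing. A toy illustration, with made-up values just to show the mechanism:

def error_rate():   # stands in for the fastai metric function
    return 0.1

loss, error_rate = 0.7, 0.05   # rebinding the name to a float, like learn.validate() does above
error_rate()                   # TypeError: 'float' object is not callable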

May I ask you to post the solution?

Hi @fabio.geraci. The problem was using a variable with the same name as the metric function. So the solution is very simple, e.g.

loss, metric = learn.validate()
error_rates.append(metric)
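
For completeness, a sketch of how the tail of the loop and the averaging mentioned above could look (variable names are just illustrative):

learn = cnn_learner(dls_fold, resnet34, metrics=error_rate)  # error_rate stays bound to the metric function
learn.fine_tune(1)
loss, metric = learn.validate()   # capture the results under different names
error_rates.append(metric)

# After the loop: average the error rate across the k folds
mean_error = sum(error_rates) / len(error_rates)
print(f'Mean error rate across {k} folds: {mean_error:.4f}')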

Thank you!