Fastai v2 Recipes (Tips and Tricks) - Wiki

Thanks for the nice examples! It tripped me up a bit because it is supposed to be with_labels — not with_label. For anyone searching, the error message you will get when the s is missing is the following:

---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
/tmp/ipykernel_7970/1878872865.py in <module>
      3 learn.dls.loaders.append(test_dl)
      4 
----> 5 interp = ClassificationInterpretation.from_learner(learn, ds_idx=2)
      6 interp.plot_confusion_matrix()

~/.local/lib/python3.7/site-packages/fastai/interpret.py in from_learner(cls, learn, ds_idx, dl, act)
     39         if dl is None: dl = learn.dls[ds_idx].new(shuffle=False, drop_last=False)
     40         _,_,losses = learn.get_preds(dl=dl, with_input=False, with_loss=True, with_decoded=False,
---> 41                                      with_preds=False, with_targs=False, act=act)
     42         return cls(learn, dl, losses, act)
     43 

~/.local/lib/python3.7/site-packages/fastai/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, cbs, **kwargs)
    253         if with_loss: ctx_mgrs.append(self.loss_not_reduced())
    254         with ContextManagers(ctx_mgrs):
--> 255             self._do_epoch_validate(dl=dl)
    256             if act is None: act = getattr(self.loss_func, 'activation', noop)
    257             res = cb.all_tensors()

~/.local/lib/python3.7/site-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    201         if dl is None: dl = self.dls[ds_idx]
    202         self.dl = dl
--> 203         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    204 
    205     def _do_epoch(self):

~/.local/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    161 
    162     def _with_events(self, f, event_type, ex, final=noop):
--> 163         try: self(f'before_{event_type}');  f()
    164         except ex: self(f'after_cancel_{event_type}')
    165         self(f'after_{event_type}');  final()

~/.local/lib/python3.7/site-packages/fastai/learner.py in all_batches(self)
    167     def all_batches(self):
    168         self.n_iter = len(self.dl)
--> 169         for o in enumerate(self.dl): self.one_batch(*o)
    170 
    171     def _do_one_batch(self):

~/.local/lib/python3.7/site-packages/fastai/learner.py in one_batch(self, i, b)
    192         b = self._set_device(b)
    193         self._split(b)
--> 194         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    195 
    196     def _do_epoch_train(self):

~/.local/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    163         try: self(f'before_{event_type}');  f()
    164         except ex: self(f'after_cancel_{event_type}')
--> 165         self(f'after_{event_type}');  final()
    166 
    167     def all_batches(self):

~/.local/lib/python3.7/site-packages/fastai/learner.py in __call__(self, event_name)
    139 
    140     def ordered_cbs(self, event): return [cb for cb in self.cbs.sorted('order') if hasattr(cb, event)]
--> 141     def __call__(self, event_name): L(event_name).map(self._call_one)
    142 
    143     def _call_one(self, event_name):

~/.local/lib/python3.7/site-packages/fastcore/foundation.py in map(self, f, gen, *args, **kwargs)
    153     def range(cls, a, b=None, step=None): return cls(range_of(a, b=b, step=step))
    154 
--> 155     def map(self, f, *args, gen=False, **kwargs): return self._new(map_ex(self, f, *args, gen=gen, **kwargs))
    156     def argwhere(self, f, negate=False, **kwargs): return self._new(argwhere(self, f, negate, **kwargs))
    157     def argfirst(self, f, negate=False): return first(i for i,o in self.enumerate() if f(o))

~/.local/lib/python3.7/site-packages/fastcore/basics.py in map_ex(iterable, f, gen, *args, **kwargs)
    777     res = map(g, iterable)
    778     if gen: return res
--> 779     return list(res)
    780 
    781 # Cell

~/.local/lib/python3.7/site-packages/fastcore/basics.py in __call__(self, *args, **kwargs)
    762             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    763         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 764         return self.func(*fargs, **kwargs)
    765 
    766 # Cell

~/.local/lib/python3.7/site-packages/fastai/learner.py in _call_one(self, event_name)
    143     def _call_one(self, event_name):
    144         if not hasattr(event, event_name): raise Exception(f'missing {event_name}')
--> 145         for cb in self.cbs.sorted('order'): cb(event_name)
    146 
    147     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/.local/lib/python3.7/site-packages/fastai/callback/core.py in __call__(self, event_name)
     55         res = None
     56         if self.run and _run:
---> 57             try: res = getattr(self, event_name, noop)()
     58             except (CancelBatchException, CancelEpochException, CancelFitException, CancelStepException, CancelTrainException, CancelValidException): raise
     59             except Exception as e:

~/.local/lib/python3.7/site-packages/fastai/callback/core.py in after_batch(self)
    135             torch.save(targs[0], self.save_targs/str(self.iter), pickle_protocol=self.pickle_protocol)
    136         if self.with_loss:
--> 137             bs = find_bs(self.yb)
    138             loss = self.loss if self.loss.numel() == bs else self.loss.view(bs,-1).mean(1)
    139             self.losses.append(self.learn.to_detach(loss))

~/.local/lib/python3.7/site-packages/fastai/torch_core.py in find_bs(b)
    568 def find_bs(b):
    569     "Recursively search the batch size of `b`."
--> 570     return item_find(b).shape[0]
    571 
    572 # Cell

~/.local/lib/python3.7/site-packages/fastai/torch_core.py in item_find(x, idx)
    554 def item_find(x, idx=0):
    555     "Recursively takes the `idx`-th element of `x`"
--> 556     if is_listy(x): return item_find(x[idx])
    557     if isinstance(x,dict):
    558         key = list(x.keys())[idx] if isinstance(idx, int) else idx

IndexError: Exception occured in `GatherPredsCallback` when calling event `after_batch`:
	tuple index out of range
1 Like

Do we call seed_everything() before creating the Learner or before calling fine_tune()?

Short answer:
Call seed_everything() at the very beginning of your code, before you do anything else, so that everything will be deterministic from that point on.
You can check whether everything works as intended: if you get the same loss on each run, then everything seems fine.

Longer answer:
The purpose of the seed_everything() method is to set the starting number (seed) for all random number generators (random, numpy, torch & underlying cuda too) + set cuDNN to deterministic mode (cuDNN is also used by Pytorch under the hood).

You want determinism from the beginning for repeatable experiments. Because neural networks can have randomized parts (e.g., dropout), and data sets & loaders usually also have randomized parts for every epoch, and so on, it is better to call seed_everything() before all of them.
(You can also call seed_everything() multiple times in your code if you want or need to; it just sets the seed again, and there is nothing wrong with that.)

2 Likes

I tried out your suggestion, and it worked. (I think the trick is to call seed_everything() before creating the DataLoaders.)

Thanks for the answer.

1 Like

Wow! This saved me! I also got this error and had been searching for the solution for quite some time…

1 Like