Error in learn.get_preds() with data from the data frame column

Hi,

I have the following structure of the data frame:
image
with the following data:

I want the fastai model to predict tool labels for all the rows in the table. So I use the following code to read the image_id values and pass it to get_preds() function:

m = load_learner("models/teacher/convnext.pkl", cpu=False)
m.to(torch.device('cuda'))
dl = m.dls.test_dl(train_df['image_id'].values)

When I run the get_preds() function using the following command:
probs,targs,preds,losses=m.get_preds(dl=dl,with_input=False, with_loss=True, with_decoded=True)

I get the following error:


IndexError Traceback (most recent call last)
Cell In [45], line 1
----> 1 probs,targs,preds,losses=m.get_preds(dl=dl,with_input=False, with_loss=True, with_decoded=True)

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:300, in Learner.get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, cbs, **kwargs)
298 if with_loss: ctx_mgrs.append(self.loss_not_reduced())
299 with ContextManagers(ctx_mgrs):
→ 300 self._do_epoch_validate(dl=dl)
301 if act is None: act = getcallable(self.loss_func, ‘activation’)
302 res = cb.all_tensors()

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:236, in Learner._do_epoch_validate(self, ds_idx, dl)
234 if dl is None: dl = self.dls[ds_idx]
235 self.dl = dl
→ 236 with torch.no_grad(): self._with_events(self.all_batches, ‘validate’, CancelValidException)

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:193, in Learner._with_events(self, f, event_type, ex, final)
192 def _with_events(self, f, event_type, ex, final=noop):
→ 193 try: self(f'before_{event_type}'); f()
194 except ex: self(f'after_cancel_{event_type}')
195 self(f'after_{event_type}'); final()

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:199, in Learner.all_batches(self)
197 def all_batches(self):
198 self.n_iter = len(self.dl)
→ 199 for o in enumerate(self.dl): self.one_batch(*o)

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:227, in Learner.one_batch(self, i, b)
225 b = self._set_device(b)
226 self._split(b)
→ 227 self._with_events(self._do_one_batch, ‘batch’, CancelBatchException)

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:195, in Learner._with_events(self, f, event_type, ex, final)
193 try: self(f'before_{event_type}'); f()
194 except ex: self(f'after_cancel_{event_type}')
→ 195 self(f'after_{event_type}'); final()

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:171, in Learner.call(self, event_name)
→ 171 def call(self, event_name): L(event_name).map(self._call_one)

File ~/mambaforge/lib/python3.10/site-packages/fastcore/foundation.py:156, in L.map(self, f, gen, *args, **kwargs)
→ 156 def map(self, f, *args, gen=False, **kwargs): return self._new(map_ex(self, f, *args, gen=gen, **kwargs))

File ~/mambaforge/lib/python3.10/site-packages/fastcore/basics.py:840, in map_ex(iterable, f, gen, *args, **kwargs)
838 res = map(g, iterable)
839 if gen: return res
→ 840 return list(res)

File ~/mambaforge/lib/python3.10/site-packages/fastcore/basics.py:825, in bind.call(self, *args, **kwargs)
823 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
824 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
→ 825 return self.func(*fargs, **kwargs)

File ~/mambaforge/lib/python3.10/site-packages/fastai/learner.py:175, in Learner._call_one(self, event_name)
173 def _call_one(self, event_name):
174 if not hasattr(event, event_name): raise Exception(f’missing {event_name}')
→ 175 for cb in self.cbs.sorted(‘order’): cb(event_name)

File ~/mambaforge/lib/python3.10/site-packages/fastai/callback/core.py:62, in Callback.call(self, event_name)
60 try: res = getcallable(self, event_name)()
61 except (CancelBatchException, CancelBackwardException, CancelEpochException, CancelFitException, CancelStepException, CancelTrainException, CancelValidException): raise
—> 62 except Exception as e: raise modify_exception(e, f’Exception occured in {self.__class__.__name__} when calling event {event_name}:\n\t{e.args[0]}', replace=True)
63 if event_name==‘after_fit’: self.run=True #Reset self.run to True at each end of fit
64 return res

File ~/mambaforge/lib/python3.10/site-packages/fastai/callback/core.py:60, in Callback.call(self, event_name)
58 res = None
59 if self.run and _run:
—> 60 try: res = getcallable(self, event_name)()
61 except (CancelBatchException, CancelBackwardException, CancelEpochException, CancelFitException, CancelStepException, CancelTrainException, CancelValidException): raise
62 except Exception as e: raise modify_exception(e, f’Exception occured in {self.__class__.__name__} when calling event {event_name}:\n\t{e.args[0]}', replace=True)

File ~/mambaforge/lib/python3.10/site-packages/fastai/callback/core.py:148, in GatherPredsCallback.after_batch(self)
146 torch.save(targs[0], self.save_targs/str(self.iter), pickle_protocol=self.pickle_protocol)
147 if self.with_loss:
→ 148 bs = find_bs(self.yb)
149 loss = self.loss if self.loss.numel() == bs else self.loss.view(bs,-1).mean(1)
150 self.losses.append(self.learn.to_detach(loss))

File ~/mambaforge/lib/python3.10/site-packages/fastai/torch_core.py:608, in find_bs(b)
606 def find_bs(b):
607 “Recursively search the batch size of b.”
→ 608 res = item_find(b)
609 if not hasattr(res, “shape”): return len(b)
610 return res.shape[0]

File ~/mambaforge/lib/python3.10/site-packages/fastai/torch_core.py:594, in item_find(x, idx)
592 def item_find(x, idx=0):
593 "Recursively takes the idx-th element of x"
→ 594 if is_listy(x): return item_find(x[idx])
595 if isinstance(x,dict):
596 key = list(x.keys())[idx] if isinstance(idx, int) else idx

IndexError: Exception occured in GatherPredsCallback when calling event after_batch:
tuple index out of range

Does anyone know what is wrong in the code?

Many thanks,

Kind regards,
Bilal

It seems like you are doing multi-label classification (each row can have several labels).
The default separator for the labels is a "space", not the "comma" that your labels column appears to use.

Left as an exercise to find the code to override the default separator. :slight_smile: