Label_delim error unhashable type list

fastai-v2.0.0

I have a DataFrame which looks like this:

Running the following causes the error below.
dls = TextDataLoaders.from_df(df_u, valid_pct=0.33, seed=0, bs=32, num_workers=0, label_delim=' ; ')
Without label_delim it worked fine, though I would like to find out what causes the error.

Error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-338-5521d7276e89> in <module>
----> 1 dls = TextDataLoaders.from_df(df_u, valid_pct=0.33, seed=0, bs=32, num_workers=0, label_delim=' ; ')

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\text\data.py in from_df(cls, df, path, valid_pct, seed, text_col, label_col, label_delim, y_block, text_vocab, is_lm, valid_col, tok_tfm, seq_len, backwards, **kwargs)
    245                            get_y=None if is_lm else ColReader(label_col, label_delim=label_delim),
    246                            splitter=splitter)
--> 247         return cls.from_dblock(dblock, df, path=path, seq_len=seq_len, **kwargs)
    248 
    249     @classmethod

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in from_dblock(cls, dblock, source, path, bs, val_bs, shuffle_train, device, **kwargs)
    168     @classmethod
    169     def from_dblock(cls, dblock, source, path='.',  bs=64, val_bs=None, shuffle_train=True, device=None, **kwargs):
--> 170         return dblock.dataloaders(source, path=path, bs=bs, val_bs=val_bs, shuffle_train=shuffle_train, device=device, **kwargs)
    171 
    172     _docs=dict(__getitem__="Retrieve `DataLoader` at `i` (`0` is training, `1` is validation)",

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\block.py in dataloaders(self, source, path, verbose, **kwargs)
    111 
    112     def dataloaders(self, source, path='.', verbose=False, **kwargs):
--> 113         dsets = self.datasets(source)
    114         kwargs = {**self.dls_kwargs, **kwargs, 'verbose': verbose}
    115         return dsets.dataloaders(path=path, after_item=self.item_tfms, after_batch=self.batch_tfms, **kwargs)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\block.py in datasets(self, source, verbose)
    108         splits = (self.splitter or RandomSplitter())(items)
    109         pv(f"{len(splits)} datasets of sizes {','.join([str(len(s)) for s in splits])}", verbose)
--> 110         return Datasets(items, tfms=self._combine_type_tfms(), splits=splits, dl_type=self.dl_type, n_inp=self.n_inp, verbose=verbose)
    111 
    112     def dataloaders(self, source, path='.', verbose=False, **kwargs):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in __init__(self, items, tfms, tls, n_inp, dl_type, **kwargs)
    287     def __init__(self, items=None, tfms=None, tls=None, n_inp=None, dl_type=None, **kwargs):
    288         super().__init__(dl_type=dl_type)
--> 289         self.tls = L(tls if tls else [TfmdLists(items, t, **kwargs) for t in L(ifnone(tfms,[None]))])
    290         self.n_inp = ifnone(n_inp, max(1, len(self.tls)-1))
    291 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in <listcomp>(.0)
    287     def __init__(self, items=None, tfms=None, tls=None, n_inp=None, dl_type=None, **kwargs):
    288         super().__init__(dl_type=dl_type)
--> 289         self.tls = L(tls if tls else [TfmdLists(items, t, **kwargs) for t in L(ifnone(tfms,[None]))])
    290         self.n_inp = ifnone(n_inp, max(1, len(self.tls)-1))
    291 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in __init__(self, items, tfms, use_list, do_setup, split_idx, train_setup, splits, types, verbose, dl_type)
    224         if do_setup:
    225             pv(f"Setting up {self.tfms}", verbose)
--> 226             self.setup(train_setup=train_setup)
    227 
    228     def _new(self, items, split_idx=None, **kwargs):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in setup(self, train_setup)
    240 
    241     def setup(self, train_setup=True):
--> 242         self.tfms.setup(self, train_setup)
    243         if len(self) != 0:
    244             x = super().__getitem__(0) if self.splits is None else super().__getitem__(self.splits[0])[0]

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\transform.py in setup(self, items, train_setup)
    195         tfms = self.fs[:]
    196         self.fs.clear()
--> 197         for t in tfms: self.add(t,items, train_setup)
    198 
    199     def add(self,t, items=None, train_setup=False):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\transform.py in add(self, t, items, train_setup)
    198 
    199     def add(self,t, items=None, train_setup=False):
--> 200         t.setup(items, train_setup)
    201         self.fs.append(t)
    202 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\transform.py in setup(self, items, train_setup)
     76     def setup(self, items=None, train_setup=False):
     77         train_setup = train_setup if self.train_setup is None else self.train_setup
---> 78         return self.setups(getattr(items, 'train', items) if train_setup else items)
     79 
     80     def _call(self, fn, x, split_idx=None, **kwargs):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\dispatch.py in __call__(self, *args, **kwargs)
     97         if not f: return args[0]
     98         if self.inst is not None: f = MethodType(f, self.inst)
---> 99         return f(*args, **kwargs)
    100 
    101     def __get__(self, inst, owner):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\transforms.py in setups(self, dsets)
    236 
    237     def setups(self, dsets):
--> 238         if self.vocab is None and dsets is not None: self.vocab = CategoryMap(dsets, sort=self.sort, add_na=self.add_na)
    239         self.c = len(self.vocab)
    240 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\transforms.py in __init__(self, col, sort, add_na, strict)
    212             if not hasattr(col,'unique'): col = L(col, use_list=True)
    213             # `o==o` is the generalized definition of non-NaN used by Pandas
--> 214             items = L(o for o in col.unique() if o==o)
    215             if sort: items = items.sorted()
    216         self.items = '#na#' + items if add_na else items

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in unique(self)
    393         return self._new(i for i,o in enumerate(self) if f(o))
    394 
--> 395     def unique(self): return L(dict.fromkeys(self).keys())
    396     def enumerate(self): return L(enumerate(self))
    397     def val2idx(self): return {v:k for k,v in self.enumerate()}

TypeError: unhashable type: 'list'

It’s using Categorize rather than MultiCategorize. The DataBlock API would be better in this case, as you can use MultiCategoryBlock (which is what you should use here).

1 Like

Thank you. I will try it out. Though what use does label_delim have at all with TextDataLoaders.from_df?

Valid point, it’s most likely a bug. (I don’t use the one liners personally, so I need to look into that code)

I am actually having difficulty doing this, it would be very helpful if you could provide an example.

I tried this below:


error:

---------------------------------------------------------------------------
AssertionError                            Traceback (most recent call last)
<ipython-input-534-99ae1f5d705e> in <module>
----> 1 learn.fine_tune(4, 1e-2)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\callback\schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    159     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    160     self.freeze()
--> 161     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    162     base_lr /= 2
    163     self.unfreeze()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\callback\schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    114 
    115 # Cell

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\utils.py in _f(*args, **kwargs)
    452         init_args.update(log)
    453         setattr(inst, 'init_args', init_args)
--> 454         return inst if to_return else f(*args, **kwargs)
    455     return _f
    456 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    202             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    203             self.n_epoch,self.loss = n_epoch,tensor(0.)
--> 204             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    205 
    206     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _do_fit(self)
    192         for epoch in range(self.n_epoch):
    193             self.epoch=epoch
--> 194             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    195 
    196     @log_args(but='cbs')

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _do_epoch(self)
    187     def _do_epoch(self):
    188         self._do_epoch_train()
--> 189         self._do_epoch_validate()
    190 
    191     def _do_fit(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _do_epoch_validate(self, ds_idx, dl)
    183         if dl is None: dl = self.dls[ds_idx]
    184         self.dl = dl;
--> 185         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    186 
    187     def _do_epoch(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _with_events(self, f, event_type, ex, final)
    153 
    154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
    157         finally:   self(f'after_{event_type}')        ;final()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in all_batches(self)
    159     def all_batches(self):
    160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
    162 
    163     def _do_one_batch(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in one_batch(self, i, b)
    174         self.iter = i
    175         self._split(b)
--> 176         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    177 
    178     def _do_epoch_train(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _with_events(self, f, event_type, ex, final)
    155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
--> 157         finally:   self(f'after_{event_type}')        ;final()
    158 
    159     def all_batches(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in __call__(self, event_name)
    131     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    132 
--> 133     def __call__(self, event_name): L(event_name).map(self._call_one)
    134 
    135     def _call_one(self, event_name):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in map(self, f, *args, **kwargs)
    381              else f.format if isinstance(f,str)
    382              else f.__getitem__)
--> 383         return self._new(map(g, self))
    384 
    385     def filter(self, f, negate=False, **kwargs):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in _new(self, items, *args, **kwargs)
    331     @property
    332     def _xtra(self): return None
--> 333     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    334     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    335     def copy(self): return self._new(self.items.copy())

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __init__(self, items, use_list, match, *rest)
    322         if items is None: items = []
    323         if (use_list is not None) or not _is_array(items):
--> 324             items = list(items) if use_list else _listify(items)
    325         if match is not None:
    326             if is_coll(match): match = len(match)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in _listify(o)
    258     if isinstance(o, list): return o
    259     if isinstance(o, str) or _is_array(o): return [o]
--> 260     if is_iter(o): return list(o)
    261     return [o]
    262 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(self, *args, **kwargs)
    224             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    225         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 226         return self.fn(*fargs, **kwargs)
    227 
    228 # Cell

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _call_one(self, event_name)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in <listcomp>(.0)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\callback\core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in after_batch(self)
    436         if len(self.yb) == 0: return
    437         mets = self._train_mets if self.training else self._valid_mets
--> 438         for met in mets: met.accumulate(self.learn)
    439         if not self.training: return
    440         self.lrs.append(self.opt.hypers[-1]['lr'])

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in accumulate(self, learn)
    361     def accumulate(self, learn):
    362         bs = find_bs(learn.yb)
--> 363         self.total += to_detach(self.func(learn.pred, *learn.yb))*bs
    364         self.count += bs
    365     @property

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\metrics.py in accuracy(inp, targ, axis)
     98 def accuracy(inp, targ, axis=-1):
     99     "Compute accuracy with `targ` when `pred` is bs * n_classes"
--> 100     pred,targ = flatten_check(inp.argmax(dim=axis), targ)
    101     return (pred == targ).float().mean()
    102 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in flatten_check(inp, targ)
    748     "Check that `out` and `targ` have the same number of elements and flatten them."
    749     inp,targ = inp.contiguous().view(-1),targ.contiguous().view(-1)
--> 750     test_eq(len(inp), len(targ))
    751     return inp,targ

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\test.py in test_eq(a, b)
     30 def test_eq(a,b):
     31     "`test` that `a==b`"
---> 32     test(a,b,equals, '==')
     33 
     34 # Cell

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\test.py in test(a, b, cmp, cname)
     20     "`assert` that `cmp(a,b)`; display inputs and `cname or cmp.__name__` if it fails"
     21     if cname is None: cname=cmp.__name__
---> 22     assert cmp(a,b),f"{cname}:\n{a}\n{b}"
     23 
     24 # Cell

AssertionError: ==:
32
2272
2 Likes

You’re actually almost perfect! The only issue here is your metric. accuracy is used in single-label classification. We use accuracy_multi when multiple labels can show up at once.

thank you, that worked out :slight_smile: now I have another error with Interpretation. What might be the issue here do you think?

error:

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in to_concat(xs, dim)
    234     #   in this case we return a big list
--> 235     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
    236     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])

RuntimeError: Sizes of tensors must match except in dimension 0. Got 308 and 227 in dimension 1 (The offending index is 1)

During handling of the above exception, another exception occurred:

RuntimeError                              Traceback (most recent call last)
<ipython-input-52-edb170aa43f7> in <module>
----> 1 interp = Interpretation.from_learner(learn)
      2 #interp.plot_top_losses(9)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\interpret.py in from_learner(cls, learn, ds_idx, dl, act)
     27         "Construct interpretation object from a learner"
     28         if dl is None: dl = learn.dls[ds_idx]
---> 29         return cls(dl, *learn.get_preds(dl=dl, with_input=True, with_loss=True, with_decoded=True, act=None))
     30 
     31     def top_losses(self, k=None, largest=True):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, cbs, **kwargs)
    230         if with_loss: ctx_mgrs.append(self.loss_not_reduced())
    231         with ContextManagers(ctx_mgrs):
--> 232             self._do_epoch_validate(dl=dl)
    233             if act is None: act = getattr(self.loss_func, 'activation', noop)
    234             res = cb.all_tensors()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _do_epoch_validate(self, ds_idx, dl)
    183         if dl is None: dl = self.dls[ds_idx]
    184         self.dl = dl;
--> 185         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    186 
    187     def _do_epoch(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _with_events(self, f, event_type, ex, final)
    155         try:       self(f'before_{event_type}')       ;f()
    156         except ex: self(f'after_cancel_{event_type}')
--> 157         finally:   self(f'after_{event_type}')        ;final()
    158 
    159     def all_batches(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in __call__(self, event_name)
    131     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    132 
--> 133     def __call__(self, event_name): L(event_name).map(self._call_one)
    134 
    135     def _call_one(self, event_name):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in map(self, f, *args, **kwargs)
    381              else f.format if isinstance(f,str)
    382              else f.__getitem__)
--> 383         return self._new(map(g, self))
    384 
    385     def filter(self, f, negate=False, **kwargs):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in _new(self, items, *args, **kwargs)
    331     @property
    332     def _xtra(self): return None
--> 333     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    334     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    335     def copy(self): return self._new(self.items.copy())

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __init__(self, items, use_list, match, *rest)
    322         if items is None: items = []
    323         if (use_list is not None) or not _is_array(items):
--> 324             items = list(items) if use_list else _listify(items)
    325         if match is not None:
    326             if is_coll(match): match = len(match)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in _listify(o)
    258     if isinstance(o, list): return o
    259     if isinstance(o, str) or _is_array(o): return [o]
--> 260     if is_iter(o): return list(o)
    261     return [o]
    262 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(self, *args, **kwargs)
    224             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    225         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 226         return self.fn(*fargs, **kwargs)
    227 
    228 # Cell

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in _call_one(self, event_name)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in <listcomp>(.0)
    135     def _call_one(self, event_name):
    136         assert hasattr(event, event_name), event_name
--> 137         [cb(event_name) for cb in sort_by_run(self.cbs)]
    138 
    139     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\callback\core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\callback\core.py in after_validate(self)
    117         "Concatenate all recorded tensors"
    118         if not hasattr(self, 'preds'): return
--> 119         if self.with_input:     self.inputs  = detuplify(to_concat(self.inputs, dim=self.concat_dim))
    120         if not self.save_preds: self.preds   = detuplify(to_concat(self.preds, dim=self.concat_dim))
    121         if not self.save_targs: self.targets = detuplify(to_concat(self.targets, dim=self.concat_dim))

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in to_concat(xs, dim)
    229     "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
    230     if not xs: return xs
--> 231     if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
    232     if isinstance(xs[0],dict):  return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs[0].keys()}
    233     #We may receives xs that are not concatenatable (inputs of a text classifier for instance),

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in <listcomp>(.0)
    229     "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
    230     if not xs: return xs
--> 231     if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
    232     if isinstance(xs[0],dict):  return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs[0].keys()}
    233     #We may receives xs that are not concatenatable (inputs of a text classifier for instance),

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in to_concat(xs, dim)
    234     #   in this case we return a big list
    235     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
--> 236     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
    237                           for i in range_of(o_)) for o_ in xs], L())
    238 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in <listcomp>(.0)
    234     #   in this case we return a big list
    235     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
--> 236     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
    237                           for i in range_of(o_)) for o_ in xs], L())
    238 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in __init__(self, items, use_list, match, *rest)
    322         if items is None: items = []
    323         if (use_list is not None) or not _is_array(items):
--> 324             items = list(items) if use_list else _listify(items)
    325         if match is not None:
    326             if is_coll(match): match = len(match)

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\foundation.py in _listify(o)
    258     if isinstance(o, list): return o
    259     if isinstance(o, str) or _is_array(o): return [o]
--> 260     if is_iter(o): return list(o)
    261     return [o]
    262 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in <genexpr>(.0)
    234     #   in this case we return a big list
    235     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
--> 236     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
    237                           for i in range_of(o_)) for o_ in xs], L())
    238 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in _f(self, *args, **kwargs)
    296         def _f(self, *args, **kwargs):
    297             cls = self.__class__
--> 298             res = getattr(super(TensorBase, self), fn)(*args, **kwargs)
    299             return retain_type(res, self, copy_meta=True)
    300         return _f

RuntimeError: index_select(): Expected dtype int64 for index

Not 100% sure, haven’t played with the multi-label Interp yet. BTW when posting the stack traces do so like in the previous post you had where it was in between those “```”. Helps make it readable for us :slight_smile:

Edited the prev post accordingly. I also receive error when I run learn.show_results()

error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-58-c3b657dcc9ae> in <module>
----> 1 learn.show_results()

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\learner.py in show_results(self, ds_idx, dl, max_n, shuffle, **kwargs)
    256         b = dl.one_batch()
    257         _,_,preds = self.get_preds(dl=[b], with_decoded=True)
--> 258         self.dls.show_results(b, preds, max_n=max_n, **kwargs)
    259 
    260     def show_training_loop(self):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in show_results(self, b, out, max_n, ctxs, show, **kwargs)
    109         res = (x,x1,None,None) if its is None else (x, y, its, outs.itemgot(slice(self.n_inp,None)))
    110         if not show: return res
--> 111         show_results(*res, ctxs=ctxs, max_n=max_n, **kwargs)
    112 
    113     @property

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastcore\dispatch.py in __call__(self, *args, **kwargs)
     97         if not f: return args[0]
     98         if self.inst is not None: f = MethodType(f, self.inst)
---> 99         return f(*args, **kwargs)
    100 
    101     def __get__(self, inst, owner):

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\text\learner.py in show_results(x, y, samples, outs, ctxs, max_n, trunc_at, **kwargs)
    251     if ctxs is None: ctxs = get_empty_df(min(len(samples), max_n))
    252     samples = L((s[0].truncate(trunc_at),*s[1:]) for s in samples)
--> 253     ctxs = show_results[object](x, y, samples, outs, ctxs=ctxs, max_n=max_n, **kwargs)
    254     display_df(pd.DataFrame(ctxs))
    255     return ctxs

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in show_results(x, y, samples, outs, ctxs, max_n, **kwargs)
     26         ctxs = [b.show(ctx=c, **kwargs) for b,c,_ in zip(samples.itemgot(i),ctxs,range(max_n))]
     27     for i in range(len(outs[0])):
---> 28         ctxs = [b.show(ctx=c, **kwargs) for b,c,_ in zip(outs.itemgot(i),ctxs,range(max_n))]
     29     return ctxs
     30 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\core.py in <listcomp>(.0)
     26         ctxs = [b.show(ctx=c, **kwargs) for b,c,_ in zip(samples.itemgot(i),ctxs,range(max_n))]
     27     for i in range(len(outs[0])):
---> 28         ctxs = [b.show(ctx=c, **kwargs) for b,c,_ in zip(outs.itemgot(i),ctxs,range(max_n))]
     29     return ctxs
     30 

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\data\transforms.py in show(self, ctx, sep, color, **kwargs)
    264 class MultiCategory(L):
    265     def show(self, ctx=None, sep=';', color='black', **kwargs):
--> 266         return show_title(sep.join(self.map(str)), ctx=ctx, color=color, **kwargs)
    267 
    268 # Cell

~\AppData\Local\Continuum\anaconda3\envs\dev\lib\site-packages\fastai\torch_core.py in show_title(o, ax, ctx, label, color, **kwargs)
    409         ax.set_title(o, color=color)
    410     elif isinstance(ax, pd.Series):
--> 411         while label in ax: label += '_'
    412         ax = ax.append(pd.Series({label: o}))
    413     return ax

TypeError: unsupported operand type(s) for +=: 'NoneType' and 'str'