AttributeError when running learn.loss_func

Hi,
I have a private multilabel dataset for text classification. I ran into an AttributeError when running learn.loss_func.

Here is what my data setup looks like:

dls_class_block = DataBlock(
    blocks=(TextBlock.from_df('proc_name', res_col_name='proc_name', vocab=dls_lm.vocab), 
            MultiCategorize),
    get_x=ColReader('proc_name'),
    get_y=ColReader('cur_job_func', label_delim='|'),
    splitter=ColSplitter(col='is_valid')
)
dls_class = dls_class_block.dataloaders(data, bs=5)
dls_class.one_batch()

------------------------------------------------------------------------
(TensorText([[  2,  26,  11,  32,  48,  15, 337,  29,  32,  88,  85,  45, 140, 155,
           85,  45, 140, 337],
         [  2,  11,  13,   9,  61, 209,  95,  16,  95,  16, 310, 280,  95,  16,
          201,   1,   1,   1],
         [  2,  11,  32,  26, 298,  30, 100,   0,   0,  30,  55, 297, 298,   0,
            0,   1,   1,   1],
         [  2,  26,  11,  32,  11,  96,  46,  20,  97,  52, 300, 345,  64,   1,
            1,   1,   1,   1],
         [  2,  15,  31,   9,  84,  80,  10,  12,  31, 159, 254, 108, 103,   1,
            1,   1,   1,   1]]),
 [('NA', 'NA', 'NA', 'NA', 'InformationTechnology')])

Here is the code that triggers the error, followed by the full traceback:

learn = text_classifier_learner(
    dls_class, 
    AWD_LSTM, 
    drop_mult=0.5, 
    loss_func=BCEWithLogitsLossFlat(), 
    metrics=[accuracy_multi, metrics.precision_score, F1ScoreMulti()]
)
x, y = dls_class.one_batch()
pred = learn.model(x)
learn.loss_func(pred, y)

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-78-72eba93017a5> in <module>
----> 1 learn.loss_func(pred, y)

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/layers.py in __call__(self, inp, targ, **kwargs)
    290 
    291     def __call__(self, inp, targ, **kwargs):
--> 292         inp  = inp .transpose(self.axis,-1).contiguous()
    293         targ = targ.transpose(self.axis,-1).contiguous()
    294         if self.floatify and targ.dtype!=torch.float16: targ = targ.float()

AttributeError: 'tuple' object has no attribute 'transpose'

I have also tried another loss function, CrossEntropyLossFlat, and got the same error. My guess is that there is an underlying data issue. This could also be relevant: I get a RecursionError when running dls_class.show_batch(). Here is the message.

dls_class.show_batch()
---------------------------------------------------------------------------
RecursionError                            Traceback (most recent call last)
<ipython-input-89-6e53cf6f0709> in <module>
----> 1 dls_class.show_batch()

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/data/core.py in show_batch(self, b, max_n, ctxs, show, unique, **kwargs)
    100         if b is None: b = self.one_batch()
    101         if not show: return self._pre_show_batch(b, max_n=max_n)
--> 102         show_batch(*self._pre_show_batch(b, max_n=max_n), ctxs=ctxs, max_n=max_n, **kwargs)
    103         if unique: self.get_idxs = old_get_idxs
    104 

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/data/core.py in _pre_show_batch(self, b, max_n)
     90         b = self.decode(b)
     91         if hasattr(b, 'show'): return b,None,None
---> 92         its = self._decode_batch(b, max_n, full=False)
     93         if not is_listy(b): b,its = [b],L((o,) for o in its)
     94         return detuplify(b[:self.n_inp]),detuplify(b[self.n_inp:]),its

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/data/core.py in _decode_batch(self, b, max_n, full)
     84         f = self.after_item.decode
     85         f = compose(f, partial(getattr(self.dataset,'decode',noop), full = full))
---> 86         return L(batch_to_samples(b, max_n=max_n)).map(f)
     87 
     88     def _pre_show_batch(self, b, max_n=9):

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/torch_core.py in batch_to_samples(b, max_n)
    569     if isinstance(b, Tensor): return retain_types(list(b[:max_n]), [b])
    570     else:
--> 571         res = L(b).map(partial(batch_to_samples,max_n=max_n))
    572         return retain_types(res.zip(), [b])
    573 

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
    381              else f.format if isinstance(f,str)
    382              else f.__getitem__)
--> 383         return self._new(map(g, self))
    384 
    385     def filter(self, f, negate=False, **kwargs):

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
    331     @property
    332     def _xtra(self): return None
--> 333     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    334     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    335     def copy(self): return self._new(self.items.copy())

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
     45             return x
     46 
---> 47         res = super().__call__(*((x,) + args), **kwargs)
     48         res._newchk = 0
     49         return res

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
    322         if items is None: items = []
    323         if (use_list is not None) or not _is_array(items):
--> 324             items = list(items) if use_list else _listify(items)
    325         if match is not None:
    326             if is_coll(match): match = len(match)

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in _listify(o)
    235     if isinstance(o, list): return o
    236     if isinstance(o, str) or _is_array(o): return [o]
--> 237     if is_iter(o): return list(o)
    238     return [o]
    239 

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
    298             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    299         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 300         return self.fn(*fargs, **kwargs)
    301 
    302 # Cell

... last 7 frames repeated, from the frame below ...

~/.pyenv/versions/3.8.2/lib/python3.8/site-packages/fastai/torch_core.py in batch_to_samples(b, max_n)
    569     if isinstance(b, Tensor): return retain_types(list(b[:max_n]), [b])
    570     else:
--> 571         res = L(b).map(partial(batch_to_samples,max_n=max_n))
    572         return retain_types(res.zip(), [b])
    573 

RecursionError: maximum recursion depth exceeded while calling a Python object

Any suggestion would be appreciated. Thanks!

Looks like the second parameter should be MultiCategoryBlock instead of MultiCategorize.
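
That matches the one_batch output above, where the targets come back as a tuple of raw strings rather than an encoded tensor: MultiCategorize is a Transform, while the blocks argument expects a TransformBlock such as MultiCategoryBlock, so the labels were never encoded. A minimal corrected sketch of the DataBlock (same columns as above, untested against this dataset):

dls_class_block = DataBlock(
    blocks=(TextBlock.from_df('proc_name', res_col_name='proc_name', vocab=dls_lm.vocab),
            MultiCategoryBlock()),  # a block, not the bare MultiCategorize transform
    get_x=ColReader('proc_name'),
    get_y=ColReader('cur_job_func', label_delim='|'),
    splitter=ColSplitter(col='is_valid')
)

With this change the targets should be one-hot encoded TensorMultiCategory floats, which is what BCEWithLogitsLossFlat expects. It would also explain the RecursionError: batch_to_samples recurses into anything that is not a Tensor, and iterating a Python str yields more strs, so the raw string targets never bottom out.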

I have got a similar issue. My code is below:

ds4lm = DataBlock(blocks=TextBlock.from_folder(opdir, is_lm=True),
                  get_items=get_text_files,
                  splitter=RandomSplitter(0.1))

dl4lm = ds4lm.dataloaders(opdir, bs=4)

config = awd_lstm_lm_config.copy()
config.update({'emb_sz': 500, 'n_hid': 500})
model = get_language_model(AWD_LSTM, vocab_sz=len(dl4lm.vocab), config=config)

opt_func = partial(Adam, wd=0.1, eps=1e-7)
cbs = MixedPrecision()

lmlearner = Learner(dl4lm, model, loss_func=CrossEntropyLossFlat(),
                    opt_func=opt_func, cbs=cbs, metrics=[accuracy, Perplexity()])

lmlearner.fit_one_cycle(1, 5e-3, moms=(0.8, 0.7, 0.8), div=10)

Here I get the following error:

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
<ipython-input-43-e419090103c1> in <module>
----> 1 lmlearner.fit_one_cycle(1, 5e-3, moms=(0.8,0.7,0.8), div=10)

/opt/conda/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
 54         init_args.update(log)
 55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
 57     return _f

/opt/conda/lib/python3.7/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
114 
115 # Cell

/opt/conda/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
 54         init_args.update(log)
 55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
 57     return _f

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
205             self.opt.set_hypers(lr=self.lr if lr is None else lr)
206             self.n_epoch = n_epoch
--> 207             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
208 
209     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_fit(self)
195         for epoch in range(self.n_epoch):
196             self.epoch=epoch
--> 197             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
198 
199     @log_args(but='cbs')

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch(self)
189 
190     def _do_epoch(self):
--> 191         self._do_epoch_train()
192         self._do_epoch_validate()
193 

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch_train(self)
181     def _do_epoch_train(self):
182         self.dl = self.dls.train
--> 183         self._with_events(self.all_batches, 'train', CancelTrainException)
184 
185     def _do_epoch_validate(self, ds_idx=1, dl=None):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in all_batches(self)
159     def all_batches(self):
160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
162 
163     def _do_one_batch(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in one_batch(self, i, b)
177         self.iter = i
178         self._split(b)
--> 179         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
180 
181     def _do_epoch_train(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_one_batch(self)
164         self.pred = self.model(*self.xb)
165         self('after_pred')
--> 166         if len(self.yb): self.loss = self.loss_func(self.pred, *self.yb)
167         self('after_loss')
168         if not self.training or not len(self.yb): return

/opt/conda/lib/python3.7/site-packages/fastai/losses.py in __call__(self, inp, targ, **kwargs)
 27 
 28     def __call__(self, inp, targ, **kwargs):
---> 29         inp  = inp .transpose(self.axis,-1).contiguous()
 30         targ = targ.transpose(self.axis,-1).contiguous()
 31         if self.floatify and targ.dtype!=torch.float16: targ = targ.float()

AttributeError: 'tuple' object has no attribute 'transpose'

The stack trace points to the loss function, but I am not able to figure out the cause of this error. Is it something to do with the loss function, or is it because of the data?
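
One way to narrow this down (a minimal debugging sketch, not a confirmed diagnosis) is to do what the first post did and run a single batch through the model by hand, to see what the loss function actually receives:

x, y = dl4lm.one_batch()
model.to(x.device)  # make sure model and batch are on the same device
pred = model(x)
print(type(pred))   # a tuple here would reproduce "'tuple' object has no attribute 'transpose'"

If pred turns out to be a tuple, the logits are likely pred[0]: the fastai language-model head returns extra activations alongside the decoded output, and language_model_learner normally attaches callbacks (ModelResetter, RNNRegularizer) that unpack that tuple after each prediction, which a hand-built Learner does not get.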