'TensorCategory' and 'TensorText' when using AWD_LSTM for text classification

Hey all,

I am following the NLP chapter from fastbook on my own dataset, keeping my code as close to the original as possible.

I can fine-tune my language model without any problems. The issue comes when I train the classifier: after loading the LM weights and trying to fit the learner, I get this error:

TypeError: unsupported operand type(s) for +=: 'TensorCategory' and 'TensorText'
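
For reference, the classifier stage follows the fastbook pattern; here is the book's version as a stand-in (my dataset and paths differ, and path / dls_lm are set up as earlier in the chapter; the exact code is in the notebook linked below):

from fastai.text.all import *

# classifier DataLoaders, reusing the language model's vocab (fastbook chapter 10 style)
dls_clas = DataBlock(
    blocks=(TextBlock.from_folder(path, vocab=dls_lm.vocab), CategoryBlock),
    get_y=parent_label,
    get_items=partial(get_text_files, folders=['train', 'test']),
    splitter=GrandparentSplitter(valid_name='test')
).dataloaders(path, path=path, bs=128, seq_len=72)

learn = text_classifier_learner(dls_clas, AWD_LSTM, drop_mult=0.5, metrics=accuracy)
learn = learn.load_encoder('finetuned')   # load the fine-tuned language model encoder
learn.fit_one_cycle(1, 2e-2)              # -> TypeError at this step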

Here is a link to my Colab notebook; the fastai version is 2.1.6.

Thanks for the help!


I get this same error in the lesson 1 IMDB code, unmodified:

The code:
from fastai.text.all import *

dls = TextDataLoaders.from_folder(untar_data(URLs.IMDB), valid='test')
learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=accuracy)
learn.fine_tune(4, 1e-2)

The error, after downloading and extracting the IMDB data:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-4-5ab79cd5e866> in <module>
      3 dls = TextDataLoaders.from_folder(untar_data(URLs.IMDB), valid='test')
      4 learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=accuracy)
----> 5 learn.fine_tune(4, 1e-2)

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    155     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    156     self.freeze()
--> 157     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    158     base_lr /= 2
    159     self.unfreeze()

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    110     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    111               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 112     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    113 
    114 # Cell

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    203             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    204             self.n_epoch = n_epoch
--> 205             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    206 
    207     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _do_fit(self)
    194         for epoch in range(self.n_epoch):
    195             self.epoch=epoch
--> 196             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    197 
    198     def fit(self, n_epoch, lr=None, wd=None, cbs=None, reset_opt=False):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _do_epoch(self)
    188 
    189     def _do_epoch(self):
--> 190         self._do_epoch_train()
    191         self._do_epoch_validate()
    192 

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _do_epoch_train(self)
    180     def _do_epoch_train(self):
    181         self.dl = self.dls.train
--> 182         self._with_events(self.all_batches, 'train', CancelTrainException)
    183 
    184     def _do_epoch_validate(self, ds_idx=1, dl=None):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in all_batches(self)
    158     def all_batches(self):
    159         self.n_iter = len(self.dl)
--> 160         for o in enumerate(self.dl): self.one_batch(*o)
    161 
    162     def _do_one_batch(self):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in one_batch(self, i, b)
    176         self.iter = i
    177         self._split(b)
--> 178         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    179 
    180     def _do_epoch_train(self):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _do_one_batch(self)
    164         self('after_pred')
    165         if len(self.yb): self.loss = self.loss_func(self.pred, *self.yb)
--> 166         self('after_loss')
    167         if not self.training or not len(self.yb): return
    168         self('before_backward')

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in __call__(self, event_name)
    130     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    131 
--> 132     def __call__(self, event_name): L(event_name).map(self._call_one)
    133 
    134     def _call_one(self, event_name):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, gen, *args, **kwargs)
    177     def range(cls, a, b=None, step=None): return cls(range_of(a, b=b, step=step))
    178 
--> 179     def map(self, f, *args, gen=False, **kwargs): return self._new(map_ex(self, f, *args, gen=gen, **kwargs))
    180     def argwhere(self, f, negate=False, **kwargs): return self._new(argwhere(self, f, negate, **kwargs))
    181     def filter(self, f=noop, negate=False, gen=False, **kwargs):

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastcore/basics.py in map_ex(iterable, f, gen, *args, **kwargs)
    604     res = map(g, iterable)
    605     if gen: return res
--> 606     return list(res)
    607 
    608 # Cell

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastcore/basics.py in __call__(self, *args, **kwargs)
    594             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    595         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 596         return self.func(*fargs, **kwargs)
    597 
    598 # Cell

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in _call_one(self, event_name)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/learner.py in <listcomp>(.0)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

~/anaconda3/envs/machineLearning/lib/python3.8/site-packages/fastai/callback/rnn.py in after_loss(self)
     31     def after_loss(self):
     32         if not self.training: return
---> 33         if self.alpha != 0.:  self.learn.loss += self.alpha * self.out.float().pow(2).mean()
     34         if self.beta != 0.:
     35             h = self.raw_out

TypeError: unsupported operand type(s) for +=: 'TensorCategory' and 'TensorText'
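
For what it's worth, the last frame is fastai's RNNRegularizer.after_loss, where the AR regularisation term is added to the loss. A minimal sketch of what the clash appears to be (my reading of the error message: the loss tensor carries the target's TensorCategory subclass while the AR term derived from the raw output is a TensorText):

import torch
from fastai.text.all import TensorCategory, TensorText

loss = TensorCategory(torch.tensor(1.0))   # running loss, carrying the target's tensor subclass
ar_term = TensorText(torch.tensor(0.5))    # regularisation term computed from the model's output
loss += ar_term                            # raises the same TypeError on the affected fastai releases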

Can you go ahead and open a GitHub issue for this please? 🙂 Thanks!

Yes, I got exactly the same error following the first notebook from the fastai course, training on the IMDb reviews. It's the code from 01_intro.ipynb:

learn = text_classifier_learner(dls, AWD_LSTM, drop_mult=0.5, metrics=error_rate)
learn.fine_tune(4, 1e-2)
epoch	train_loss	valid_loss	error_rate	time
0	0.785647	00:00
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-8-f3e4bf6dc0bf> in <module>()
      1 learn = text_classifier_learner(dls1, AWD_LSTM, drop_mult=0.5, metrics=error_rate)
----> 2 learn.fine_tune(4, 1e-2)

20 frames
/usr/local/lib/python3.6/dist-packages/fastai/callback/rnn.py in after_loss(self)
     31     def after_loss(self):
     32         if not self.training: return
---> 33         if self.alpha != 0.:  self.learn.loss += self.alpha * self.out.float().pow(2).mean()
     34         if self.beta != 0.:
     35             h = self.raw_out

TypeError: unsupported operand type(s) for +=: 'TensorCategory' and 'TensorText'

I created an issue on GitHub: https://github.com/fastai/fastai/issues/3027


This has now been fixed in the latest pip release, thanks!


I can confirm it’s fixed over here. Thank you very much!

I still have the problem. I'm trying a weighted cross-entropy loss, given that my 5 classes are highly imbalanced.

import numpy as np
import torch
from torch import nn

sum_counts = 198000
# per-class sample counts for my 5 labels
counts = np.array([6299, 6895, 15973, 39130, 129703])
weights = counts / sum_counts
weights = torch.FloatTensor(weights).cuda()
learn.loss_func = nn.CrossEntropyLoss(weight=weights)

learn.fit_one_cycle(1, 0.01)

Full error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-...> in <module>
      1 ############################################## CHANGING IT UP ###################################
----> 2 learn.fit_one_cycle(1, 0.01)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    110     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    111               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 112     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    113 
    114 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    203             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    204             self.n_epoch = n_epoch
--> 205             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    206 
    207     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_fit(self)
    194         for epoch in range(self.n_epoch):
    195             self.epoch=epoch
--> 196             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    197 
    198     def fit(self, n_epoch, lr=None, wd=None, cbs=None, reset_opt=False):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch(self)
    188 
    189     def _do_epoch(self):
--> 190         self._do_epoch_train()
    191         self._do_epoch_validate()
    192 

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch_train(self)
    180     def _do_epoch_train(self):
    181         self.dl = self.dls.train
--> 182         self._with_events(self.all_batches, 'train', CancelTrainException)
    183 
    184     def _do_epoch_validate(self, ds_idx=1, dl=None):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in all_batches(self)
    158     def all_batches(self):
    159         self.n_iter = len(self.dl)
--> 160         for o in enumerate(self.dl): self.one_batch(*o)
    161 
    162     def _do_one_batch(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in one_batch(self, i, b)
    176         self.iter = i
    177         self._split(b)
--> 178         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    179 
    180     def _do_epoch_train(self):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_one_batch(self)
    164         self('after_pred')
    165         if len(self.yb): self.loss = self.loss_func(self.pred, *self.yb)
--> 166         self('after_loss')
    167         if not self.training or not len(self.yb): return
    168         self('before_backward')

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in __call__(self, event_name)
    130     def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
    131 
--> 132     def __call__(self, event_name): L(event_name).map(self._call_one)
    133 
    134     def _call_one(self, event_name):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, gen, *args, **kwargs)
    177     def range(cls, a, b=None, step=None): return cls(range_of(a, b=b, step=step))
    178 
--> 179     def map(self, f, *args, gen=False, **kwargs): return self._new(map_ex(self, f, *args, gen=gen, **kwargs))
    180     def argwhere(self, f, negate=False, **kwargs): return self._new(argwhere(self, f, negate, **kwargs))
    181     def filter(self, f=noop, negate=False, gen=False, **kwargs):

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/basics.py in map_ex(iterable, f, gen, *args, **kwargs)
    605     res = map(g, iterable)
    606     if gen: return res
--> 607     return list(res)
    608 
    609 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastcore/basics.py in __call__(self, *args, **kwargs)
    595             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    596         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 597         return self.func(*fargs, **kwargs)
    598 
    599 # Cell

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _call_one(self, event_name)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in <listcomp>(.0)
    134     def _call_one(self, event_name):
    135         assert hasattr(event, event_name), event_name
--> 136         [cb(event_name) for cb in sort_by_run(self.cbs)]
    137 
    138     def _bn_bias_state(self, with_bias): return norm_bias_params(self.model, with_bias).map(self.opt.state)

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/core.py in __call__(self, event_name)
     42                (self.run_valid and not getattr(self, 'training', False)))
     43         res = None
---> 44         if self.run and _run: res = getattr(self, event_name, noop)()
     45         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     46         return res

/opt/conda/envs/fastai/lib/python3.8/site-packages/fastai/callback/rnn.py in after_loss(self)
     31     def after_loss(self):
     32         if not self.training: return
---> 33         if self.alpha != 0.:  self.learn.loss += self.alpha * self.out.float().pow(2).mean()
     34         if self.beta != 0.:
     35             h = self.raw_out

TypeError: unsupported operand type(s) for +=: 'TensorCategory' and 'TensorText'

Try this: A walk with fastai2 - Vision - Study Group and Online Lectures Megathread
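
You could also try passing the class weights through fastai's own CrossEntropyLossFlat rather than raw nn.CrossEntropyLoss; it's the loss wrapper fastai's text classifiers use by default, and it forwards weight to nn.CrossEntropyLoss. A sketch, reusing the weights tensor from above (no guarantee it avoids the TensorCategory/TensorText clash itself):

from fastai.text.all import *

# CrossEntropyLossFlat flattens predictions/targets and forwards kwargs such as weight
learn.loss_func = CrossEntropyLossFlat(weight=weights)
learn.fit_one_cycle(1, 0.01)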