AttributeError: 'LSTM' object has no attribute 'proj_size'

I am getting this error when calling learn.predict().

I have a trained model in production (trained with fastai v1). Because it was trained before the switch to fastai v2, I am pinning torch==1.4.0 and fastai==1.0.60 so that I can load the trained model and run it in the pipeline without retraining it under fastai v2.
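For reference, the load path looks roughly like this (a sketch; the directory and export file name are placeholders, not my actual paths):

from fastai.basic_train import load_learner

# Load the learner exported from the fastai v1 training run
# (fastai==1.0.60, torch==1.4.0). Path and file name are illustrative.
learn = load_learner('models', 'export.pkl')
pred_class, pred_idx, probs = learn.predict('How can I find bios?')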

As of a week ago, when I run learn.predict, I am getting this error:

AttributeError Traceback (most recent call last)
in <module>
----> 1 learn.predict('How can I find bios?')

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/basic_train.py in predict(self, item, return_x, batch_first, with_dropout, **kwargs)
373 "Return predicted class, label and probabilities for item."
374 batch = self.data.one_item(item)
--> 375 res = self.pred_batch(batch=batch, with_dropout=with_dropout)
376 raw_pred,x = grab_idx(res,0,batch_first=batch_first),batch[0]
377 norm = getattr(self.data,'norm',False)

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/basic_train.py in pred_batch(self, ds_type, batch, reconstruct, with_dropout)
352 xb,yb = cb_handler.on_batch_begin(xb,yb, train=False)
353 with torch.no_grad():
--> 354 if not with_dropout: preds = loss_batch(self.model.eval(), xb, yb, cb_handler=cb_handler)
355 else: preds = loss_batch(self.model.eval().apply(self.apply_dropout), xb, yb, cb_handler=cb_handler)
356 res = _loss_func2activ(self.loss_func)(preds[0])

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/basic_train.py in loss_batch(model, xb, yb, loss_func, opt, cb_handler)
24 if not is_listy(xb): xb = [xb]
25 if not is_listy(yb): yb = [yb]
---> 26 out = model(*xb)
27 out = cb_handler.on_loss_begin(out)
28

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
887 result = self._slow_forward(*input, **kwargs)
888 else:
--> 889 result = self.forward(*input, **kwargs)
890 for hook in itertools.chain(
891 _global_forward_hooks.values(),

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/container.py in forward(self, input)
117 def forward(self, input):
118 for module in self:
--> 119 input = module(input)
120 return input
121

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
887 result = self._slow_forward(*input, **kwargs)
888 else:
--> 889 result = self.forward(*input, **kwargs)
890 for hook in itertools.chain(
891 _global_forward_hooks.values(),

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/text/learner.py in forward(self, input)
261 raw_outputs,outputs,masks = [],[],[]
262 for i in range(0, sl, self.bptt):
--> 263 r, o = self.module(input[:,i: min(i+self.bptt, sl)])
264 if i>(sl-self.max_len):
265 masks.append(input[:,i: min(i+self.bptt, sl)] == self.pad_idx)

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
887 result = self._slow_forward(*input, **kwargs)
888 else:
--> 889 result = self.forward(*input, **kwargs)
890 for hook in itertools.chain(
891 _global_forward_hooks.values(),

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/text/models/awd_lstm.py in forward(self, input, from_embeddings)
110 new_hidden,raw_outputs,outputs = [],[],[]
111 for l, (rnn,hid_dp) in enumerate(zip(self.rnns, self.hidden_dps)):
--> 112 raw_output, new_h = rnn(raw_output, self.hidden[l])
113 new_hidden.append(new_h)
114 raw_outputs.append(raw_output)

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
887 result = self._slow_forward(*input, **kwargs)
888 else:
--> 889 result = self.forward(*input, **kwargs)
890 for hook in itertools.chain(
891 _global_forward_hooks.values(),

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/fastai/text/models/awd_lstm.py in forward(self, *args)
47 #To avoid the warning that comes because the weights aren't flattened.
48 warnings.simplefilter("ignore")
---> 49 return self.module.forward(*args)
50
51 def reset(self):

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/rnn.py in forward(self, input, hx)
657 hx = self.permute_hidden(hx, sorted_indices)
658
--> 659 self.check_forward_args(input, hx, batch_sizes)
660 if batch_sizes is None:
661 result = _VF.lstm(input, hx, self._flat_weights, self.bias, self.num_layers,

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/rnn.py in check_forward_args(self, input, hidden, batch_sizes)
604 def check_forward_args(self, input: Tensor, hidden: Tuple[Tensor, Tensor], batch_sizes: Optional[Tensor]): # type: ignore
605 self.check_input(input, batch_sizes)
--> 606 self.check_hidden_size(hidden[0], self.get_expected_hidden_size(input, batch_sizes),
607 'Expected hidden[0] size {}, got {}')
608 self.check_hidden_size(hidden[1], self.get_expected_cell_size(input, batch_sizes),

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/rnn.py in get_expected_hidden_size(self, input, batch_sizes)
210 mini_batch = input.size(0) if self.batch_first else input.size(1)
211 num_directions = 2 if self.bidirectional else 1
--> 212 if self.proj_size > 0:
213 expected_hidden_size = (self.num_layers * num_directions,
214 mini_batch, self.proj_size)

/local_disk0/.ephemeral_nfs/envs/pythonEnv-7043ec3c-dac7-4961-bac7-0397385e83d3/lib/python3.7/site-packages/torch/nn/modules/module.py in __getattr__(self, name)
946 return modules[name]
947 raise AttributeError("'{}' object has no attribute '{}'".format(
--> 948 type(self).__name__, name))
949
950 def __setattr__(self, name: str, value: Union[Tensor, 'Module']) -> None:

AttributeError: 'LSTM' object has no attribute 'proj_size'

Does anyone know what has changed with the backend LSTM or how to fix this? Thank you!

I am getting exactly the same error and don't know the fix.

Using these dependency versions solved mine:

torch==1.7.1
torchvision==0.1.8
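For context: the proj_size argument was only added to nn.LSTM in PyTorch 1.8.0, which is why the rnn.py frame in the traceback checks self.proj_size. An environment that resolves torch 1.8 or newer will hit this on any LSTM pickled under an older torch, so despite the pin to 1.4.0, the environment was evidently running a newer release; pinning torch below 1.8 avoids the check. Note that torchvision 0.8.2, not 0.1.8, is the release that officially pairs with torch 1.7.1, so the version above may be a typo. A minimal pin, assuming pip:

pip install "torch==1.7.1" "torchvision==0.8.2" "fastai==1.0.60"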

This worked for me as well. Thank you!
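If downgrading torch is not an option, another approach sometimes used for this error is to restore the missing attribute on the unpickled model before calling predict. This is a sketch, not a tested fix for this exact model, and it assumes proj_size is the only attribute the newer torch expects:

import torch.nn as nn

# nn.LSTM instances pickled under torch < 1.8 never had proj_size set in
# __init__, so nn.Module.__getattr__ raises AttributeError on access.
# Restore the pre-1.8 default (0 = no projection) on every LSTM.
for module in learn.model.modules():
    if isinstance(module, nn.LSTM) and not hasattr(module, 'proj_size'):
        module.proj_size = 0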
