When running this code in IMDB lesson notebook:
s = "Cars have already started the march toward autonomy"
m = learner.model
# def proc_str(s): return TEXT.preprocess(TEXT.tokenize(s))
def proc_str(s): return [spacy_tok(s)]
def num_str(s): return TEXT.numericalize([proc_str(s)])
def sample_model(m, s, l=50):
t = num_str(s)
m[0].bs=1
m.eval()
m.reset()
res,*_ = m(t)
print('...', end='')
for i in range(l):
n=res[-1].topk(2)[1]
n = n[1] if n.data[0]==0 else n[0]
word = TEXT.vocab.itos[n.data[0]]
print(word, end=' ')
if word=='<eos>': break
res,*_ = m(n[0].unsqueeze(0))
m[0].bs=bs
sample_model(m,s)
I got the error:
ValueError Traceback (most recent call last)
<ipython-input-109-6b9c36a1a857> in <module>()
24 m[0].bs=bs
25
---> 26 sample_model(m,s)
<ipython-input-109-6b9c36a1a857> in sample_model(m, s, l)
20 print(word, end=' ')
21 if word=='<eos>': break
---> 22 res,*_ = m(n[0].unsqueeze(0))
23
24 m[0].bs=bs
~/anaconda3/envs/fastai/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
489 result = self._slow_forward(*input, **kwargs)
490 else:
--> 491 result = self.forward(*input, **kwargs)
492 for hook in self._forward_hooks.values():
493 hook_result = hook(self, input, result)
~/anaconda3/envs/fastai/lib/python3.6/site-packages/torch/nn/modules/container.py in forward(self, input)
89 def forward(self, input):
90 for module in self._modules.values():
---> 91 input = module(input)
92 return input
93
~/anaconda3/envs/fastai/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
489 result = self._slow_forward(*input, **kwargs)
490 else:
--> 491 result = self.forward(*input, **kwargs)
492 for hook in self._forward_hooks.values():
493 hook_result = hook(self, input, result)
~/fastai/courses/dl1/fastai/lm_rnn.py in forward(self, input)
91 dropouth, list of tensors evaluated from each RNN layer using dropouth,
92 """
---> 93 sl,bs = input.size()
94 if bs!=self.bs:
95 self.bs=bs
ValueError: not enough values to unpack (expected 2, got 1)
I figured that the result of n[0].unsqueeze(0)
is a 1 dimensional (?) tensor when the code expects it to be 2 values. Any ideas on how to fix that?