I tried to downgrade my PyTorch version by rebuilding PyTorch from source with pip, but that does not work either. I also edited lm_rnn.py as suggested in the thread "Anyone able to execute Lesson4-imdb recently without any issue?"
Here is the issue I am having:
Epoch
0% 0/1 [00:00<?, ?it/s]
0%| | 0/6873 [00:00<?, ?it/s]
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-65-b544778ca021> in <module>()
----> 1 learner.fit(lrs/2, 1, wds=wd, use_clr=(32,2), cycle_len=1)
~/fastai/courses/dl2/fastai/learner.py in fit(self, lrs, n_cycle, wds, **kwargs)
285 self.sched = None
286 layer_opt = self.get_layer_opt(lrs, wds)
--> 287 return self.fit_gen(self.model, self.data, layer_opt, n_cycle, **kwargs)
288
289 def warm_up(self, lr, wds=None):
~/fastai/courses/dl2/fastai/learner.py in fit_gen(self, model, data, layer_opt, n_cycle, cycle_len, cycle_mult, cycle_save_name, best_save_name, use_clr, use_clr_beta, metrics, callbacks, use_wd_sched, norm_wds, wds_sched_mult, use_swa, swa_start, swa_eval_freq, **kwargs)
232 metrics=metrics, callbacks=callbacks, reg_fn=self.reg_fn, clip=self.clip, fp16=self.fp16,
233 swa_model=self.swa_model if use_swa else None, swa_start=swa_start,
--> 234 swa_eval_freq=swa_eval_freq, **kwargs)
235
236 def get_layer_groups(self): return self.models.get_layer_groups()
~/fastai/courses/dl2/fastai/model.py in fit(model, data, n_epochs, opt, crit, metrics, callbacks, stepper, swa_model, swa_start, swa_eval_freq, **kwargs)
127 batch_num += 1
128 for cb in callbacks: cb.on_batch_begin()
--> 129 loss = model_stepper.step(V(x),V(y), epoch)
130 avg_loss = avg_loss * avg_mom + loss * (1-avg_mom)
131 debias_loss = avg_loss / (1 - avg_mom**batch_num)
~/fastai/courses/dl2/fastai/model.py in step(self, xs, y, epoch)
46 def step(self, xs, y, epoch):
47 xtra = []
---> 48 output = self.m(*xs)
49 if isinstance(output,tuple): output,*xtra = output
50 if self.fp16: self.m.zero_grad()
~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
489 result = self._slow_forward(*input, **kwargs)
490 else:
--> 491 result = self.forward(*input, **kwargs)
492 for hook in self._forward_hooks.values():
493 hook_result = hook(self, input, result)
~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/container.py in forward(self, input)
89 def forward(self, input):
90 for module in self._modules.values():
---> 91 input = module(input)
92 return input
93
~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
489 result = self._slow_forward(*input, **kwargs)
490 else:
--> 491 result = self.forward(*input, **kwargs)
492 for hook in self._forward_hooks.values():
493 hook_result = hook(self, input, result)
~/fastai/courses/dl2/fastai/lm_rnn.py in forward(self, input)
99 outputs.append(raw_output)
100
--> 101 self.hidden = repackage_var(new_hidden)
102 return raw_outputs, outputs
103
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
18 def repackage_var(h):
19 """Wraps h in new Variables, to detach them from their history."""
---> 20 return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
21
22
~/fastai/venv/lib/python3.6/site-packages/torch/tensor.py in __iter__(self)
358 # map will interleave them.)
359 if self.dim() == 0:
--> 360 raise TypeError('iteration over a 0-d tensor')
361 return iter(imap(lambda i: self[i], range(self.size(0))))
362
TypeError: iteration over a 0-d tensor