Has anyone been able to run Lesson4-imdb recently without any issues?

Although fastai is a great tool, it seems to break easily every time new code is committed to GitHub.

I can no longer run the IMDB code; I keep getting the NotImplementedError below. :frowning:

Can you please help me? I would also be glad to hear whether anyone is currently able to run it.

Thank you!

Could you post the method that isn’t implemented? On the plus side, imdb in dl2 does pretty much the same thing, so give that a go!


Thank you so much for the helpful information.
I’m so happy that someone replied :slight_smile: I’m going to try the IMDB notebook in dl2.

In the meantime, here is the error I’m getting in lesson 4:

---------------------------------------------------------------------------
NotImplementedError                       Traceback (most recent call last)
<ipython-input-24-357a8890c905> in <module>()
----> 1 learner.fit(3e-3, 4, wds=1e-6, cycle_len=1, cycle_mult=2)

~/fastai/courses/dl1/fastai/learner.py in fit(self, lrs, n_cycle, wds, **kwargs)
    285         self.sched = None
    286         layer_opt = self.get_layer_opt(lrs, wds)
--> 287         return self.fit_gen(self.model, self.data, layer_opt, n_cycle, **kwargs)
    288 
    289     def warm_up(self, lr, wds=None):

~/fastai/courses/dl1/fastai/learner.py in fit_gen(self, model, data, layer_opt, n_cycle, cycle_len, cycle_mult, cycle_save_name, best_save_name, use_clr, use_clr_beta, metrics, callbacks, use_wd_sched, norm_wds, wds_sched_mult, use_swa, swa_start, swa_eval_freq, **kwargs)
    232             metrics=metrics, callbacks=callbacks, reg_fn=self.reg_fn, clip=self.clip, fp16=self.fp16,
    233             swa_model=self.swa_model if use_swa else None, swa_start=swa_start,
--> 234             swa_eval_freq=swa_eval_freq, **kwargs)
    235 
    236     def get_layer_groups(self): return self.models.get_layer_groups()

~/fastai/courses/dl1/fastai/model.py in fit(model, data, n_epochs, opt, crit, metrics, callbacks, stepper, swa_model, swa_start, swa_eval_freq, **kwargs)
    127             batch_num += 1
    128             for cb in callbacks: cb.on_batch_begin()
--> 129             loss = model_stepper.step(V(x),V(y), epoch)
    130             avg_loss = avg_loss * avg_mom + loss * (1-avg_mom)
    131             debias_loss = avg_loss / (1 - avg_mom**batch_num)

~/fastai/courses/dl1/fastai/model.py in step(self, xs, y, epoch)
     46     def step(self, xs, y, epoch):
     47         xtra = []
---> 48         output = self.m(*xs)
     49         if isinstance(output,tuple): output,*xtra = output
     50         if self.fp16: self.m.zero_grad()

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    489             result = self._slow_forward(*input, **kwargs)
    490         else:
--> 491             result = self.forward(*input, **kwargs)
    492         for hook in self._forward_hooks.values():
    493             hook_result = hook(self, input, result)

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/container.py in forward(self, input)
     89     def forward(self, input):
     90         for module in self._modules.values():
---> 91             input = module(input)
     92         return input
     93 

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    489             result = self._slow_forward(*input, **kwargs)
    490         else:
--> 491             result = self.forward(*input, **kwargs)
    492         for hook in self._forward_hooks.values():
    493             hook_result = hook(self, input, result)

~/fastai/courses/dl1/fastai/lm_rnn.py in forward(self, input)
     84             self.reset()
     85 
---> 86         emb = self.encoder_with_dropout(input, dropout=self.dropoute if self.training else 0)
     87         emb = self.dropouti(emb)
     88 

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    489             result = self._slow_forward(*input, **kwargs)
    490         else:
--> 491             result = self.forward(*input, **kwargs)
    492         for hook in self._forward_hooks.values():
    493             hook_result = hook(self, input, result)

~/fastai/courses/dl1/fastai/rnn_reg.py in forward(self, words, dropout, scale)
    174         if padding_idx is None: padding_idx = -1
    175 
--> 176         X = self.embed._backend.Embedding.apply(words,
    177              masked_embed_weight, padding_idx, self.embed.max_norm,
    178              self.embed.norm_type, self.embed.scale_grad_by_freq, self.embed.sparse)

~/fastai/venv/lib/python3.6/site-packages/torch/nn/backends/backend.py in __getattr__(self, name)
      8         fn = self.function_classes.get(name)
      9         if fn is None:
---> 10             raise NotImplementedError
     11         return fn
     12 

NotImplementedError:

This is me thinking aloud.
Try replacing this line:

X = self.embed._backend.Embedding.apply(words, masked_embed_weight, padding_idx, self.embed.max_norm, self.embed.norm_type, self.embed.scale_grad_by_freq, self.embed.sparse)

with this:

    # build a fresh embedding layer with the same hyperparameters as self.embed
    v = nn.Embedding(self.embed.num_embeddings,
         self.embed.embedding_dim, padding_idx, self.embed.max_norm,
         self.embed.norm_type, self.embed.scale_grad_by_freq, self.embed.sparse)
    # swap in the dropout-masked weights, then do the lookup through the module
    v.weight = nn.Parameter(masked_embed_weight.data)
    X = v(words)

EDIT: This should cover all eventualities.
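
Alternatively, a lighter-weight sketch that avoids building a new module on every forward pass is to call the functional embedding API directly. This is my own guess at a fix, not something from the fastai repo; it assumes PyTorch 0.4, where F.embedding takes the weight as its second argument, and that `F` is `torch.nn.functional` (imported at the top of the fastai modules, if I remember right):

    # functional lookup with the dropout-masked weight matrix
    X = F.embedding(words,
         masked_embed_weight, padding_idx, self.embed.max_norm,
         self.embed.norm_type, self.embed.scale_grad_by_freq, self.embed.sparse)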

Thank you, but now I am getting the following error:

Epoch
0% 0/1 [00:00<?, ?it/s]
  0%|          | 0/6873 [00:00<?, ?it/s]
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-65-b544778ca021> in <module>()
----> 1 learner.fit(lrs/2, 1, wds=wd, use_clr=(32,2), cycle_len=1)

~/fastai/courses/dl2/fastai/learner.py in fit(self, lrs, n_cycle, wds, **kwargs)
    285         self.sched = None
    286         layer_opt = self.get_layer_opt(lrs, wds)
--> 287         return self.fit_gen(self.model, self.data, layer_opt, n_cycle, **kwargs)
    288 
    289     def warm_up(self, lr, wds=None):

~/fastai/courses/dl2/fastai/learner.py in fit_gen(self, model, data, layer_opt, n_cycle, cycle_len, cycle_mult, cycle_save_name, best_save_name, use_clr, use_clr_beta, metrics, callbacks, use_wd_sched, norm_wds, wds_sched_mult, use_swa, swa_start, swa_eval_freq, **kwargs)
    232             metrics=metrics, callbacks=callbacks, reg_fn=self.reg_fn, clip=self.clip, fp16=self.fp16,
    233             swa_model=self.swa_model if use_swa else None, swa_start=swa_start,
--> 234             swa_eval_freq=swa_eval_freq, **kwargs)
    235 
    236     def get_layer_groups(self): return self.models.get_layer_groups()

~/fastai/courses/dl2/fastai/model.py in fit(model, data, n_epochs, opt, crit, metrics, callbacks, stepper, swa_model, swa_start, swa_eval_freq, **kwargs)
    127             batch_num += 1
    128             for cb in callbacks: cb.on_batch_begin()
--> 129             loss = model_stepper.step(V(x),V(y), epoch)
    130             avg_loss = avg_loss * avg_mom + loss * (1-avg_mom)
    131             debias_loss = avg_loss / (1 - avg_mom**batch_num)

~/fastai/courses/dl2/fastai/model.py in step(self, xs, y, epoch)
     46     def step(self, xs, y, epoch):
     47         xtra = []
---> 48         output = self.m(*xs)
     49         if isinstance(output,tuple): output,*xtra = output
     50         if self.fp16: self.m.zero_grad()

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    489             result = self._slow_forward(*input, **kwargs)
    490         else:
--> 491             result = self.forward(*input, **kwargs)
    492         for hook in self._forward_hooks.values():
    493             hook_result = hook(self, input, result)

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/container.py in forward(self, input)
     89     def forward(self, input):
     90         for module in self._modules.values():
---> 91             input = module(input)
     92         return input
     93 

~/fastai/venv/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
    489             result = self._slow_forward(*input, **kwargs)
    490         else:
--> 491             result = self.forward(*input, **kwargs)
    492         for hook in self._forward_hooks.values():
    493             hook_result = hook(self, input, result)

~/fastai/courses/dl2/fastai/lm_rnn.py in forward(self, input)
     99             outputs.append(raw_output)
    100 
--> 101         self.hidden = repackage_var(new_hidden)
    102         return raw_outputs, outputs
    103 

~/fastai/courses/dl2/fastai/lm_rnn.py in repackage_var(h)
     18 def repackage_var(h):
     19     """Wraps h in new Variables, to detach them from their history."""
---> 20     return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
     21 
     22 

~/fastai/courses/dl2/fastai/lm_rnn.py in <genexpr>(.0)
     18 def repackage_var(h):
     19     """Wraps h in new Variables, to detach them from their history."""
---> 20     return Variable(h.data) if type(h) == Variable else tuple(repackage_var(v) for v in h)
     21 
     22 

[... the repackage_var and <genexpr> frames above repeat several more times as the recursion descends into the tensor ...]

~/fastai/venv/lib/python3.6/site-packages/torch/tensor.py in __iter__(self)
    358         # map will interleave them.)
    359         if self.dim() == 0:
--> 360             raise TypeError('iteration over a 0-d tensor')
    361         return iter(imap(lambda i: self[i], range(self.size(0))))
    362 

TypeError: iteration over a 0-d tensor

I tried this too and got the same error. This is from imdb in dl2:
TypeError: iteration over a 0-d tensor
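
Digging in a little: the loop seems to bottom out in repackage_var from lm_rnn.py. My guess (unverified) is that on newer PyTorch the `type(h) == Variable` test never matches, because Variables and tensors were merged, so the function recurses into the hidden-state tensor itself until it reaches a 0-d element. A minimal sketch of a patch, assuming that diagnosis is right:

    import torch

    def repackage_var(h):
        """Detach h from its history; handles tensors and nested tuples of them."""
        # on PyTorch 0.4 the hidden state is a plain tensor, so test with
        # isinstance and use detach() instead of rebuilding a Variable
        if isinstance(h, torch.Tensor): return h.detach()
        return tuple(repackage_var(v) for v in h)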

Both of the issues above are due to changes in PyTorch 0.4. You shouldn’t upgrade to 0.4 yet - it’s not supported by fastai. If you stick with the environment provided in the fastai GitHub repo, it will automatically ensure you have the correct version.
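
To double-check which version an environment is actually running, something along these lines should do it (the 0.3.1 pin below is my recollection of what the course environment targeted, so verify against the repo’s environment.yml before relying on it):

    import torch
    print(torch.__version__)   # the DL1/DL2 course code expects a 0.3.x release
    # to downgrade inside the conda env (assuming the pytorch channel still
    # serves the old build): conda install pytorch=0.3.1 -c pytorch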


@jeremy I seem to have PyTorch 0.4 in the environment already, and I am hitting dataloader errors when running the IMDB notebook. Am I supposed to downgrade it?

Error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-40-99852b8a0098> in <module>()
      1 m3.freeze_to(-1)
----> 2 m3.fit(lrs/2,1, metrics = [accuracy])
      3 #m3.unfreeze()
      4 #m3.fit(lrs, 1, metrics = [accuracy], cycle_len = 1)

~/fastai/courses/dl1/fastai/learner.py in fit(self, lrs, n_cycle, wds, **kwargs)
    213         self.sched = None
    214         layer_opt = self.get_layer_opt(lrs, wds)
--> 215         return self.fit_gen(self.model, self.data, layer_opt, n_cycle, **kwargs)
    216 
    217     def warm_up(self, lr, wds=None):

~/fastai/courses/dl1/fastai/learner.py in fit_gen(self, model, data, layer_opt, n_cycle, cycle_len, cycle_mult, cycle_save_name, best_save_name, use_clr, metrics, callbacks, use_wd_sched, norm_wds, wds_sched_mult, **kwargs)
    160         n_epoch = sum_geom(cycle_len if cycle_len else 1, cycle_mult, n_cycle)
    161         return fit(model, data, n_epoch, layer_opt.opt, self.crit,
--> 162             metrics=metrics, callbacks=callbacks, reg_fn=self.reg_fn, clip=self.clip, **kwargs)
    163 
    164     def get_layer_groups(self): return self.models.get_layer_groups()

~/fastai/courses/dl1/fastai/model.py in fit(model, data, epochs, opt, crit, metrics, callbacks, stepper, **kwargs)
     91         t = tqdm(iter(data.trn_dl), leave=False, total=num_batch)
     92         i = 0
---> 93         for (*x,y) in t:
     94             batch_num += 1
     95             for cb in callbacks: cb.on_batch_begin()

~/anaconda3/envs/fastai/lib/python3.6/site-packages/tqdm/_tqdm.py in __iter__(self)
    953 """, fp_write=getattr(self.fp, 'write', sys.stderr.write))
    954 
--> 955             for obj in iterable:
    956                 yield obj
    957                 # Update and possibly print the progressbar.

~/fastai/courses/dl1/fastai/dataloader.py in __iter__(self)
     77         else:
     78             with ThreadPoolExecutor(max_workers=self.num_workers) as e:
---> 79                 for batch in e.map(self.get_batch, iter(self.batch_sampler)):
     80                     yield get_tensor(batch, self.pin_memory)
     81 

~/anaconda3/envs/fastai/lib/python3.6/concurrent/futures/_base.py in result_iterator()
    584                     # Careful not to keep a reference to the popped future
    585                     if timeout is None:
--> 586                         yield fs.pop().result()
    587                     else:
    588                         yield fs.pop().result(end_time - time.time())

~/anaconda3/envs/fastai/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
    423                 raise CancelledError()
    424             elif self._state == FINISHED:
--> 425                 return self.__get_result()
    426 
    427             self._condition.wait(timeout)

~/anaconda3/envs/fastai/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
    382     def __get_result(self):
    383         if self._exception:
--> 384             raise self._exception
    385         else:
    386             return self._result

~/anaconda3/envs/fastai/lib/python3.6/concurrent/futures/thread.py in run(self)
     54 
     55         try:
---> 56             result = self.fn(*self.args, **self.kwargs)
     57         except BaseException as exc:
     58             self.future.set_exception(exc)

~/fastai/courses/dl1/fastai/dataloader.py in get_batch(self, indices)
     66 
     67     def get_batch(self, indices):
---> 68         res = self.collate_fn([self.dataset[i] for i in indices], self.pad_idx)
     69         if not self.transpose: return res
     70         res[0] = res[0].T

~/fastai/courses/dl1/fastai/dataloader.py in <listcomp>(.0)
     66 
     67     def get_batch(self, indices):
---> 68         res = self.collate_fn([self.dataset[i] for i in indices], self.pad_idx)
     69         if not self.transpose: return res
     70         res[0] = res[0].T

TypeError: 'TextDataLoader' object does not support indexing