Hi,
I am re-running some code that used to work less than a month ago, but now I get an error.
I have not been able to find a solution… Any help will be welcome!
data_lm = (TextList.from_folder(PATH_POEMS)
           # Inputs: all the text files in PATH_POEMS
           .split_by_rand_pct(0.1)
           # Randomly hold out 10% of the texts for validation
           .label_for_lm()
           # We want a language model, so we label accordingly
           .databunch(bs=bs))
FILE_LM_ENCODER = '/home/jupyter/.fastai/data/p_gen/models/model-noqrnn'
FILE_ITOS = '/home/jupyter/.fastai/data/p_gen/models/itos_pretrained'
learn = language_model_learner(data_lm, AWD_LSTM,
                               pretrained_fnames=[FILE_LM_ENCODER, FILE_ITOS],
                               drop_mult=0.3)
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-8-1544d2335cf5> in <module>
8 pretrained_fnames=[FILE_LM_ENCODER, FILE_ITOS],
----> 9 drop_mult=0.3)
/opt/anaconda3/lib/python3.7/site-packages/fastai/text/learner.py in language_model_learner(data, arch, config, drop_mult, pretrained, pretrained_fnames, **learn_kwargs)
216 model_path = untar_data(meta[url] , data=False)
217 fnames = [list(model_path.glob(f'*.{ext}'))[0] for ext in ['pth', 'pkl']]
--> 218 learn.load_pretrained(*fnames)
219 learn.freeze()
220 return learn
/opt/anaconda3/lib/python3.7/site-packages/fastai/text/learner.py in load_pretrained(self, wgts_fname, itos_fname, strict)
78 if 'model' in wgts: wgts = wgts['model']
79 wgts = convert_weights(wgts, old_stoi, self.data.train_ds.vocab.itos)
---> 80 self.model.load_state_dict(wgts, strict=strict)
81
82 def get_preds(self, ds_type:DatasetType=DatasetType.Valid, with_loss:bool=False, n_batch:Optional[int]=None, pbar:Optional[PBar]=None,
/opt/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py in load_state_dict(self, state_dict, strict)
775 if len(error_msgs) > 0:
776 raise RuntimeError('Error(s) in loading state_dict for {}:\n\t{}'.format(
--> 777 self.__class__.__name__, "\n\t".join(error_msgs)))
778 return _IncompatibleKeys(missing_keys, unexpected_keys)
779
RuntimeError: Error(s) in loading state_dict for SequentialRNN:
size mismatch for 0.rnns.0.weight_hh_l0_raw: copying a param with shape torch.Size([4600, 1150]) from checkpoint, the shape in current model is torch.Size([4608, 1152]).
size mismatch for 0.rnns.0.module.weight_ih_l0: copying a param with shape torch.Size([4600, 400]) from checkpoint, the shape in current model is torch.Size([4608, 400]).
size mismatch for 0.rnns.0.module.weight_hh_l0: copying a param with shape torch.Size([4600, 1150]) from checkpoint, the shape in current model is torch.Size([4608, 1152]).
size mismatch for 0.rnns.0.module.bias_ih_l0: copying a param with shape torch.Size([4600]) from checkpoint, the shape in current model is torch.Size([4608]).
size mismatch for 0.rnns.0.module.bias_hh_l0: copying a param with shape torch.Size([4600]) from checkpoint, the shape in current model is torch.Size([4608]).
size mismatch for 0.rnns.1.weight_hh_l0_raw: copying a param with shape torch.Size([4600, 1150]) from checkpoint, the shape in current model is torch.Size([4608, 1152]).
size mismatch for 0.rnns.1.module.weight_ih_l0: copying a param with shape torch.Size([4600, 1150]) from checkpoint, the shape in current model is torch.Size([4608, 1152]).
size mismatch for 0.rnns.1.module.weight_hh_l0: copying a param with shape torch.Size([4600, 1150]) from checkpoint, the shape in current model is torch.Size([4608, 1152]).
size mismatch for 0.rnns.1.module.bias_ih_l0: copying a param with shape torch.Size([4600]) from checkpoint, the shape in current model is torch.Size([4608]).
size mismatch for 0.rnns.1.module.bias_hh_l0: copying a param with shape torch.Size([4600]) from checkpoint, the shape in current model is torch.Size([4608]).
size mismatch for 0.rnns.2.module.weight_ih_l0: copying a param with shape torch.Size([1600, 1150]) from checkpoint, the shape in current model is torch.Size([1600, 1152]).
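Looking at the shapes, the checkpoint weights have hidden size 1150 (4600 = 4 × 1150) while the model fastai now builds uses 1152 (4608 = 4 × 1152), so I suspect the default AWD_LSTM config changed between library versions. Would passing an explicit config that matches the pretrained weights be the right way to fix this? Something like the sketch below, where n_hid=1150 is only my guess from the shapes above, and awd_lstm_lm_config is the default dict that comes in with from fastai.text import *:

# Start from the default AWD_LSTM language-model config and
# force the hidden size back to what the checkpoint seems to expect.
config = awd_lstm_lm_config.copy()
config['n_hid'] = 1150  # guess: match the 4600/1150 shapes reported in the error

learn = language_model_learner(data_lm, AWD_LSTM,
                               config=config,
                               pretrained_fnames=[FILE_LM_ENCODER, FILE_ITOS],
                               drop_mult=0.3)

Or should I rather pass pretrained=False so that only my own pretrained_fnames are loaded? I'm not sure which is the intended approach.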