I am running a text classification model in production that has been using fastai 1.0.60. I chose not to update to fastai v2 because there were bugs around using load_learner() and loading a pretrained model. Up until early January, this method was working perfectly, but now I am getting this error when running learn.predict(). The input is a string, so no idea why this error is popping up. Has anyone else faced this issue? Thanks!
torch == 1.4.0
fastai == 1.0.60
path = 'files/modelsavelocation'
learn = load_learner(path, 'final_whole')
learn.predict('How can I find bios?')
TypeError Traceback (most recent call last)
in
----> 1 learn.predict('How can I find bios?')/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/basic_train.py in predict(self, item, return_x, batch_first, with_dropout, **kwargs)
370 def predict(self, item:ItemBase, return_x:bool=False, batch_first:bool=True, with_dropout:bool=False, **kwargs):
371         "Return predicted class, label and probabilities for `item`."
→ 372 batch = self.data.one_item(item)
373 res = self.pred_batch(batch=batch, with_dropout=with_dropout)
374 raw_pred,x = grab_idx(res,0,batch_first=batch_first),batch[0]/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/basic_data.py in one_item(self, item, detach, denorm, cpu)
179         "Get `item` into a batch. Optionally `detach` and `denorm`."
180 ds = self.single_ds
→ 181 with ds.set_item(item):
182 return self.one_batch(ds_type=DatasetType.Single, detach=detach, denorm=denorm, cpu=cpu)
183/usr/lib/python3.7/contextlib.py in enter(self)
110 del self.args, self.kwds, self.func
111 try:
→ 112 return next(self.gen)
113 except StopIteration:
114 raise RuntimeError("generator didn't yield") from None/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/data_block.py in set_item(self, item)
613 def set_item(self,item):
614         "For inference, will briefly replace the dataset with one that only contains `item`."
→ 615 self.item = self.x.process_one(item)
616 yield None
617 self.item = None/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/data_block.py in process_one(self, item, processor)
89 if processor is not None: self.processor = processor
90 self.processor = listify(self.processor)
—> 91 for p in self.processor: item = p.process_one(item)
92 return item
93/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/text/data.py in process_one(self, item)
289
290 def process_one(self, item):
→ 291 return self.tokenizer._process_all_1(_join_texts([item], self.mark_fields, self.include_bos, self.include_eos))[0]
292
293 def process(self, ds):/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/text/transform.py in _process_all_1(self, texts)
110 def _process_all_1(self, texts:Collection[str]) → List[List[str]]:
111         "Process a list of `texts` in one process."
→ 112 tok = self.tok_func(self.lang)
113 if self.special_cases: tok.add_special_cases(self.special_cases)
114 return [self.process_text(str(t), tok) for t in texts]/local_disk0/.ephemeral_nfs/envs/pythonEnv-5caaef5d-ec1e-4e25-80d3-38d7a857d092/lib/python3.7/site-packages/fastai/text/transform.py in init(self, lang)
23     "Wrapper around a spacy tokenizer to make it a `BaseTokenizer`."
24 def init(self, lang:str):
---> 25 self.tok = spacy.blank(lang, disable=["parser", "tagger", "ner"])
26
27 def tokenizer(self, t:str) -> List[str]:TypeError: blank() got an unexpected keyword argument 'disable'