Create a dataset/dataloader for test/inference data from DataBlock

Hi, I have the following problem: I'm using the DataBlock to create my dataloaders, given a list of records. That works just fine:

db = DataBlock(blocks = (ImageBlock, MultiCategoryBlock),
get_x=getx,
get_y=gety,
item_tfms=Resize(460),
splitter=RandomSplitter(),
batch_tfms=[*aug_transforms(size=224, min_scale=0.75), Normalize.from_stats(*imagenet_stats)]
)
dls = db.dataloaders(filt_records, bs=32, device='cuda')

But now let's say I want to create one dataloader with a new set of records (that I want to run inference on), but I want to reuse the transformations from the validation set. So I just pass in a new set of records, and I don't want any splitting. How do I achieve this? I tried having splitters that split into one group, but that failed. Do I need to recreate them manually? If so, how?

You should do learn.dls.test_dl(myfnames) to generate a test DataLoader and then to do inference pass this to learn.get_preds(). This will also work after a learn.export() and a load_learner().

1 Like

Exactly what i needed;) thanks a ton.

Hi, I would like to add an external testing dataset and I tried what you suggested, I have to say I’m very new to fastai, so there might be a very simple mistake I’m making.

learn.export(trainImg + '/models/' + 'shortaxis_plaque_class_fasai2_v1.pkl')
learn_inf = load_learner(trainImg + '/models/' + 'shortaxis_plaque_class_fasai2_v1.pkl')
learn_inf.dls.test_dl(get_image_files(testImg))

x_test, y_test = learn_inf.get_preds()

But this is the error I get:

/home/peslami/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastprogress/fastprogress.py:74: UserWarning: Your generator is empty.
warn(“Your generator is empty.”)

IndexError Traceback (most recent call last)
in
----> 1 x_test, y_test = learn_inf.get_preds()
2 print (y_test.shape)
3 print(preds_tune.shape)
4
5 fpr,tpr,auc = roc_curve(preds_tune,y_test)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, **kwargs)
227 for mgr in ctx_mgrs: stack.enter_context(mgr)
228 self(event.begin_epoch if inner else _before_epoch)
–> 229 self._do_epoch_validate(dl=dl)
230 self(event.after_epoch if inner else _after_epoch)
231 if act is None: act = getattr(self.loss_func, ‘activation’, noop)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/learner.py in _do_epoch_validate(self, ds_idx, dl)
183 with torch.no_grad(): self.all_batches()
184 except CancelValidException: self(‘after_cancel_validate’)
–> 185 finally: self(‘after_validate’)
186
187 @log_args(but=‘cbs’)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/learner.py in call(self, event_name)
132 def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
133
–> 134 def call(self, event_name): L(event_name).map(self._call_one)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
374 else f.format if isinstance(f,str)
375 else f.getitem)
–> 376 return self._new(map(g, self))
377
378 def filter(self, f, negate=False, **kwargs):

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
325 @property
326 def _xtra(self): return None
–> 327 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
328 def getitem(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
329 def copy(self): return self._new(self.items.copy())

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in call(cls, x, args, **kwargs)
45 return x
46
—> 47 res = super().call(
((x,) + args), **kwargs)
48 res._newchk = 0
49 return res

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in init(self, items, use_list, match, *rest)
316 if items is None: items = []
317 if (use_list is not None) or not _is_array(items):
–> 318 items = list(items) if use_list else _listify(items)
319 if match is not None:
320 if is_coll(match): match = len(match)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in _listify(o)
252 if isinstance(o, list): return o
253 if isinstance(o, str) or _is_array(o): return [o]
–> 254 if is_iter(o): return list(o)
255 return [o]
256

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastcore/foundation.py in call(self, *args, **kwargs)
218 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
219 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
–> 220 return self.fn(*fargs, **kwargs)
221
222 # Cell

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/learner.py in _call_one(self, event_name)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
–> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/learner.py in (.0)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
–> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/callback/core.py in call(self, event_name)
22 _run = (event_name not in _inner_loop or (self.run_train and getattr(self, ‘training’, True)) or
23 (self.run_valid and not getattr(self, ‘training’, False)))
—> 24 if self.run and _run: getattr(self, event_name, noop)()
25 if event_name==‘after_fit’: self.run=True #Reset self.run to True at each end of fit
26

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/callback/core.py in after_validate(self)
94 “Concatenate all recorded tensors”
95 if self.with_input: self.inputs = detuplify(to_concat(self.inputs, dim=self.concat_dim))
—> 96 if not self.save_preds: self.preds = detuplify(to_concat(self.preds, dim=self.concat_dim))
97 if not self.save_targs: self.targets = detuplify(to_concat(self.targets, dim=self.concat_dim))
98 if self.with_loss: self.losses = to_concat(self.losses)

~/anaconda3/envs/fastai2_Apr20/lib/python3.8/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
211 def to_concat(xs, dim=0):
212 “Concat the element in xs (recursively if they are tuples/lists of tensors)”
–> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs[0].keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),

IndexError: list index out of range

You need to pass your dl to get_preds, IE:
test_dl = learn_inf.dls.test_dl(get_image_files(testImg))
preds = learn_inf.get_preds(dl=test_dl)

2 Likes

Thanks so much for your response! So if I want to try TTA on it, would it be OK if I pass the same test_dl? For example:
preds_tta = learn_inf.tta(dl=test_dl)

When I do this, the AUC for both methods are the same, where as when I tried it with fastai1, it actually significantly increased the performance!

Yes. Are you sure all the parameters when doing so are the exact same?