This is my list of batch transforms-
batch_transforms = [
# some transformation classes
Saturation(max_lighting=3.0, p=0.75),
# some other transformation classes
]
And this is my DataLoading and creating a DataBlock-
db = DataBlock(
blocks=(ImageBlock, CategoryBlock()),
getters=[ColReader('fname', pref=TRAIN_DIR),
ColReader('class')],
splitter=RandomSplitter(valid_pct=0.15, seed=42),
item_tfms=Resize(IMG_SIZE),
batch_tfms=batch_transforms
)
dls = db.dataloaders(train_df, bs=BATCH_SIZE)
When I call dls.show_batch()
I get this error-
ValueError: math domain error
Here’s the full error:
Expand full error
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-11-90634fcc3c9e> in <module>()
----> 1 dls.show_batch()
10 frames
/usr/local/lib/python3.7/dist-packages/fastai/data/core.py in show_batch(self, b, max_n, ctxs, show, unique, **kwargs)
100 old_get_idxs = self.get_idxs
101 self.get_idxs = lambda: Inf.zeros
--> 102 if b is None: b = self.one_batch()
103 if not show: return self._pre_show_batch(b, max_n=max_n)
104 show_batch(*self._pre_show_batch(b, max_n=max_n), ctxs=ctxs, max_n=max_n, **kwargs)
/usr/local/lib/python3.7/dist-packages/fastai/data/load.py in one_batch(self)
148 def one_batch(self):
149 if self.n is not None and len(self)==0: raise ValueError(f'This DataLoader does not contain any batches')
--> 150 with self.fake_l.no_multiproc(): res = first(self)
151 if hasattr(self, 'it'): delattr(self, 'it')
152 return res
/usr/local/lib/python3.7/dist-packages/fastcore/basics.py in first(x, f, negate, **kwargs)
545 x = iter(x)
546 if f: x = filter_ex(x, f=f, negate=negate, gen=True, **kwargs)
--> 547 return next(x, None)
548
549 # Cell
/usr/local/lib/python3.7/dist-packages/fastai/data/load.py in __iter__(self)
111 if self.device is not None and multiprocessing.get_start_method().lower() == "fork":
112 b = to_device(b, self.device)
--> 113 yield self.after_batch(b)
114 self.after_iter()
115 if hasattr(self, 'it'): del(self.it)
/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in __call__(self, o)
196 self.fs.append(t)
197
--> 198 def __call__(self, o): return compose_tfms(o, tfms=self.fs, split_idx=self.split_idx)
199 def __repr__(self): return f"Pipeline: {' -> '.join([f.name for f in self.fs if f.name != 'noop'])}"
200 def __getitem__(self,i): return self.fs[i]
/usr/local/lib/python3.7/dist-packages/fastcore/transform.py in compose_tfms(x, tfms, is_enc, reverse, **kwargs)
148 for f in tfms:
149 if not is_enc: f = f.decode
--> 150 x = f(x, **kwargs)
151 return x
152
/usr/local/lib/python3.7/dist-packages/fastai/vision/augment.py in __call__(self, b, split_idx, **kwargs)
32
33 def __call__(self, b, split_idx=None, **kwargs):
---> 34 self.before_call(b, split_idx=split_idx)
35 return super().__call__(b, split_idx=split_idx, **kwargs) if self.do else b
36
/usr/local/lib/python3.7/dist-packages/fastai/vision/augment.py in before_call(self, b, split_idx)
680 self.do = True
681 while isinstance(b, tuple): b = b[0]
--> 682 for t in self.fs: t.before_call(b)
683
684 def compose(self, tfm):
/usr/local/lib/python3.7/dist-packages/fastai/vision/augment.py in before_call(self, x)
733
734 def before_call(self, x):
--> 735 self.change = _draw_mask(x, self._def_draw, draw=self.draw, p=self.p, neutral=1., batch=self.batch)
736
737 def __call__(self, x): return x.mul_(self.change[:,None,None,None])
/usr/local/lib/python3.7/dist-packages/fastai/vision/augment.py in _draw_mask(x, def_draw, draw, p, neutral, batch)
447 "Creates mask_tensor based on `x` with `neutral` with probability `1-p`. "
448 if draw is None: draw=def_draw
--> 449 if callable(draw): res=draw(x)
450 elif is_listy(draw):
451 assert len(draw)>=x.size(0)
/usr/local/lib/python3.7/dist-packages/fastai/vision/augment.py in _def_draw(self, x)
728
729 def _def_draw(self, x):
--> 730 if not self.batch: res = x.new_empty(x.size(0)).uniform_(math.log(1-self.max_lighting), -math.log(1-self.max_lighting))
731 else: res = x.new_zeros(x.size(0)) + random.uniform(math.log(1-self.max_lighting), -math.log(1-self.max_lighting))
732 return torch.exp(res)
ValueError: math domain error
And I am also shown this not-so-helpful error message when I ran the DataLoading cell-
Could not do one pass in your dataloader, there is something wrong in it
Can you shed some light into what’s wrong?