Lesson 1 - ImageDataBunch error on GCP

I’m using GCP.

After executing a command:
data = ImageDataBunch.from_name_re(path_img, fnames, pat, ds_tfms=get_transforms(), size=224, bs=bs).normalize(imagenet_stats)

I’m getting an error:
RecursionError: maximum recursion depth exceeded while calling a Python object


RecursionError                            Traceback (most recent call last)
<ipython-input-29-15e5d1d9602d> in <module>
----> 1 data = ImageDataBunch.from_name_re(path_img, fnames, pat, ds_tfms=get_transforms(), size=224, bs=bs
      2                                   ).normalize(imagenet_stats)

/opt/anaconda3/lib/python3.7/site-packages/fastai/vision/data.py in from_name_re(cls, path, fnames, pat, valid_pct, **kwargs)
    158         pat = re.compile(pat)
    159         def _get_label(fn): return pat.search(str(fn)).group(1)
--> 160         return cls.from_name_func(path, fnames, _get_label, valid_pct=valid_pct, **kwargs)
    161 
    162     @staticmethod

/opt/anaconda3/lib/python3.7/site-packages/fastai/vision/data.py in from_name_func(cls, path, fnames, label_func, valid_pct, **kwargs)
    151         "Create from list of `fnames` in `path` with `label_func`."
    152         src = ImageItemList(fnames, path=path).random_split_by_pct(valid_pct)
--> 153         return cls.create_from_ll(src.label_from_func(label_func), **kwargs)
    154 
    155     @classmethod

/opt/anaconda3/lib/python3.7/site-packages/fastai/vision/data.py in create_from_ll(cls, lls, bs, ds_tfms, num_workers, dl_tfms, device, test, collate_fn, size, no_check, **kwargs)
    108         lls = lls.transform(tfms=ds_tfms, size=size, **kwargs)
    109         if test is not None: lls.add_test_folder(test)
--> 110         return lls.databunch(bs=bs, dl_tfms=dl_tfms, num_workers=num_workers, collate_fn=collate_fn, device=device, no_check=no_check)
    111 
    112     @classmethod

/opt/anaconda3/lib/python3.7/site-packages/fastai/data_block.py in databunch(self, path, **kwargs)
    479         "Create an `DataBunch` from self, `path` will override `self.path`, `kwargs` are passed to `DataBunch.create`."
    480         path = Path(ifnone(path, self.path))
--> 481         data = self.x._bunch.create(self.train, self.valid, test_ds=self.test, path=path, **kwargs)
    482         if getattr(self, 'normalize', False):#In case a normalization was serialized
    483             norm = self.normalize

/opt/anaconda3/lib/python3.7/site-packages/fastai/basic_data.py in create(cls, train_ds, valid_ds, test_ds, path, bs, num_workers, dl_tfms, device, collate_fn, no_check)
    113         val_bs = bs
    114         dls = [DataLoader(d, b, shuffle=s, drop_last=s, num_workers=num_workers) for d,b,s in
--> 115                zip(datasets, (bs,val_bs,val_bs,val_bs), (True,False,False,False)) if d is not None]
    116         return cls(*dls, path=path, device=device, dl_tfms=dl_tfms, collate_fn=collate_fn, no_check=no_check)
    117 

/opt/anaconda3/lib/python3.7/site-packages/fastai/basic_data.py in <listcomp>(.0)
    113         val_bs = bs
    114         dls = [DataLoader(d, b, shuffle=s, drop_last=s, num_workers=num_workers) for d,b,s in
--> 115                zip(datasets, (bs,val_bs,val_bs,val_bs), (True,False,False,False)) if d is not None]
    116         return cls(*dls, path=path, device=device, dl_tfms=dl_tfms, collate_fn=collate_fn, no_check=no_check)
    117 

/opt/anaconda3/lib/python3.7/site-packages/fastai/basic_data.py in intercept_args(self, dataset, batch_size, shuffle, sampler, batch_sampler, num_workers, collate_fn, pin_memory, drop_last, timeout, worker_init_fn)
     14                         'num_workers':num_workers, 'collate_fn':collate_fn, 'pin_memory':pin_memory,
     15                         'drop_last': drop_last, 'timeout':timeout, 'worker_init_fn':worker_init_fn}
---> 16     old_dl_init(self, dataset, **self.init_kwargs)
     17 
     18 torch.utils.data.DataLoader.__init__ = intercept_args

... last 1 frames repeated, from the frame below ...

/opt/anaconda3/lib/python3.7/site-packages/fastai/basic_data.py in intercept_args(self, dataset, batch_size, shuffle, sampler, batch_sampler, num_workers, collate_fn, pin_memory, drop_last, timeout, worker_init_fn)
     14                         'num_workers':num_workers, 'collate_fn':collate_fn, 'pin_memory':pin_memory,
     15                         'drop_last': drop_last, 'timeout':timeout, 'worker_init_fn':worker_init_fn}
---> 16     old_dl_init(self, dataset, **self.init_kwargs)
     17 
     18 torch.utils.data.DataLoader.__init__ = intercept_args

RecursionError: maximum recursion depth exceeded while calling a Python object

I have no idea what may be wrong…

1 Like

I got the same error. I have an image set with the following structure.

data/train/class
data/train/class/image1
data/train/class/image2
data/test/class
data/test/class/image1
data/valid/class
data/valid/class/image1

Not sure what’s going on or how to fix it.
The data has the same folder structure as the MNIST dataset.

Hi Aleksandra

Inside the Jupyter notebook I performed the following steps, which fixed this same issue:

  • Kernel > Change Kernel > Click Python 3
  • Kernel > restart
2 Likes