Lesson 2 official topic

Hi everyone, I’m trying to run things in a local Jupyter notebook.
When I try to run the following code, I get an error.

path = Path('Dance_types')
dances = DataBlock(
    blocks=(ImageBlock, CategoryBlock), 
    get_items=get_image_files, 
    splitter=RandomSplitter(valid_pct=0.2, seed=42),
    get_y=parent_label,
    item_tfms=Resize(128))

dls = dances.dataloaders('Dance_types')

The error I get is this:

Could not do one pass in your dataloader, there is something wrong in it. Please see the stack trace below:

RuntimeError Traceback (most recent call last)
Cell In[22], line 1
----> 1 dls = dances.dataloaders('Dance_types')

File /usr/local/lib/python3.10/site-packages/fastai/data/block.py:157, in DataBlock.dataloaders(self, source, path, verbose, **kwargs)
155 dsets = self.datasets(source, verbose=verbose)
156 kwargs = {**self.dls_kwargs, **kwargs, 'verbose': verbose}
--> 157 return dsets.dataloaders(path=path, after_item=self.item_tfms, after_batch=self.batch_tfms, **kwargs)

File /usr/local/lib/python3.10/site-packages/fastai/data/core.py:337, in FilteredBase.dataloaders(self, bs, shuffle_train, shuffle, val_shuffle, n, path, dl_type, dl_kwargs, device, drop_last, val_bs, **kwargs)
335 dl = dl_type(self.subset(0), **merge(kwargs,def_kwargs, dl_kwargs[0]))
336 def_kwargs = {'bs':bs if val_bs is None else val_bs,'shuffle':val_shuffle,'n':None,'drop_last':False}
--> 337 dls = [dl] + [dl.new(self.subset(i), **merge(kwargs,def_kwargs,val_kwargs,dl_kwargs[i]))
338 for i in range(1, self.n_subsets)]
339 return self._dbunch_type(*dls, path=path, device=device)

File /usr/local/lib/python3.10/site-packages/fastai/data/core.py:337, in <listcomp>(.0)
335 dl = dl_type(self.subset(0), **merge(kwargs,def_kwargs, dl_kwargs[0]))
336 def_kwargs = {'bs':bs if val_bs is None else val_bs,'shuffle':val_shuffle,'n':None,'drop_last':False}
--> 337 dls = [dl] + [dl.new(self.subset(i), **merge(kwargs,def_kwargs,val_kwargs,dl_kwargs[i]))
338 for i in range(1, self.n_subsets)]
339 return self._dbunch_type(*dls, path=path, device=device)

File /usr/local/lib/python3.10/site-packages/fastai/data/core.py:97, in TfmdDL.new(self, dataset, cls, **kwargs)
95 if not hasattr(self, '_n_inp') or not hasattr(self, '_types'):
96 try:
--> 97 self._one_pass()
98 res._n_inp,res._types = self._n_inp,self._types
99 except Exception as e:

File /usr/local/lib/python3.10/site-packages/fastai/data/core.py:79, in TfmdDL._one_pass(self)
77 def _one_pass(self):
78 b = self.do_batch([self.do_item(None)])
--> 79 if self.device is not None: b = to_device(b, self.device)
80 its = self.after_batch(b)
81 self._n_inp = 1 if not isinstance(its, (list,tuple)) or len(its)==1 else len(its)-1

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:285, in to_device(b, device, non_blocking)
283 # if hasattr(o, "to_device"): return o.to_device(device)
284 return o
--> 285 return apply(_inner, b)

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:222, in apply(func, x, *args, **kwargs)
220 def apply(func, x, *args, **kwargs):
221 "Apply func recursively to x, passing on args"
--> 222 if is_listy(x): return type(x)([apply(func, o, *args, **kwargs) for o in x])
223 if isinstance(x,dict): return {k: apply(func, v, *args, **kwargs) for k,v in x.items()}
224 res = func(x, *args, **kwargs)

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:222, in <listcomp>(.0)
220 def apply(func, x, *args, **kwargs):
221 "Apply func recursively to x, passing on args"
--> 222 if is_listy(x): return type(x)([apply(func, o, *args, **kwargs) for o in x])
223 if isinstance(x,dict): return {k: apply(func, v, *args, **kwargs) for k,v in x.items()}
224 res = func(x, *args, **kwargs)

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:224, in apply(func, x, *args, **kwargs)
222 if is_listy(x): return type(x)([apply(func, o, *args, **kwargs) for o in x])
223 if isinstance(x,dict): return {k: apply(func, v, *args, **kwargs) for k,v in x.items()}
--> 224 res = func(x, *args, **kwargs)
225 return res if x is None else retain_type(res, x)

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:282, in to_device.<locals>._inner(o)
281 def _inner(o):
--> 282 if isinstance(o,Tensor): return o.to(device, non_blocking=non_blocking)
283 # if hasattr(o, "to_device"): return o.to_device(device)
284 return o

File /usr/local/lib/python3.10/site-packages/fastai/torch_core.py:372, in TensorBase.__torch_function__(cls, func, types, args, kwargs)
370 if cls.debug and func.__name__ not in ('__str__','__repr__'): print(func, types, args, kwargs)
371 if _torch_handled(args, cls._opt, func): types = (torch.Tensor,)
--> 372 res = super().__torch_function__(func, types, args, ifnone(kwargs, {}))
373 dict_objs = _find_args(args) if args else _find_args(list(kwargs.values()))
374 if issubclass(type(res),TensorBase) and dict_objs: res.set_meta(dict_objs[0],as_copy=True)

File /usr/local/lib/python3.10/site-packages/torch/_tensor.py:1279, in Tensor.__torch_function__(cls, func, types, args, kwargs)
1276 return NotImplemented
1278 with _C.DisableTorchFunction():
--> 1279 ret = func(*args, **kwargs)
1280 if func in get_default_nowrap_functions():
1281 return ret

RuntimeError: The MPS backend is supported on MacOS 12.3+.Current OS version can be queried using sw_vers

I’ve tried updating my system and I now have macOS 13.2.1, but I still get the same error. I’ve looked around and it seems that people with the M1 chip were getting this type of error, but I have an Intel one.

Could anyone help?
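The RuntimeError at the bottom of that trace is raised by PyTorch’s MPS backend, not by your DataBlock code. As a first diagnostic (just a sketch, assuming a reasonably recent PyTorch, 1.12+), it can help to print what your environment actually reports:

import platform
import torch

# macOS version as Python sees it, plus whether this PyTorch build
# includes the MPS backend and considers it usable on this machine.
print(platform.mac_ver()[0])
print(torch.backends.mps.is_built(), torch.backends.mps.is_available())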

I have the same macOS setup and the same issue in a Jupyter Notebook using ImageDataLoaders.

transforms = [Resize((50, 50), ResizeMethod.Crop)]
dls = ImageDataLoaders.from_folder(
    '/path/to/data',
    item_tfms=transforms,
    batch_tfms=aug_transforms()
)

It seems the issue was introduced with the latest version, 2.7.11. Downgrading fastai to 2.7.10 fixed the problem for me:

pip install fastai==2.7.10

You can see the version history here:

Good luck.
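One extra check after pinning the version (a small sketch): confirm in the notebook that the running kernel actually picked up the downgrade, since a stale kernel can keep the old install loaded.

import fastai
import torch

# The versions the running kernel actually sees.
print(fastai.__version__)  # should read 2.7.10 after the downgrade
print(torch.__version__)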


Hi. I am experiencing a runtime error when using download_url as follows

I am not very well versed in Python. Has anybody seen an error similar to this?

As the error message says, the download_url method expects a valid URL (string, bytes, or path-like object) as its first parameter, but ims[0] is None.
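If ims is a list of image URLs (for example from an image search), a minimal guard before downloading could look like the sketch below; the ims values and the destination filename here are just placeholders standing in for whatever the notebook produced.

from fastdownload import download_url

# Hypothetical example: `ims` is assumed to be a list of image URLs,
# some of which may be missing. Drop the empty entries before downloading.
ims = ['https://example.com/grizzly.jpg', None]
ims = [u for u in ims if u]

if ims:
    download_url(ims[0], 'grizzly.jpg')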

Hello everyone!
I posted this on the beginners’ questions forum, but it doesn’t look as active.

I don’t know if you all have encountered this problem before, but I am having trouble with pred,pred_idx,probs = learn_inf.predict(img) in the bear classification model. For some reason I get "'PILImage' object has no attribute 'read'".

The full trace stack:

AttributeError                            Traceback (most recent call last)
<ipython-input-80-9a18687b977c> in <module>
----> 1 pred,pred_idx,probs = learn_inf.predict(img)

25 frames
/usr/local/lib/python3.8/dist-packages/fastai/learner.py in predict(self, item, rm_type_tfms, with_input)
    319     def predict(self, item, rm_type_tfms=None, with_input=False):
    320         dl = self.dls.test_dl([item], rm_type_tfms=rm_type_tfms, num_workers=0)
--> 321         inp,preds,_,dec_preds = self.get_preds(dl=dl, with_input=True, with_decoded=True)
    322         i = getattr(self.dls, 'n_inp', -1)
    323         inp = (inp,) if i==1 else tuplify(inp)

/usr/local/lib/python3.8/dist-packages/fastai/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, cbs, **kwargs)
    306         if with_loss: ctx_mgrs.append(self.loss_not_reduced())
    307         with ContextManagers(ctx_mgrs):
--> 308             self._do_epoch_validate(dl=dl)
    309             if act is None: act = getcallable(self.loss_func, 'activation')
    310             res = cb.all_tensors()

/usr/local/lib/python3.8/dist-packages/fastai/learner.py in _do_epoch_validate(self, ds_idx, dl)
    242         if dl is None: dl = self.dls[ds_idx]
    243         self.dl = dl
--> 244         with torch.no_grad(): self._with_events(self.all_batches, 'validate', CancelValidException)
    245 
    246     def _do_epoch(self):

/usr/local/lib/python3.8/dist-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    197 
    198     def _with_events(self, f, event_type, ex, final=noop):
--> 199         try: self(f'before_{event_type}');  f()
    200         except ex: self(f'after_cancel_{event_type}')
    201         self(f'after_{event_type}');  final()

/usr/local/lib/python3.8/dist-packages/fastai/learner.py in all_batches(self)
    203     def all_batches(self):
    204         self.n_iter = len(self.dl)
--> 205         for o in enumerate(self.dl): self.one_batch(*o)
    206 
    207     def _backward(self): self.loss_grad.backward()

/usr/local/lib/python3.8/dist-packages/fastai/data/load.py in __iter__(self)
    125         self.before_iter()
    126         self.__idxs=self.get_idxs() # called in context of main process (not workers/subprocesses)
--> 127         for b in _loaders[self.fake_l.num_workers==0](self.fake_l):
    128             # pin_memory causes tuples to be converted to lists, so convert them back to tuples
    129             if self.pin_memory and type(b) == list: b = tuple(b)

/usr/local/lib/python3.8/dist-packages/torch/utils/data/dataloader.py in __next__(self)
    626                 # TODO(https://github.com/pytorch/pytorch/issues/76750)
    627                 self._reset()  # type: ignore[call-arg]
--> 628             data = self._next_data()
    629             self._num_yielded += 1
    630             if self._dataset_kind == _DatasetKind.Iterable and \

/usr/local/lib/python3.8/dist-packages/torch/utils/data/dataloader.py in _next_data(self)
    669     def _next_data(self):
    670         index = self._next_index()  # may raise StopIteration
--> 671         data = self._dataset_fetcher.fetch(index)  # may raise StopIteration
    672         if self._pin_memory:
    673             data = _utils.pin_memory.pin_memory(data, self._pin_memory_device)

/usr/local/lib/python3.8/dist-packages/torch/utils/data/_utils/fetch.py in fetch(self, possibly_batched_index)
     41                 raise StopIteration
     42         else:
---> 43             data = next(self.dataset_iter)
     44         return self.collate_fn(data)
     45 

/usr/local/lib/python3.8/dist-packages/fastai/data/load.py in create_batches(self, samps)
    136         if self.dataset is not None: self.it = iter(self.dataset)
    137         res = filter(lambda o:o is not None, map(self.do_item, samps))
--> 138         yield from map(self.do_batch, self.chunkify(res))
    139 
    140     def new(self, dataset=None, cls=None, **kwargs):

/usr/local/lib/python3.8/dist-packages/fastcore/basics.py in chunked(it, chunk_sz, drop_last, n_chunks)
    228     if not isinstance(it, Iterator): it = iter(it)
    229     while True:
--> 230         res = list(itertools.islice(it, chunk_sz))
    231         if res and (len(res)==chunk_sz or not drop_last): yield res
    232         if len(res)<chunk_sz: return

/usr/local/lib/python3.8/dist-packages/fastai/data/load.py in do_item(self, s)
    151     def prebatched(self): return self.bs is None
    152     def do_item(self, s):
--> 153         try: return self.after_item(self.create_item(s))
    154         except SkipItemException: return None
    155     def chunkify(self, b): return b if self.prebatched else chunked(b, self.bs, self.drop_last)

/usr/local/lib/python3.8/dist-packages/fastai/data/load.py in create_item(self, s)
    158     def retain(self, res, b):  return retain_types(res, b[0] if is_listy(b) else b)
    159     def create_item(self, s):
--> 160         if self.indexed: return self.dataset[s or 0]
    161         elif s is None:  return next(self.it)
    162         else: raise IndexError("Cannot index an iterable dataset numerically - must use `None`.")

/usr/local/lib/python3.8/dist-packages/fastai/data/core.py in __getitem__(self, it)
    456 
    457     def __getitem__(self, it):
--> 458         res = tuple([tl[it] for tl in self.tls])
    459         return res if is_indexer(it) else list(zip(*res))
    460 

/usr/local/lib/python3.8/dist-packages/fastai/data/core.py in <listcomp>(.0)
    456 
    457     def __getitem__(self, it):
--> 458         res = tuple([tl[it] for tl in self.tls])
    459         return res if is_indexer(it) else list(zip(*res))
    460 

/usr/local/lib/python3.8/dist-packages/fastai/data/core.py in __getitem__(self, idx)
    415         res = super().__getitem__(idx)
    416         if self._after_item is None: return res
--> 417         return self._after_item(res) if is_indexer(idx) else res.map(self._after_item)
    418 
    419 # %% ../../nbs/03_data.core.ipynb 53

/usr/local/lib/python3.8/dist-packages/fastai/data/core.py in _after_item(self, o)
    375             raise
    376     def subset(self, i): return self._new(self._get(self.splits[i]), split_idx=i)
--> 377     def _after_item(self, o): return self.tfms(o)
    378     def __repr__(self): return f"{self.__class__.__name__}: {self.items}\ntfms - {self.tfms.fs}"
    379     def __iter__(self): return (self[i] for i in range(len(self)))

/usr/local/lib/python3.8/dist-packages/fastcore/transform.py in __call__(self, o)
    206         self.fs = self.fs.sorted(key='order')
    207 
--> 208     def __call__(self, o): return compose_tfms(o, tfms=self.fs, split_idx=self.split_idx)
    209     def __repr__(self): return f"Pipeline: {' -> '.join([f.name for f in self.fs if f.name != 'noop'])}"
    210     def __getitem__(self,i): return self.fs[i]

/usr/local/lib/python3.8/dist-packages/fastcore/transform.py in compose_tfms(x, tfms, is_enc, reverse, **kwargs)
    156     for f in tfms:
    157         if not is_enc: f = f.decode
--> 158         x = f(x, **kwargs)
    159     return x
    160 

/usr/local/lib/python3.8/dist-packages/fastcore/transform.py in __call__(self, x, **kwargs)
     79     @property
     80     def name(self): return getattr(self, '_name', _get_name(self))
---> 81     def __call__(self, x, **kwargs): return self._call('encodes', x, **kwargs)
     82     def decode  (self, x, **kwargs): return self._call('decodes', x, **kwargs)
     83     def __repr__(self): return f'{self.name}:\nencodes: {self.encodes}decodes: {self.decodes}'

/usr/local/lib/python3.8/dist-packages/fastcore/transform.py in _call(self, fn, x, split_idx, **kwargs)
     89     def _call(self, fn, x, split_idx=None, **kwargs):
     90         if split_idx!=self.split_idx and self.split_idx is not None: return x
---> 91         return self._do_call(getattr(self, fn), x, **kwargs)
     92 
     93     def _do_call(self, f, x, **kwargs):

/usr/local/lib/python3.8/dist-packages/fastcore/transform.py in _do_call(self, f, x, **kwargs)
     95             if f is None: return x
     96             ret = f.returns(x) if hasattr(f,'returns') else None
---> 97             return retain_type(f(x, **kwargs), x, ret)
     98         res = tuple(self._do_call(f, x_, **kwargs) for x_ in x)
     99         return retain_type(res, x)

/usr/local/lib/python3.8/dist-packages/fastcore/dispatch.py in __call__(self, *args, **kwargs)
    118         elif self.inst is not None: f = MethodType(f, self.inst)
    119         elif self.owner is not None: f = MethodType(f, self.owner)
--> 120         return f(*args, **kwargs)
    121 
    122     def __get__(self, inst, owner):

/usr/local/lib/python3.8/dist-packages/fastai/vision/core.py in create(cls, fn, **kwargs)
    123         if isinstance(fn,bytes): fn = io.BytesIO(fn)
    124         if isinstance(fn,Image.Image) and not isinstance(fn,cls): return cls(fn)
--> 125         return cls(load_image(fn, **merge(cls._open_args, kwargs)))
    126 
    127     def show(self, ctx=None, **kwargs):

/usr/local/lib/python3.8/dist-packages/fastai/vision/core.py in load_image(fn, mode)
     96 def load_image(fn, mode=None):
     97     "Open and load a `PIL.Image` and convert to `mode`"
---> 98     im = Image.open(fn, mode="r")
     99     im.load()
    100     im = im._new(im.im)

/usr/local/lib/python3.8/dist-packages/PIL/Image.py in open(fp, mode)
   2850         exclusive_fp = True
   2851 
-> 2852     prefix = fp.read(16)
   2853 
   2854     preinit()

AttributeError: 'PILImage' object has no attribute 'read'

I thought I was able to fix the problem from something I saw on Google that said to change Image.open() to to_image(), with no success. I also tried to see if maybe I wasn’t importing the full library, or an updated one, by changing fastai.vision.widgets to fastai.vision.all, but that didn’t seem to work. I also tried the keyword argument mode='r' in Image.open(), with no success. I am a beginner, so I might be approaching this problem wrong. If anyone could help me I would greatly appreciate it! I’ve been working on this for a couple of hours now, but I won’t stop trying.

The easiest way to solve the problem is to not use PILImage.create:

learn.predict('bear.jpg')

where bear.jpg is the file you want to predict on. You can also pass a filepath object instead of 'bear.jpg', but not a PILImage object.
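For completeness, a small sketch of the two uses side by side; learn_inf and export.pkl are assumed to be the exported bear classifier from the lesson notebook.

from fastai.vision.all import *

learn_inf = load_learner('export.pkl')  # the exported model from the notebook

# Pass a path (str or Path) straight to predict:
pred, pred_idx, probs = learn_inf.predict('bear.jpg')

# Create a PILImage only when you want to look at the picture itself:
img = PILImage.create('bear.jpg')
img.show()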


That worked! Thank you so much! Please take this :cookie: as a sign of my appreciation. Do you mind explaining to me how you easily figured this out? I’m new to programming and want to improve my thinking.

For things like this, you should head straight to the documentation (it’s very well written), or you could use the doc function. Either way, check the details of the function or class you are having trouble with, and also check what type of data its parameters take.
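For example, with the learner from the post above (learn_inf), in a notebook cell:

# `doc` shows the signature, a one-line summary and links to the docs and source;
# it is available after the usual course imports. Plain `help` works anywhere.
doc(learn_inf.predict)
help(learn_inf.predict)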


Noted. Thank you!

If there is something wrong with one of the lecture notebooks, you can search the forum (top right corner); most of the time it’s not only you having the problem. This, for example, was already reported as a similar problem in another notebook (see the link in my post).


When I try to run the following code:

cleaner = ImageClassifierCleaner(learn)
cleaner

I am getting this error:

---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
/tmp/ipykernel_28/1061341133.py in <module>
----> 1 cleaner = ImageClassifierCleaner(learn)
      2 cleaner

NameError: name 'ImageClassifierCleaner' is not defined

Have you imported the fastai vision widgets? If not, this should work:

from fastai.vision.widgets import *

Hey this worked. I had the following in the beginning:

from fastai.vision.all import *

Would this not include

from fastai.vision.widgets import * ?

Thanks

1 Like

No. As you can see in the all.py file (which is what gets imported), the widgets are not included; they need to be imported separately.
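So in the notebook both imports are needed, for example:

# `all` brings in the core vision API; the notebook widgets
# (including ImageClassifierCleaner) live in their own module.
from fastai.vision.all import *
from fastai.vision.widgets import *

cleaner = ImageClassifierCleaner(learn)  # `learn` is the trained Learner from earlier
cleaner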


Hi, could anybody tell me where I can find the source code of the fastbook library? Not the fastai source code, but the fastbook library code. On GitHub, the fastbook repository seems to contain only the notebooks; there is no fastbook library code. How can I find it? Thanks a bunch!

Not 100% sure, but I believe the fastbook code is only this: fastbook/utils.py at master · fastai/fastbook · GitHub

So just some util functions.

What code are you expecting? All the stuff that is used in the book comes from the fastai library.

Hey folks, I want to know what the difference is between this:

faces = DataBlock(
    blocks=(ImageBlock, CategoryBlock), # we are dealing with categorical classification
    get_items=get_image_files, # function to fetch images from our dataset
    splitter=RandomSplitter(valid_pct=0.2, seed=42), # spliting our dataset
    get_y=parent_label, # class of category
    item_tfms=[Resize(192, method='squish')] # more like BoxFit.contain, from Flutter
)
dls = faces.dataloaders(path, bs=32).show_batch(max_n=9)

and this:

faces = faces.new(item_tfms=Resize(192, ResizeMethod.Pad, pad_mode='zeros'))
dls = faces.dataloaders(path)
dls.valid.show_batch(max_n=4, nrows=1)

Specifically, the new method called on the "faces" DataBlock?

We created a DataBlock initially without new, and now, because we want to change how the images are resized, we ignored the other properties of the DataBlock and only passed the item_tfms argument? Why is this so? Does the new "faces" DataBlock retain the properties of the first one?
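As far as I understand it: yes, it retains them. new accepts just item_tfms and batch_tfms; whatever you pass there replaces the corresponding transforms, while blocks, get_items, splitter and get_y are kept from the DataBlock you call it on, so you don’t have to repeat them. A quick sketch, reusing faces and path from your snippet (the aug_transforms value is just an illustration):

# Only the transforms change; blocks, splitter and labelling are carried over.
faces = faces.new(
    item_tfms=Resize(192, ResizeMethod.Pad, pad_mode='zeros'),
    batch_tfms=aug_transforms(mult=1.5))
dls = faces.dataloaders(path, bs=32)
dls.valid.show_batch(max_n=4, nrows=1)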

Thanks, that’s exactly what I was looking for. Thank you Lucas.

Hello,

I have two different folders (annotations and images): the images folder contains 120 images, and the annotations folder contains the corresponding 120 annotations in JSON format. I want to create a DataBlock so I can perform a classification analysis. Please, how can I map my JSON files to the images so I can run the classification?

Yes, there are many ways to do this. The details depend a bit on exactly how the data is structured, e.g. how the images are mapped to the entries in the JSON files.

If you are familiar with pandas, and especially with referencing data in pandas DataFrames as part of the DataBlock API, you could read in your JSON files, combine them into a single DataFrame, and use that.
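For example, assuming each JSON file has the same base name as its image and contains a "label" field (that schema is just a guess, so adjust it to whatever your annotations actually contain), something along these lines should work:

import json
from pathlib import Path

import pandas as pd
from fastai.vision.all import *

path = Path('/path/to/data')  # expects images/ and annotations/ inside

# Build one row per image: the full image path plus the label read
# from the matching JSON file.
rows = []
for ann in (path/'annotations').glob('*.json'):
    d = json.loads(ann.read_text())
    rows.append({'fname': str(path/'images'/f'{ann.stem}.jpg'), 'label': d['label']})
df = pd.DataFrame(rows)

dblock = DataBlock(
    blocks=(ImageBlock, CategoryBlock),
    get_x=ColReader('fname'),   # image path taken from the dataframe
    get_y=ColReader('label'),   # class label taken from the dataframe
    splitter=RandomSplitter(valid_pct=0.2, seed=42),
    item_tfms=Resize(224))

dls = dblock.dataloaders(df, bs=32)
dls.show_batch(max_n=9)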