Error trying to test Lesson 1 - What's your pet

I am using fastai version 1.0.30

While trying to run this:
data.show_batch(rows=3, figsize=(7,6))

I get:

RuntimeError Traceback (most recent call last)
in ()
----> 1 data.show_batch(rows=3, figsize=(7,6))

/usr/local/lib/python3.6/dist-packages/fastai/basic_data.py in show_batch(self, rows, ds_type, **kwargs)
153 def show_batch(self, rows:int=5, ds_type:DatasetType=DatasetType.Train, **kwargs)->None:
154 “Show a batch of data in ds_type on a few rows.”
–> 155 x,y = self.one_batch(ds_type, True, True)
156 if self.train_ds.x._square_show: rows = rows ** 2
157 xs = [self.train_ds.x.reconstruct(grab_idx(x, i, self._batch_first)) for i in range(rows)]

/usr/local/lib/python3.6/dist-packages/fastai/basic_data.py in one_batch(self, ds_type, detach, denorm)
134 w = self.num_workers
135 self.num_workers = 0
–> 136 try: x,y = next(iter(dl))
137 finally: self.num_workers = w
138 if detach: x,y = to_detach(x),to_detach(y)

/usr/local/lib/python3.6/dist-packages/fastai/basic_data.py in iter(self)
67 “Process and returns items from DataLoader.”
68 assert not self.skip_size1 or self.batch_size > 1, “Batch size cannot be one if skip_size1 is set to True”
—> 69 for b in self.dl:
70 y = b[1][0] if is_listy(b[1]) else b[1]
71 if not self.skip_size1 or y.size(0) != 1: yield self.proc_batch(b)

~/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py in next(self)
635 self.reorder_dict[idx] = batch
636 continue
–> 637 return self._process_next_batch(batch)
638
639 next = next # Python 2 compatibility

~/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py in _process_next_batch(self, batch)
656 self._put_indices()
657 if isinstance(batch, ExceptionWrapper):
–> 658 raise batch.exc_type(batch.exc_msg)
659 return batch
660

RuntimeError: Traceback (most recent call last):
File “/home/sra42/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py”, line 138, in _worker_loop
samples = collate_fn([dataset[i] for i in batch_indices])
File “/home/sra42/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py”, line 138, in
samples = collate_fn([dataset[i] for i in batch_indices])
File “/usr/local/lib/python3.6/dist-packages/fastai/data_block.py”, line 450, in getitem
x = x.apply_tfms(self.tfms, **self.tfmargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 114, in apply_tfms
else: x = tfm(x)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 494, in call
return self.tfm(x, *args, **{**self.resolved, **kwargs}) if self.do_run else x
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 441, in call
if args: return self.calc(*args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 446, in calc
if self._wrap: return getattr(x, self._wrap)(self.func, *args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 168, in coord
self.flow = func(self.flow, *args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 224, in symmetric_warp
return _perspective_warp(c, targ_pts, invert)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 210, in _perspective_warp
return _apply_perspective(c, _find_coeffs(_orig_pts, targ_pts))
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 191, in _find_coeffs
return torch.gesv(B,A)[0][:,0]
RuntimeError: b should have at least 2 dimensions, but has 1 dimensions instead

And learn.fit_one_cycle(4) also gives this:


RuntimeError Traceback (most recent call last)
in ()
----> 1 learn.fit_one_cycle(4)

/usr/local/lib/python3.6/dist-packages/fastai/train.py in fit_one_cycle(learn, cyc_len, max_lr, moms, div_factor, pct_start, wd, callbacks, **kwargs)
18 callbacks.append(OneCycleScheduler(learn, max_lr, moms=moms, div_factor=div_factor,
19 pct_start=pct_start, **kwargs))
—> 20 learn.fit(cyc_len, max_lr, wd=wd, callbacks=callbacks)
21
22 def lr_find(learn:Learner, start_lr:Floats=1e-7, end_lr:Floats=10, num_it:int=100, stop_div:bool=True, **kwargs:Any):

/usr/local/lib/python3.6/dist-packages/fastai/basic_train.py in fit(self, epochs, lr, wd, callbacks)
160 callbacks = [cb(self) for cb in self.callback_fns] + listify(callbacks)
161 fit(epochs, self.model, self.loss_func, opt=self.opt, data=self.data, metrics=self.metrics,
–> 162 callbacks=self.callbacks+callbacks)
163
164 def create_opt(self, lr:Floats, wd:Floats=0.)->None:

/usr/local/lib/python3.6/dist-packages/fastai/basic_train.py in fit(epochs, model, loss_func, opt, data, callbacks, metrics)
92 except Exception as e:
93 exception = e
—> 94 raise e
95 finally: cb_handler.on_train_end(exception)
96

/usr/local/lib/python3.6/dist-packages/fastai/basic_train.py in fit(epochs, model, loss_func, opt, data, callbacks, metrics)
80 cb_handler.on_epoch_begin()
81
—> 82 for xb,yb in progress_bar(data.train_dl, parent=pbar):
83 xb, yb = cb_handler.on_batch_begin(xb, yb)
84 loss = loss_batch(model, xb, yb, loss_func, opt, cb_handler)

~/.local/lib/python3.6/site-packages/fastprogress/fastprogress.py in iter(self)
64 self.update(0)
65 try:
—> 66 for i,o in enumerate(self._gen):
67 yield o
68 if self.auto_update: self.update(i+1)

/usr/local/lib/python3.6/dist-packages/fastai/basic_data.py in iter(self)
67 “Process and returns items from DataLoader.”
68 assert not self.skip_size1 or self.batch_size > 1, “Batch size cannot be one if skip_size1 is set to True”
—> 69 for b in self.dl:
70 y = b[1][0] if is_listy(b[1]) else b[1]
71 if not self.skip_size1 or y.size(0) != 1: yield self.proc_batch(b)

~/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py in next(self)
635 self.reorder_dict[idx] = batch
636 continue
–> 637 return self._process_next_batch(batch)
638
639 next = next # Python 2 compatibility

~/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py in _process_next_batch(self, batch)
656 self._put_indices()
657 if isinstance(batch, ExceptionWrapper):
–> 658 raise batch.exc_type(batch.exc_msg)
659 return batch
660

RuntimeError: Traceback (most recent call last):
File “/home/sra42/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py”, line 138, in _worker_loop
samples = collate_fn([dataset[i] for i in batch_indices])
File “/home/sra42/.local/lib/python3.6/site-packages/torch/utils/data/dataloader.py”, line 138, in
samples = collate_fn([dataset[i] for i in batch_indices])
File “/usr/local/lib/python3.6/dist-packages/fastai/data_block.py”, line 450, in getitem
x = x.apply_tfms(self.tfms, **self.tfmargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 114, in apply_tfms
else: x = tfm(x)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 494, in call
return self.tfm(x, *args, **{**self.resolved, **kwargs}) if self.do_run else x
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 441, in call
if args: return self.calc(*args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 446, in calc
if self._wrap: return getattr(x, self._wrap)(self.func, *args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/image.py”, line 168, in coord
self.flow = func(self.flow, *args, **kwargs)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 224, in symmetric_warp
return _perspective_warp(c, targ_pts, invert)
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 210, in _perspective_warp
return _apply_perspective(c, _find_coeffs(_orig_pts, targ_pts))
File “/usr/local/lib/python3.6/dist-packages/fastai/vision/transform.py”, line 191, in _find_coeffs
return torch.gesv(B,A)[0][:,0]
RuntimeError: b should have at least 2 dimensions, but has 1 dimensions instead

I had the same problem. For me it was solved by upgrading fastai to version 1.0.41. With Conda the command is:

conda install -c fastai fastai=1.0.41

I found the necessary information on this forum, in another thread titled "Lesson 1 throwing error in ImageDataBunch stage in windows". :slight_smile:

Great — thanks for your help.
Am I supposed to type that into my notebook?