I believe the DICOM support that was merged into the master branch is broken. Lesson1.ipynb now gives the following error when trying to create a learner:
UnicodeDecodeError Traceback (most recent call last)
in ()
1 arch=resnet34
2 data = ImageClassifierData.from_paths(PATH, tfms=tfms_from_model(arch, sz))
----> 3 learn = ConvLearner.pretrained(arch, data, precompute=True)
4 learn.fit(0.01, 2)
/usr/local/lib/python3.6/dist-packages/fastai/conv_learner.py in pretrained(cls, f, data, ps, xtra_fc, xtra_cut, custom_head, precompute, pretrained, **kwargs)
112 models = ConvnetBuilder(f, data.c, data.is_multi, data.is_reg,
113 ps=ps, xtra_fc=xtra_fc, xtra_cut=xtra_cut, custom_head=custom_head, pretrained=pretrained)
--> 114 return cls(data, models, precompute, **kwargs)
115
116 @classmethod
/usr/local/lib/python3.6/dist-packages/fastai/conv_learner.py in init(self, data, models, precompute, **kwargs)
98 if hasattr(data, 'is_multi') and not data.is_reg and self.metrics is None:
99 self.metrics = [accuracy_thresh(0.5)] if self.data.is_multi else [accuracy]
--> 100 if precompute: self.save_fc1()
101 self.freeze()
102 self.precompute = precompute
/usr/local/lib/python3.6/dist-packages/fastai/conv_learner.py in save_fc1(self)
177 m=self.models.top_model
178 if len(self.activations[0])!=len(self.data.trn_ds):
--> 179 predict_to_bcolz(m, self.data.fix_dl, act)
180 if len(self.activations[1])!=len(self.data.val_ds):
181 predict_to_bcolz(m, self.data.val_dl, val_act)
/usr/local/lib/python3.6/dist-packages/fastai/model.py in predict_to_bcolz(m, gen, arr, workers)
15 lock=threading.Lock()
16 m.eval()
---> 17 for x,*_ in tqdm(gen):
18 y = to_np(m(VV(x)).data)
19 with lock:
/usr/local/lib/python3.6/dist-packages/tqdm/_tqdm.py in iter(self)
935 """, fp_write=getattr(self.fp, 'write', sys.stderr.write))
936
--> 937 for obj in iterable:
938 yield obj
939 # Update and possibly print the progressbar.
/usr/local/lib/python3.6/dist-packages/fastai/dataloader.py in iter(self)
86 # avoid py3.6 issue where queue is infinite and can result in memory exhaustion
87 for c in chunk_iter(iter(self.batch_sampler), self.num_workers*10):
---> 88 for batch in e.map(self.get_batch, c):
89 yield get_tensor(batch, self.pin_memory, self.half)
90
/usr/lib/python3.6/concurrent/futures/_base.py in result_iterator()
584 # Careful not to keep a reference to the popped future
585 if timeout is None:
--> 586 yield fs.pop().result()
587 else:
588 yield fs.pop().result(end_time - time.time())
/usr/lib/python3.6/concurrent/futures/_base.py in result(self, timeout)
423 raise CancelledError()
424 elif self._state == FINISHED:
--> 425 return self.__get_result()
426
427 self._condition.wait(timeout)
/usr/lib/python3.6/concurrent/futures/_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
/usr/lib/python3.6/concurrent/futures/thread.py in run(self)
54
55 try:
---> 56 result = self.fn(*self.args, **self.kwargs)
57 except BaseException as exc:
58 self.future.set_exception(exc)
/usr/local/lib/python3.6/dist-packages/fastai/dataloader.py in get_batch(self, indices)
73
74 def get_batch(self, indices):
---> 75 res = self.np_collate([self.dataset[i] for i in indices])
76 if self.transpose: res[0] = res[0].T
77 if self.transpose_y: res[1] = res[1].T
/usr/local/lib/python3.6/dist-packages/fastai/dataloader.py in (.0)
73
74 def get_batch(self, indices):
---> 75 res = self.np_collate([self.dataset[i] for i in indices])
76 if self.transpose: res[0] = res[0].T
77 if self.transpose_y: res[1] = res[1].T
/usr/local/lib/python3.6/dist-packages/fastai/dataset.py in getitem(self, idx)
201 xs,ys = zip(*[self.get1item(i) for i in range(*idx.indices(self.n))])
202 return np.stack(xs),ys
--> 203 return self.get1item(idx)
204
205 def len(self): return self.n
/usr/local/lib/python3.6/dist-packages/fastai/dataset.py in get1item(self, idx)
194
195 def get1item(self, idx):
--> 196 x,y = self.get_x(idx),self.get_y(idx)
197 return self.get(self.transform, x, y)
198
/usr/local/lib/python3.6/dist-packages/fastai/dataset.py in get_x(self, i)
297 super().init(transform)
298 def get_sz(self): return self.transform.sz
--> 299 def get_x(self, i): return open_image(os.path.join(self.path, self.fnames[i]))
300 def get_n(self): return len(self.fnames)
301
/usr/local/lib/python3.6/dist-packages/fastai/dataset.py in open_image(fn)
266 elif os.path.isdir(fn) and not str(fn).startswith("http"):
267 raise OSError('Is a directory: {}'.format(fn))
--> 268 elif isdicom(fn):
269 slice = pydicom.read_file(fn)
270 if slice.PhotometricInterpretation.startswith('MONOCHROME'):
/usr/local/lib/python3.6/dist-packages/fastai/dataset.py in isdicom(fn)
250 with open(fn) as fh:
251 fh.seek(0x80)
--> 252 return fh.read(4)=='DICM'
253
254 def open_image(fn):
/usr/lib/python3.6/encodings/ascii.py in decode(self, input, final)
24 class IncrementalDecoder(codecs.IncrementalDecoder):
25 def decode(self, input, final=False):
---> 26 return codecs.ascii_decode(input, self.errors)[0]
27
28 class StreamWriter(Codec,codecs.StreamWriter):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xff in position 30: ordinal not in range(128)
Reverting to cb121994872fbd5f4ee67de01bcb9848a7e54a6b causes lesson1 to start working again. The root cause appears to be that `isdicom` opens the file in text (ASCII) mode before reading the 4-byte magic number at offset 0x80; on any non-ASCII image file the read itself raises `UnicodeDecodeError`. Opening the file in binary mode (`open(fn, 'rb')`) and comparing the result against the bytes literal `b'DICM'` should fix it.