Lesson 11 notebook 08_data_block.ipynb throws BrokenPipeError

After applying the corrections identified by @exynos7 and generating updated versions of nb_05.py and nb_05a.py, I still cannot get Lesson 11 notebook 08_data_block.ipynb to run successfully.

In the Modeling section of the notebook 08_data_block.ipynb, the 2nd line of code makes the data loaders:
train_dl,valid_dl = get_dls(ll.train,ll.valid,bs, num_workers=4)

The next line should grab the next batch, but instead throws a BrokenPipeError:
x,y = next(iter(train_dl))


BrokenPipeError Traceback (most recent call last)
&lt;ipython-input&gt; in &lt;module&gt;
----> 1 x,y = next(iter(train_dl))

~\Anaconda3\envs\fastai\lib\site-packages\torch\utils\data\dataloader.py in __iter__(self)
276 return _SingleProcessDataLoaderIter(self)
277 else:
--> 278 return _MultiProcessingDataLoaderIter(self)
279
280 @property

~\Anaconda3\envs\fastai\lib\site-packages\torch\utils\data\dataloader.py in __init__(self, loader)
680 # before it starts, and __del__ tries to join but will get:
681 # AssertionError: can only join a started process.
--> 682 w.start()
683 self.index_queues.append(index_queue)
684 self.workers.append(w)

~\Anaconda3\envs\fastai\lib\multiprocessing\process.py in start(self)
110 'daemonic processes are not allowed to have children'
111 _cleanup()
--> 112 self._popen = self._Popen(self)
113 self._sentinel = self._popen.sentinel
114 # Avoid a refcycle if the target function holds an indirect

~\Anaconda3\envs\fastai\lib\multiprocessing\context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
--> 223 return _default_context.get_context().Process._Popen(process_obj)
224
225 class DefaultContext(BaseContext):

~\Anaconda3\envs\fastai\lib\multiprocessing\context.py in _Popen(process_obj)
320 def _Popen(process_obj):
321 from .popen_spawn_win32 import Popen
--> 322 return Popen(process_obj)
323
324 class SpawnContext(BaseContext):

~\Anaconda3\envs\fastai\lib\multiprocessing\popen_spawn_win32.py in __init__(self, process_obj)
87 try:
88 reduction.dump(prep_data, to_child)
--> 89 reduction.dump(process_obj, to_child)
90 finally:
91 set_spawning_popen(None)

~\Anaconda3\envs\fastai\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 '''Replacement for pickle.dump() using ForkingPickler.'''
--> 60 ForkingPickler(file, protocol).dump(obj)
61
62 #

BrokenPipeError: [Errno 32] Broken pipe

====================================================
If I change num_workers from 4 to 0 in the 2nd line,
train_dl,valid_dl = get_dls(ll.train,ll.valid,bs, num_workers=0)

then the 3rd line grabs a batch without throwing an error.

Then, the subsequent lines of code execute smoothly until the third-to-last line, which throws a BrokenPipeError:
model_summary(run, learn, data)


BrokenPipeError Traceback (most recent call last)
&lt;ipython-input&gt; in &lt;module&gt;
----> 1 model_summary(run, learn, data) # throws BrokenPipeError: [Errno 32] Broken pipe

&lt;ipython-input&gt; in model_summary(run, learn, data, find_all)
1 #export
2 def model_summary(run, learn, data, find_all=False):
----> 3 xb,yb = get_batch(data.valid_dl, run)
4 device = next(learn.model.parameters()).device#Model may not be on the GPU yet
5 xb,yb = xb.to(device),yb.to(device)

~\fastai\course-v3\nbs\dl2\exp\nb_07a.py in get_batch(dl, run)
8
9 def get_batch(dl, run):
--> 10 run.xb,run.yb = next(iter(dl))
11 for cb in run.cbs: cb.set_runner(run)
12 run('begin_batch')

~\Anaconda3\envs\fastai\lib\site-packages\torch\utils\data\dataloader.py in __iter__(self)
276 return _SingleProcessDataLoaderIter(self)
277 else:
--> 278 return _MultiProcessingDataLoaderIter(self)
279
280 @property

~\Anaconda3\envs\fastai\lib\site-packages\torch\utils\data\dataloader.py in __init__(self, loader)
680 # before it starts, and __del__ tries to join but will get:
681 # AssertionError: can only join a started process.
--> 682 w.start()
683 self.index_queues.append(index_queue)
684 self.workers.append(w)

~\Anaconda3\envs\fastai\lib\multiprocessing\process.py in start(self)
110 'daemonic processes are not allowed to have children'
111 _cleanup()
--> 112 self._popen = self._Popen(self)
113 self._sentinel = self._popen.sentinel
114 # Avoid a refcycle if the target function holds an indirect

~\Anaconda3\envs\fastai\lib\multiprocessing\context.py in _Popen(process_obj)
221 @staticmethod
222 def _Popen(process_obj):
--> 223 return _default_context.get_context().Process._Popen(process_obj)
224
225 class DefaultContext(BaseContext):

~\Anaconda3\envs\fastai\lib\multiprocessing\context.py in _Popen(process_obj)
320 def _Popen(process_obj):
321 from .popen_spawn_win32 import Popen
--> 322 return Popen(process_obj)
323
324 class SpawnContext(BaseContext):

~\Anaconda3\envs\fastai\lib\multiprocessing\popen_spawn_win32.py in __init__(self, process_obj)
87 try:
88 reduction.dump(prep_data, to_child)
--> 89 reduction.dump(process_obj, to_child)
90 finally:
91 set_spawning_popen(None)

~\Anaconda3\envs\fastai\lib\multiprocessing\reduction.py in dump(obj, file, protocol)
58 def dump(obj, file, protocol=None):
59 '''Replacement for pickle.dump() using ForkingPickler.'''
--> 60 ForkingPickler(file, protocol).dump(obj)
61
62 #

BrokenPipeError: [Errno 32] Broken pipe

Has anyone encountered errors like these?
I should mention that I'm running Windows 10 64-bit.