Lesson 2 Error- unexpected keyword argument 'mp_context'

Hey peeps, I’ve been bashing my head against the wall for an hour now. I’m trying to run a cell in the Google Colab Lesson 2 notebook, specifically:

# Download sample images for each bear class with the Bing image search API.
bear_types = 'grizzly','black','teddy'  # one class per search term
path = Path('bears')
# Only download once: skip everything if the 'bears' directory already exists.
if not path.exists():
    for o in bear_types:
        dest = (path/o)  # one sub-directory per class
        # `key` is the Azure/Bing API key defined earlier in the notebook.
        results = search_images_bing(key, f'{o} bear')
        download_images(dest, urls=results.attrgot('content_url'))

but I am getting the following error:


TypeError                                 Traceback (most recent call last)

<ipython-input-5-f86506dc644b> in <module>()
      5         dest.mkdir(exist_ok=True)
      6         results = search_images_bing(key, f'{o} bear')
----> 7         download_images(dest, urls=results.attrgot('content_url'))

2 frames

/usr/local/lib/python3.6/dist-packages/fastai/vision/utils.py in download_images(dest, url_file, urls, max_pics, n_workers, timeout)
     22     dest = Path(dest)
     23     dest.mkdir(exist_ok=True)
---> 24     parallel(partial(_download_image_inner, dest, timeout=timeout), list(enumerate(urls)), n_workers=n_workers)
     26 # Cell

/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in parallel(f, items, n_workers, total, progress, pause, timeout, chunksize, *args, **kwargs)
    704     "Applies `func` in parallel to `items`, using `n_workers`"
    705     if progress is None: progress = progress_bar is not None
--> 706     with ProcessPoolExecutor(n_workers, pause=pause) as ex:
    707         r = ex.map(f,items, *args, timeout=timeout, chunksize=chunksize, **kwargs)
    708         if progress:

/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in __init__(self, max_workers, on_exc, pause, mp_context, initializer, initargs)
    685         self.not_parallel = max_workers==0
    686         if self.not_parallel: max_workers=1
--> 687         super().__init__(max_workers, mp_context=mp_context, initializer=initializer, initargs=initargs)
    689     def map(self, f, items, timeout=None, chunksize=1, *args, **kwargs):

TypeError: __init__() got an unexpected keyword argument 'mp_context'

Can anyone tell me where I’ve gone wrong?


Hi there, same problem here.

It seems to be an issue with Python 3.6 not supporting the `mp_context` argument (though if that were the case, it should not have worked before either?)

Anyway, here is a hacky workaround:

def download_image_inner_mine(dest, inp, timeout=4):
    """Download one image into directory `dest`.

    `inp` is an `(index, url)` pair; the file is saved as
    `{index:08d}{suffix}`, where the suffix comes from the URL
    (defaulting to '.jpg' when none is found). Failures are reported
    instead of raised, so a batch download can continue past bad URLs.
    """
    i, url = inp
    # Pull a file extension out of the URL, ignoring any query string.
    suffix = re.findall(r'\.\w+?(?=(?:\?|$))', url)
    suffix = suffix[0] if suffix else '.jpg'
    try:
        download_url(url, dest/f"{i:08d}{suffix}", overwrite=True,
                     show_progress=False, timeout=timeout)
    except Exception:
        # BUG FIX: the original built this f-string and discarded it,
        # silently swallowing every failure — actually report the URL.
        print(f"Couldn't download {url}.")
def download_images_mine(dest, urls, timeout=4):
    """Sequentially download every image in `urls` into directory `dest`.

    Single-process replacement for fastai's `download_images`, working
    around the `mp_context` TypeError that `ProcessPoolExecutor` raises
    on Python 3.6. `timeout` (seconds) is forwarded to each download.
    """
    dest = Path(dest)
    # fastai's download_images creates the target directory; the
    # original workaround forgot this, so downloads into a missing
    # directory would fail.
    dest.mkdir(exist_ok=True)
    for i, url in enumerate(urls):
        download_image_inner_mine(dest, (i, url), timeout=timeout)

# Same download loop as the original notebook cell, but routed through the
# single-process `download_images_mine` workaround.
if not path.exists():
    for o in bear_types:
        dest = (path/o)  # one sub-directory per class
        results = search_images_bing(key, f'{o} bear')
        print(f'{o} bear')  # progress indicator, since downloads are sequential
        download_images_mine(dest, urls=results.attrgot('content_url'))

Above code should download everything as before.

Then to remove files that cannot be opened:

# Delete any downloaded file that cannot be opened as an image.
fns = get_image_files(path)
# A plain loop is clearer than the original list comprehension, which was
# used only for its side effects (it built and discarded a list of Nones).
for fn in fns:
    if not verify_image(fn):
        fn.unlink()

Later on, before executing cleaner, add a cell with:

def __getitem__(self:Box, i):
    "Allow indexing a `Box` widget: returns its i-th child."
    return self.children[i]

# Cell
def widget(im, *args, **layout):
    "Wrap `im` (anything IPython can `display`) in an `Output` widget"
    out = Output(layout=merge(*args, layout))
    with out:
        display(im)
    return out

# Cell
def _update_children(change):
    """Observer callback: give each child of the changed container an
    explicit `flex` layout value ('0 0 auto') when it has none."""
    for child in change['owner'].children:
        layout = child.layout
        if not layout.flex:
            layout.flex = '0 0 auto'

# Cell
def carousel(children=(), **layout):
    "A horizontally scrolling carousel"
    # Caller-supplied layout keys override these flexbox defaults.
    defaults = dict(overflow='scroll hidden', flex_flow='row', display='flex')
    box = Box([], layout=merge(defaults, layout))
    # Keep children sized naturally whenever the children list changes.
    box.observe(_update_children, names='children')
    box.children = children
    return box

# Cell
def _open_thumb(fn, h, w):
    "Open image `fn`, thumbnail it via fastai's `to_thumb(h, w)`, and convert to RGBA."
    im = Image.open(fn)
    return im.to_thumb(h, w).convert('RGBA')

def _get_iw_info(learn, ds_idx=0):
    # Collect per-item info for the dataset at `ds_idx` (0 = train,
    # 1 = valid, matching the ('Train','Valid') dropdown below).
    # Returns an `L` of [item, decoded-target, loss] triples aligned with
    # the underlying dataset order (shuffle/drop_last disabled so the
    # prediction order matches `dl.dataset.items`).
    dl = learn.dls[ds_idx].new(shuffle=False, drop_last=False)
    inp,probs,targs,preds,losses = learn.get_preds(dl=dl, with_input=True, with_loss=True, with_decoded=True)
    # decode_batch maps the tensors back to human-readable inputs/targets;
    # zip(*) transposes the (input, target) pairs into two sequences.
    inp,targs = L(zip(*dl.decode_batch((inp,targs), max_n=9999)))
    return L([dl.dataset.items,targs,losses]).zip()
class ImagesCleaner:
    """A widget that displays all images in `fns` along with a `Dropdown`.

    Each thumbnail gets a dropdown whose options are `opts` prefixed with
    '<Keep>' and '<Delete>', letting the user mark images for deletion or
    relabelling.
    """
    def __init__(self, opts=(), height=128, width=256, max_n=30):
        opts = ('<Keep>', '<Delete>')+tuple(opts)
        # BUG FIX: `set_fns`/the dropdowns read these attributes, but the
        # pasted code never stored them (the fastai original uses
        # `store_attr` here) — as written, `set_fns` raised AttributeError.
        self.opts,self.height,self.width,self.max_n = opts,height,width,max_n
        self.widget = carousel(width='100%')

    def set_fns(self, fns):
        # Show at most `max_n` thumbnails, one (image, dropdown) pair per file.
        self.fns = L(fns)[:self.max_n]
        ims = [_open_thumb(fn, self.height, self.width) for fn in self.fns]
        self.widget.children = [VBox([widget(im, height=f'{self.height}px'), Dropdown(
            options=self.opts, layout={'width': 'max-content'})]) for im in ims]

    def _ipython_display_(self): display(self.widget)

    def values(self):
        # Current dropdown selection for each displayed image.
        return L(self.widget.children).itemgot(1).attrgot('value')

    def delete(self):
        # Indices of images marked '<Delete>'.
        return self.values().argwhere(eq('<Delete>'))

    def change(self):
        # (index, new_label) pairs for images assigned a different category.
        idxs = self.values().argwhere(negate_func(in_(['<Delete>','<Keep>'])))
        return idxs.zipwith(self.values()[idxs])
class ImageClassifierCleaner(GetAttr):
    """A widget that provides an `ImagesCleaner` with a CNN `Learner`.

    Two dropdowns pick the dataset split and the category; the embedded
    `ImagesCleaner` then shows that slice, highest-loss images first.
    """
    def __init__(self, learn, **kwargs):
        vocab = learn.dls.vocab
        # `GetAttr` delegates unknown attribute access to `self.default`,
        # i.e. the inner ImagesCleaner.
        self.default = self.iw = ImagesCleaner(vocab, **kwargs)
        self.dd_ds   = Dropdown(options=('Train','Valid'))
        self.dd_cats = Dropdown(options=vocab)
        # Pre-compute (item, target, loss) info for both splits.
        self.iwis = _get_iw_info(learn, 0), _get_iw_info(learn, 1)
        for dd in (self.dd_ds, self.dd_cats):
            dd.observe(self.on_change_ds, 'value')
        self.widget = VBox([self.dd_cats, self.dd_ds, self.iw.widget])

    def _ipython_display_(self):
        display(self.widget)

    def on_change_ds(self, change=None):
        # Rows of the selected split whose target matches the chosen category.
        rows = self.iwis[self.dd_ds.index]
        info = L(o for o in rows if o[1]==self.dd_cats.value)
        # Sort by loss (element 2), highest first, and show the filenames.
        self.iw.set_fns(info.sorted(2, reverse=True).itemgot(0))

This error seems to have just appeared this morning. I ran the Lesson 2 notebook last night (Sep 14th, about 7PM Eastern Time) and download_images() worked without issue. Now (Sep 15th, 11AM ET) I’m seeing this error.

Either something changed on colab or the fastai library was updated and broke something. I ran the exact same code last night and the download worked.

Update: I ran the exact same code on Paperspace Gradient and it worked. So it looks like a Colab issue.


I am experiencing the same error with nbdev, during the “Run tests” phase (GitHub Actions). I was not able to find any other relevant thread, so adding this here, as it might be related.

Sorry if repeating, I’ve just joined the forum.

Run nbdev_test_nbs

Traceback (most recent call last):
File “/opt/hostedtoolcache/Python/3.6.12/x64/bin/nbdev_test_nbs”, line 8, in
File “/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages/fastscript/core.py”, line 76, in _f
File “/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages/nbdev/cli.py”, line 66, in nbdev_test_nbs
results = parallel(_test_one, files, flags=flags, verbose=verbose, n_workers=n_workers, pause=pause)
File “/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages/fastcore/utils.py”, line 706, in parallel
with ProcessPoolExecutor(n_workers, pause=pause) as ex:
File “/opt/hostedtoolcache/Python/3.6.12/x64/lib/python3.6/site-packages/fastcore/utils.py”, line 687, in init
super().init(max_workers, mp_context=mp_context, initializer=initializer, initargs=initargs)
TypeError: init() got an unexpected keyword argument ‘mp_context’
##[error]Process completed with exit code 1.

1 Like

TypeError: init() got an unexpected keyword argument ‘mp_context’. This error might be related to the latest fastcore version. nbdev_test_nbs runs fine with fastcore==1.0.9.

As a temp workaround you may change .github/workflows/main.yml as:

    - name: Install the library
      run: |
        pip install nbdev jupyter
        pip install -e .
        pip install fastcore==1.0.9 # added this line

I changed the first cell in the notebook to:

!pip install -Uqq fastbook
!pip install fastcore==1.0.9
import fastbook

And it looks to be working correctly now. Thanks for your help!


This works. Thank you for the solution, Kerem.

Many apologies - that should be fixed now.


I can verify that download_images() it is working in Colab now. There is no need to use an older version of fastcore. Thanks Jeremy!

I am still facing the same issue here. Run tests still give same error:

TypeError: __init__() got an unexpected keyword argument 'mp_context'

Yeah, me too.



I am still seeing the issue.

I tried creating a new clean Colab doc to avoid Google caching an old import etc, and I’m using these imports:

!pip install -Uqq fastbook
import fastbook
from fastbook import *
import shutil

And I find that this line:
download_images(dest, urls=results.attrgot('content_url'))

Returns this error:

/usr/local/lib/python3.6/dist-packages/fastcore/utils.py in __init__(self, max_workers, on_exc, pause, mp_context, initializer, initargs)
    691         self.not_parallel = max_workers==0
    692         if self.not_parallel: max_workers=1
--> 693         super().__init__(max_workers, mp_context=mp_context, initializer=initializer, initargs=initargs)
    695     def map(self, f, items, timeout=None, chunksize=1, *args, **kwargs):

TypeError: __init__() got an unexpected keyword argument 'mp_context'

When I run pip freeze:
!pip freeze | grep fast

I see these versions:


Hope this helps

It works with fastcore==1.0.12. The problem came back into 1.0.13 in the definition of ProcessPoolExecutor.

1 Like


Neither the fastcore==1.0.9 nor the 1.0.12 solution is working for me:

!pip install -Uqq fastbook
!pip install fastcore==1.0.12
import fastbook

Here’s pip freeze output


Thank you.

1 Like

Thanks man, extremely helpful.

also still having this problem as of today

Edit: admin on discord told me to try upgrading python to 3.7

I can also verify that downgrading fastcore works as well

Thank you. I don’t have this issue any more.