Ubuntu urlopen problem (cannot load torchvision models)

I know it is not a fastai problem, but any help would be appreciated.
I am trying to download torchvision models and I am getting an error from urlopen (from urllib.request).
Minimal code:

from torchvision.models import *
r18 = resnet18(True)
>>Downloading: "https://download.pytorch.org/models/resnet18-5c106cde.pth" to /home/tc256760/.cache/torch/checkpoints/resnet18-5c106cde.pth
---------------------------------------------------------------------------
gaierror                                  Traceback (most recent call last)
~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
   1316                 h.request(req.get_method(), req.selector, req.data, headers,
-> 1317                           encode_chunked=req.has_header('Transfer-encoding'))
   1318             except OSError as err: # timeout error

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in request(self, method, url, body, headers, encode_chunked)
   1228         """Send a complete request to the server."""
-> 1229         self._send_request(method, url, body, headers, encode_chunked)
   1230 

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in _send_request(self, method, url, body, headers, encode_chunked)
   1274             body = _encode(body, 'body')
-> 1275         self.endheaders(body, encode_chunked=encode_chunked)
   1276 

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in endheaders(self, message_body, encode_chunked)
   1223             raise CannotSendHeader()
-> 1224         self._send_output(message_body, encode_chunked=encode_chunked)
   1225 

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in _send_output(self, message_body, encode_chunked)
   1015         del self._buffer[:]
-> 1016         self.send(msg)
   1017 

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in send(self, data)
    955             if self.auto_open:
--> 956                 self.connect()
    957             else:

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in connect(self)
   1383 
-> 1384             super().connect()
   1385 

~/anaconda3/envs/fastai-master/lib/python3.7/http/client.py in connect(self)
    927         self.sock = self._create_connection(
--> 928             (self.host,self.port), self.timeout, self.source_address)
    929         self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

~/anaconda3/envs/fastai-master/lib/python3.7/socket.py in create_connection(address, timeout, source_address)
    706     err = None
--> 707     for res in getaddrinfo(host, port, 0, SOCK_STREAM):
    708         af, socktype, proto, canonname, sa = res

~/anaconda3/envs/fastai-master/lib/python3.7/socket.py in getaddrinfo(host, port, family, type, proto, flags)
    747     addrlist = []
--> 748     for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
    749         af, socktype, proto, canonname, sa = res

gaierror: [Errno -2] Name or service not known

During handling of the above exception, another exception occurred:

URLError                                  Traceback (most recent call last)
<ipython-input-13-5d3203569518> in <module>
----> 1 r18 = resnet18(True)

~/anaconda3/envs/fastai-master/lib/python3.7/site-packages/torchvision/models/resnet.py in resnet18(pretrained, progress, **kwargs)
    224     """
    225     return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress,
--> 226                    **kwargs)
    227 
    228 

~/anaconda3/envs/fastai-master/lib/python3.7/site-packages/torchvision/models/resnet.py in _resnet(arch, block, layers, pretrained, progress, **kwargs)
    211     if pretrained:
    212         state_dict = load_state_dict_from_url(model_urls[arch],
--> 213                                               progress=progress)
    214         model.load_state_dict(state_dict)
    215     return model

~/anaconda3/envs/fastai-master/lib/python3.7/site-packages/torch/hub.py in load_state_dict_from_url(url, model_dir, map_location, progress)
    431         sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file))
    432         hash_prefix = HASH_REGEX.search(filename).group(1)
--> 433         _download_url_to_file(url, cached_file, hash_prefix, progress=progress)
    434     return torch.load(cached_file, map_location=map_location)

~/anaconda3/envs/fastai-master/lib/python3.7/site-packages/torch/hub.py in _download_url_to_file(url, dst, hash_prefix, progress)
    347 def _download_url_to_file(url, dst, hash_prefix, progress):
    348     file_size = None
--> 349     u = urlopen(url)
    350     meta = u.info()
    351     if hasattr(meta, 'getheaders'):

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context)
    220     else:
    221         opener = _opener
--> 222     return opener.open(url, data, timeout)
    223 
    224 def install_opener(opener):

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in open(self, fullurl, data, timeout)
    523             req = meth(req)
    524 
--> 525         response = self._open(req, data)
    526 
    527         # post-process response

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in _open(self, req, data)
    541         protocol = req.type
    542         result = self._call_chain(self.handle_open, protocol, protocol +
--> 543                                   '_open', req)
    544         if result:
    545             return result

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in _call_chain(self, chain, kind, meth_name, *args)
    501         for handler in handlers:
    502             func = getattr(handler, meth_name)
--> 503             result = func(*args)
    504             if result is not None:
    505                 return result

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in https_open(self, req)
   1358         def https_open(self, req):
   1359             return self.do_open(http.client.HTTPSConnection, req,
-> 1360                 context=self._context, check_hostname=self._check_hostname)
   1361 
   1362         https_request = AbstractHTTPHandler.do_request_

~/anaconda3/envs/fastai-master/lib/python3.7/urllib/request.py in do_open(self, http_class, req, **http_conn_args)
   1317                           encode_chunked=req.has_header('Transfer-encoding'))
   1318             except OSError as err: # timeout error
-> 1319                 raise URLError(err)
   1320             r = h.getresponse()
   1321         except:

URLError: <urlopen error [Errno -2] Name or service not known>

I opened ipdb right after, and the line causing the problem is:

u = urlopen("https://download.pytorch.org/models/resnet18-5c106cde.pth")

In my windows machine it works, in my Ubuntu machine it does not. I tried a fresh conda env with no luck. Any idea how to debug this error?
I can urlopen("https://www.google.com") without problem.
It is my new Workstation and I am unable to load pretrained models…

How did you solve this error? Because I am getting the same error.

It was my firewall that was blocking this, but don’t know why.

So @tcapelle How did you resolve this?

I did not, I don’t have control over the firewall. I ended up downloading by hand to the pretrained models folder the archs I use often, resnet18, resnet34, etc…

1 Like