Hello, I am trying to do inference on a FastAPI server with this code:
async def setup_learner():
    """Load the exported fastai Learner from disk and prepare it for CPU inference.

    Returns:
        The loaded Learner, moved to CPU with DataLoader workers disabled.

    Raises:
        RuntimeError: with a friendly message when the export was produced by
            an old fastai version that cannot run in a CPU-only environment;
            any other RuntimeError is re-raised unchanged.
    """
    # await gdl(drive_file_id, path / export_file_name)
    # path / export_file_name is on my physical machine via manual download
    try:
        # NOTE(review): torch.load unpickles arbitrary code — only ever load
        # model files from a trusted source.
        learn = torch.load(path / export_file_name, map_location=torch.device('cpu'))
        learn.dls.device = 'cpu'
        # Disable DataLoader worker processes for inference. The multiprocessing
        # spawn tracebacks ("'_FakeLoader' object has no attribute 'noops'")
        # come from workers failing to unpickle the loader; with no inputs
        # collected, get_preds later dies with "IndexError: list index out of
        # range" in to_concat. Single-process loading avoids both.
        learn.dls.num_workers = 0
        return learn
    except RuntimeError as e:
        if len(e.args) > 0 and 'CPU-only machine' in e.args[0]:
            print(e)
            message = "\n\nThis model was trained with an old version of fastai and will not work in a CPU environment.\n\nPlease update the fastai library in your training environment and export your model again.\n\nSee instructions for 'Returning to work' at https://course.fast.ai."
            raise RuntimeError(message)
        else:
            raise
learn = None  # filled in by startup_event once the model has loaded


@app.on_event("startup")
async def startup_event():
    """Load the learner exactly once at server startup and store it globally."""
    global learn
    event_loop = asyncio.get_event_loop()
    pending = [asyncio.ensure_future(setup_learner())]
    results = await asyncio.gather(*pending)
    learn = results[0]
@app.route('/analyze', methods=['POST'])
async def analyze(request):
    """Accept an uploaded file, run the model on its bytes, and acknowledge with JSON."""
    form = await request.form()
    upload = form['file']
    raw_bytes = await upload.read()
    # Run inference on the raw uploaded bytes; unpack the fastai predict triple.
    prediction, loc, prob_classes = learn.predict(raw_bytes)
    print(prediction, loc, prob_classes)
    return JSONResponse({'msg': 'msg'})
And I receive these two errors:
Traceback (most recent call last):-----| 0.00% [0/1 00:00<00:00]
File "<string>", line 1, in <module>
File "/usr/lib/python3.7/multiprocessing/spawn.py", line 105, in spawn_main
exitcode = _main(fd)
File "/usr/lib/python3.7/multiprocessing/spawn.py", line 115, in _main
self = reduction.pickle.load(from_parent)
AttributeError: '_FakeLoader' object has no attribute 'noops'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "/usr/lib/python3.7/multiprocessing/spawn.py", line 105, in spawn_main
exitcode = _main(fd)
File "/usr/lib/python3.7/multiprocessing/spawn.py", line 115, in _main
self = reduction.pickle.load(from_parent)
AttributeError: '_FakeLoader' object has no attribute 'noops'
INFO: 127.0.0.1:59896 - "POST /analyze HTTP/1.1" 500 Internal Server Error
ERROR: Exception in ASGI application
Traceback (most recent call last):
File "/home/nole/.local/lib/python3.7/site-packages/uvicorn/protocols/http/httptools_impl.py", line 385, in run_asgi
result = await app(self.scope, self.receive, self.send)
File "/home/nole/.local/lib/python3.7/site-packages/uvicorn/middleware/proxy_headers.py", line 45, in __call__
return await self.app(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/fastapi/applications.py", line 149, in __call__
await super().__call__(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/applications.py", line 102, in __call__
await self.middleware_stack(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/middleware/errors.py", line 181, in __call__
raise exc from None
File "/home/nole/.local/lib/python3.7/site-packages/starlette/middleware/errors.py", line 159, in __call__
await self.app(scope, receive, _send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/middleware/cors.py", line 84, in __call__
await self.simple_response(scope, receive, send, request_headers=headers)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/middleware/cors.py", line 140, in simple_response
await self.app(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/exceptions.py", line 82, in __call__
raise exc from None
File "/home/nole/.local/lib/python3.7/site-packages/starlette/exceptions.py", line 71, in __call__
await self.app(scope, receive, sender)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/routing.py", line 550, in __call__
await route.handle(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/routing.py", line 227, in handle
await self.app(scope, receive, send)
File "/home/nole/.local/lib/python3.7/site-packages/starlette/routing.py", line 41, in app
response = await func(request)
File "./main.py", line 194, in analyze
prediction, loc, prob_classes = learn.predict(in_memory_file)
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 231, in predict
inp,preds,_,dec_preds = self.get_preds(dl=dl, with_input=True, with_decoded=True)
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 219, in get_preds
self._do_epoch_validate(dl=dl)
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 178, in _do_epoch_validate
dl,*_ = change_attrs(dl, names, old, has); self('after_validate')
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 124, in __call__
def __call__(self, event_name): L(event_name).map(self._call_one)
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 372, in map
return self._new(map(g, self))
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 323, in _new
def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 41, in __call__
res = super().__call__(*((x,) + args), **kwargs)
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 314, in __init__
items = list(items) if use_list else _listify(items)
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 250, in _listify
if is_iter(o): return list(o)
File "/home/nole/.local/lib/python3.7/site-packages/fastcore/foundation.py", line 216, in __call__
return self.fn(*fargs, **kwargs)
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 127, in _call_one
[cb(event_name) for cb in sort_by_run(self.cbs)]
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/learner.py", line 127, in <listcomp>
[cb(event_name) for cb in sort_by_run(self.cbs)]
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/callback/core.py", line 24, in __call__
if self.run and _run: getattr(self, event_name, noop)()
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/callback/core.py", line 95, in after_validate
if self.with_input: self.inputs = detuplify(to_concat(self.inputs, dim=self.concat_dim))
File "/home/nole/.local/lib/python3.7/site-packages/fastai2/torch_core.py", line 213, in to_concat
if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
IndexError: list index out of range
I’m pretty sure that the learner is fully set up, as I’ve added print calls to confirm the learner instance is available before calling predict. I trained the model on Colab, without using .to_fp16,
and it worked in that environment. My machine info is as follows:
ubuntu
description: Computer
width: 64 bits
capabilities: smp vsyscall32
*-core
description: Motherboard
physical id: 0
*-memory
description: System memory
physical id: 0
size: 4GiB
*-cpu
product: Intel(R) Pentium(R) Silver N5000 CPU @ 1.10GHz
vendor: Intel Corp.
physical id: 1
bus info: cpu@0
size: 2526MHz
capacity: 2700MHz
width: 64 bits
capabilities: fpu fpu_exception wp vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp x86-64 constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc cpuid aperfmperf tsc_known_freq pni pclmulqdq dtes64 monitor ds_cpl vmx est tm2 ssse3 sdbg cx16 xtpr pdcm sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave rdrand lahf_lm 3dnowprefetch cpuid_fault cat_l2 pti cdp_l2 ssbd ibrs ibpb stibp ibrs_enhanced tpr_shadow vnmi flexpriority ept vpid ept_ad fsgsbase tsc_adjust smep erms mpx rdt_a rdseed smap clflushopt intel_pt sha_ni xsaveopt xsavec xgetbv1 xsaves dtherm ida arat pln pts umip rdpid md_clear arch_capabilities cpufreq