Hi all,
I was following the excellent notebook from Zachary:
And wanted to give it a try with my own data. Unfortunately, despite matching the bbox format exactly (as far as I can tell), I run into misalignment issues. I tried to replicate the format displayed below:
So the coordinate system seems to be [Upper Left (x,y), Lower Right (x,y) ], but when I put any other data into this format, the
show_batch
method puts the bounding boxes all over the place. I have tried a number of different approaches and even wrote my own plotting function, but was unable to get the same results as in the notebook. Any ideas on how this might be fixed?
I am using fastai2 version 0.0.12.
Also when I run learn.show_results()
on Zachary’s original notebook, I get the following error:
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
216 # in this case we return a big list
--> 217 try: return retain_type(torch.cat(xs, dim=dim), xs[0])
218 except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
TypeError: expected Tensor as element 0 in argument 0, but got int
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-38-c3b657dcc9ae> in <module>()
----> 1 learn.show_results()
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/learner.py in show_results(self, ds_idx, dl, max_n, shuffle, **kwargs)
224 if dl is None: dl = self.dls[ds_idx].new(shuffle=shuffle)
225 b = dl.one_batch()
--> 226 _,_,preds = self.get_preds(dl=[b], with_decoded=True)
227 self.dls.show_results(b, preds, max_n=max_n, **kwargs)
228
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, **kwargs)
202 self(event.begin_epoch if inner else _before_epoch)
203 self._do_epoch_validate(dl=dl)
--> 204 self(event.after_epoch if inner else _after_epoch)
205 if act is None: act = getattr(self.loss_func, 'activation', noop)
206 res = cb.all_tensors()
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/learner.py in __call__(self, event_name)
106 def ordered_cbs(self, cb_func): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, cb_func)]
107
--> 108 def __call__(self, event_name): L(event_name).map(self._call_one)
109 def _call_one(self, event_name):
110 assert hasattr(event, event_name)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in map(self, f, *args, **kwargs)
360 else f.format if isinstance(f,str)
361 else f.__getitem__)
--> 362 return self._new(map(g, self))
363
364 def filter(self, f, negate=False, **kwargs):
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
313 @property
314 def _xtra(self): return None
--> 315 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
316 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
317 def copy(self): return self._new(self.items.copy())
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
39 return x
40
---> 41 res = super().__call__(*((x,) + args), **kwargs)
42 res._newchk = 0
43 return res
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
304 if items is None: items = []
305 if (use_list is not None) or not _is_array(items):
--> 306 items = list(items) if use_list else _listify(items)
307 if match is not None:
308 if is_coll(match): match = len(match)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in _listify(o)
240 if isinstance(o, list): return o
241 if isinstance(o, str) or _is_array(o): return [o]
--> 242 if is_iter(o): return list(o)
243 return [o]
244
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/foundation.py in __call__(self, *args, **kwargs)
206 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
207 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 208 return self.fn(*fargs, **kwargs)
209
210 # Cell
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/learner.py in _call_one(self, event_name)
109 def _call_one(self, event_name):
110 assert hasattr(event, event_name)
--> 111 [cb(event_name) for cb in sort_by_run(self.cbs)]
112
113 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/learner.py in <listcomp>(.0)
109 def _call_one(self, event_name):
110 assert hasattr(event, event_name)
--> 111 [cb(event_name) for cb in sort_by_run(self.cbs)]
112
113 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/callback/core.py in __call__(self, event_name)
21 _run = (event_name not in _inner_loop or (self.run_train and getattr(self, 'training', True)) or
22 (self.run_valid and not getattr(self, 'training', False)))
---> 23 if self.run and _run: getattr(self, event_name, noop)()
24 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
25
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/callback/core.py in after_fit(self)
93 "Concatenate all recorded tensors"
94 if self.with_input: self.inputs = detuplify(to_concat(self.inputs, dim=self.concat_dim))
---> 95 if not self.save_preds: self.preds = detuplify(to_concat(self.preds, dim=self.concat_dim))
96 if not self.save_targs: self.targets = detuplify(to_concat(self.targets, dim=self.concat_dim))
97 if self.with_loss: self.losses = to_concat(self.losses)
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in <listcomp>(.0)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in <listcomp>(.0)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in <listcomp>(.0)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in to_concat(xs, dim)
217 try: return retain_type(torch.cat(xs, dim=dim), xs[0])
218 except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
--> 219 for i in range_of(o_)) for o_ in xs], L())
220
221 # Cell
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastai2/torch_core.py in <listcomp>(.0)
217 try: return retain_type(torch.cat(xs, dim=dim), xs[0])
218 except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
--> 219 for i in range_of(o_)) for o_ in xs], L())
220
221 # Cell
~/anaconda3/envs/pytorch_p36/lib/python3.6/site-packages/fastcore/utils.py in range_of(x)
162 def range_of(x):
163 "All indices of collection `x` (i.e. `list(range(len(x)))`)"
--> 164 return list(range(len(x)))
165
166 # Cell
TypeError: object of type 'int' has no len()