So my learn.validate()
runs just fine and returns my metrics and loss … but this:
probs, targs, loss = learn.get_preds(dl=dls.valid, with_loss=True)
throws an exception: RuntimeError: shape '[80, -1]' is invalid for input of size 1
Not really sure why this is happening when I include with_loss=True. Full traceback below:
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
~/development/test_app/_libs/fastai2/fastai2/learner.py in _do_epoch_validate(self, ds_idx, dl)
182 self.dl = dl; self('begin_validate')
--> 183 with torch.no_grad(): self.all_batches()
184 except CancelValidException: self('after_cancel_validate')
~/development/test_app/_libs/fastai2/fastai2/learner.py in all_batches(self)
152 self.n_iter = len(self.dl)
--> 153 for o in enumerate(self.dl): self.one_batch(*o)
154
~/development/test_app/_libs/fastai2/fastai2/learner.py in one_batch(self, i, b)
166 except CancelBatchException: self('after_cancel_batch')
--> 167 finally: self('after_batch')
168
~/development/test_app/_libs/fastai2/fastai2/learner.py in __call__(self, event_name)
133
--> 134 def __call__(self, event_name): L(event_name).map(self._call_one)
135 def _call_one(self, event_name):
~/development/test_app/_libs/fastcore/fastcore/foundation.py in map(self, f, *args, **kwargs)
375 else f.__getitem__)
--> 376 return self._new(map(g, self))
377
~/development/test_app/_libs/fastcore/fastcore/foundation.py in _new(self, items, *args, **kwargs)
326 def _xtra(self): return None
--> 327 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
328 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
46
---> 47 res = super().__call__(*((x,) + args), **kwargs)
48 res._newchk = 0
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
317 if (use_list is not None) or not _is_array(items):
--> 318 items = list(items) if use_list else _listify(items)
319 if match is not None:
~/development/test_app/_libs/fastcore/fastcore/foundation.py in _listify(o)
253 if isinstance(o, str) or _is_array(o): return [o]
--> 254 if is_iter(o): return list(o)
255 return [o]
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __call__(self, *args, **kwargs)
219 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 220 return self.fn(*fargs, **kwargs)
221
~/development/test_app/_libs/fastai2/fastai2/learner.py in _call_one(self, event_name)
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
~/development/test_app/_libs/fastai2/fastai2/learner.py in <listcomp>(.0)
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
~/development/test_app/_libs/fastai2/fastai2/callback/core.py in __call__(self, event_name)
23 (self.run_valid and not getattr(self, 'training', False)))
---> 24 if self.run and _run: getattr(self, event_name, noop)()
25 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
~/development/test_app/_libs/fastai2/fastai2/callback/core.py in after_batch(self)
89 bs = find_bs(self.yb)
---> 90 loss = self.loss if self.loss.numel() == bs else self.loss.view(bs,-1).mean(1)
91 self.losses.append(to_detach(loss))
RuntimeError: shape '[80, -1]' is invalid for input of size 1
During handling of the above exception, another exception occurred:
IndexError Traceback (most recent call last)
<ipython-input-159-aad9a534b0bf> in <module>
----> 1 probs, targs, loss = learn.get_preds(dl=dls.valid, with_loss=True)
2
3 print(f'Validation Loss: {loss.mean()}')
4 # print(f'Validation Loss (per label): {loss.mean(dim=0)}') # ... no longer works (see forum comment from sylvain)
~/development/test_app/_libs/fastai2/fastai2/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, **kwargs)
227 for mgr in ctx_mgrs: stack.enter_context(mgr)
228 self(event.begin_epoch if inner else _before_epoch)
--> 229 self._do_epoch_validate(dl=dl)
230 self(event.after_epoch if inner else _after_epoch)
231 if act is None: act = getattr(self.loss_func, 'activation', noop)
~/development/test_app/_libs/fastai2/fastai2/learner.py in _do_epoch_validate(self, ds_idx, dl)
183 with torch.no_grad(): self.all_batches()
184 except CancelValidException: self('after_cancel_validate')
--> 185 finally: self('after_validate')
186
187 @log_args(but='cbs')
~/development/test_app/_libs/fastai2/fastai2/learner.py in __call__(self, event_name)
132 def ordered_cbs(self, event): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, event)]
133
--> 134 def __call__(self, event_name): L(event_name).map(self._call_one)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
~/development/test_app/_libs/fastcore/fastcore/foundation.py in map(self, f, *args, **kwargs)
374 else f.format if isinstance(f,str)
375 else f.__getitem__)
--> 376 return self._new(map(g, self))
377
378 def filter(self, f, negate=False, **kwargs):
~/development/test_app/_libs/fastcore/fastcore/foundation.py in _new(self, items, *args, **kwargs)
325 @property
326 def _xtra(self): return None
--> 327 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
328 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
329 def copy(self): return self._new(self.items.copy())
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
45 return x
46
---> 47 res = super().__call__(*((x,) + args), **kwargs)
48 res._newchk = 0
49 return res
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
316 if items is None: items = []
317 if (use_list is not None) or not _is_array(items):
--> 318 items = list(items) if use_list else _listify(items)
319 if match is not None:
320 if is_coll(match): match = len(match)
~/development/test_app/_libs/fastcore/fastcore/foundation.py in _listify(o)
252 if isinstance(o, list): return o
253 if isinstance(o, str) or _is_array(o): return [o]
--> 254 if is_iter(o): return list(o)
255 return [o]
256
~/development/test_app/_libs/fastcore/fastcore/foundation.py in __call__(self, *args, **kwargs)
218 if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
219 fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 220 return self.fn(*fargs, **kwargs)
221
222 # Cell
~/development/test_app/_libs/fastai2/fastai2/learner.py in _call_one(self, event_name)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
~/development/test_app/_libs/fastai2/fastai2/learner.py in <listcomp>(.0)
135 def _call_one(self, event_name):
136 assert hasattr(event, event_name)
--> 137 [cb(event_name) for cb in sort_by_run(self.cbs)]
138
139 def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)
~/development/test_app/_libs/fastai2/fastai2/callback/core.py in __call__(self, event_name)
22 _run = (event_name not in _inner_loop or (self.run_train and getattr(self, 'training', True)) or
23 (self.run_valid and not getattr(self, 'training', False)))
---> 24 if self.run and _run: getattr(self, event_name, noop)()
25 if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
26
~/development/test_app/_libs/fastai2/fastai2/callback/core.py in after_validate(self)
96 if not self.save_preds: self.preds = detuplify(to_concat(self.preds, dim=self.concat_dim))
97 if not self.save_targs: self.targets = detuplify(to_concat(self.targets, dim=self.concat_dim))
---> 98 if self.with_loss: self.losses = to_concat(self.losses)
99
100 def all_tensors(self):
~/development/test_app/_libs/fastai2/fastai2/torch_core.py in to_concat(xs, dim)
211 def to_concat(xs, dim=0):
212 "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213 if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
214 if isinstance(xs[0],dict): return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs[0].keys()}
215 #We may receives xs that are not concatenatable (inputs of a text classifier for instance),
IndexError: list index out of range
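Reading the after_batch line in GatherPredsCallback from the first traceback (loss = self.loss if self.loss.numel() == bs else self.loss.view(bs,-1).mean(1)), it looks like self.loss only holds a single element for the whole batch, so the view(80, -1) can't work. For reference, here's roughly how I poked at my setup. I'm on a custom loss function, and this sketch assumes a single-input, single-target batch and that my loss returns a tensor; from skimming learner.py I *think* get_preds with with_loss=True wants the loss in a non-reduced form (one value per item), and I suspect mine is still handing back a single mean value:

import torch

# Pull one validation batch and check what shape my loss comes back as.
# (Assumes a single input and single target; my loss_func is a plain callable
# that returns a tensor.)
xb, yb = dls.valid.one_batch()
with torch.no_grad():
    preds = learn.model(xb)

print(hasattr(learn.loss_func, 'reduction'))   # False for my function
print(learn.loss_func(preds, yb).shape)        # torch.Size([]) -- already mean-reduced, not (bs,)

If that scalar is what GatherPredsCallback ends up seeing, it would explain the error, but I may well be misreading what with_loss expects.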