Learner.get_preds() error when model returns tuple

My model returns a tuple of size two. The first element is simply a tensor of shape [batch, 1], and the second element is a tensor of shape [batch, max_length], where max_length can change between batches. I have a custom loss function that seems to work, and the learner trains properly to completion. But when I call learner.get_preds(), the preds seem to get calculated and then I receive this exception:
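To make the shapes concrete, here is a simplified sketch of what my forward returns (the class and layer names are just illustrative, not my actual model):

```python
import torch
import torch.nn as nn

class TwoOutputModel(nn.Module):
    "Simplified sketch: forward returns (fixed-size tensor, variable-length tensor)."
    def __init__(self, hidden=64):
        super().__init__()
        self.encoder    = nn.Linear(hidden, hidden)
        self.score_head = nn.Linear(hidden, 1)
        self.seq_head   = nn.Linear(hidden, 1)

    def forward(self, x):
        # x: [batch, max_length, hidden] -- max_length differs from batch to batch
        feats = torch.relu(self.encoder(x))
        score = self.score_head(feats[:, 0])       # [batch, 1]
        seq   = self.seq_head(feats).squeeze(-1)   # [batch, max_length]
        return score, seq
```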

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
c:\work\ml\fastai2\fastai2\torch_core.py in to_concat(xs, dim)
    216     #   in this case we return a big list
--> 217     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
    218     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])

RuntimeError: invalid argument 0: Sizes of tensors must match except in dimension 0. Got 162 and 163 in dimension 1 at C:\w\1\s\tmp_conda_3.7_100118\conda\conda-bld\pytorch_1579082551706\work\aten\src\TH/generic/THTensor.cpp:612

During handling of the above exception, another exception occurred:

RuntimeError                              Traceback (most recent call last)
<ipython-input-262-e640bdd31b27> in <module>
      1 # %debug
----> 2 preds = learner.get_preds()

c:\work\ml\fastai2\fastai2\learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, **kwargs)
    202             self(event.begin_epoch if inner else _before_epoch)
    203             self._do_epoch_validate(dl=dl)
--> 204             self(event.after_epoch if inner else _after_epoch)
    205             if act is None: act = getattr(self.loss_func, 'activation', noop)
    206             res = cb.all_tensors()

c:\work\ml\fastai2\fastai2\learner.py in __call__(self, event_name)
    106     def ordered_cbs(self, cb_func): return [cb for cb in sort_by_run(self.cbs) if hasattr(cb, cb_func)]
    107 
--> 108     def __call__(self, event_name): L(event_name).map(self._call_one)
    109     def _call_one(self, event_name):
    110         assert hasattr(event, event_name)

c:\work\ml\fastcore\fastcore\foundation.py in map(self, f, *args, **kwargs)
    360              else f.format if isinstance(f,str)
    361              else f.__getitem__)
--> 362         return self._new(map(g, self))
    363 
    364     def filter(self, f, negate=False, **kwargs):

c:\work\ml\fastcore\fastcore\foundation.py in _new(self, items, *args, **kwargs)
    313     @property
    314     def _xtra(self): return None
--> 315     def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
    316     def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
    317     def copy(self): return self._new(self.items.copy())

c:\work\ml\fastcore\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     39             return x
     40 
---> 41         res = super().__call__(*((x,) + args), **kwargs)
     42         res._newchk = 0
     43         return res

c:\work\ml\fastcore\fastcore\foundation.py in __init__(self, items, use_list, match, *rest)
    304         if items is None: items = []
    305         if (use_list is not None) or not _is_array(items):
--> 306             items = list(items) if use_list else _listify(items)
    307         if match is not None:
    308             if is_coll(match): match = len(match)

c:\work\ml\fastcore\fastcore\foundation.py in _listify(o)
    240     if isinstance(o, list): return o
    241     if isinstance(o, str) or _is_array(o): return [o]
--> 242     if is_iter(o): return list(o)
    243     return [o]
    244 

c:\work\ml\fastcore\fastcore\foundation.py in __call__(self, *args, **kwargs)
    206             if isinstance(v,_Arg): kwargs[k] = args.pop(v.i)
    207         fargs = [args[x.i] if isinstance(x, _Arg) else x for x in self.pargs] + args[self.maxi+1:]
--> 208         return self.fn(*fargs, **kwargs)
    209 
    210 # Cell

c:\work\ml\fastai2\fastai2\learner.py in _call_one(self, event_name)
    109     def _call_one(self, event_name):
    110         assert hasattr(event, event_name)
--> 111         [cb(event_name) for cb in sort_by_run(self.cbs)]
    112 
    113     def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

c:\work\ml\fastai2\fastai2\learner.py in <listcomp>(.0)
    109     def _call_one(self, event_name):
    110         assert hasattr(event, event_name)
--> 111         [cb(event_name) for cb in sort_by_run(self.cbs)]
    112 
    113     def _bn_bias_state(self, with_bias): return bn_bias_params(self.model, with_bias).map(self.opt.state)

c:\work\ml\fastai2\fastai2\callback\core.py in __call__(self, event_name)
     21         _run = (event_name not in _inner_loop or (self.run_train and getattr(self, 'training', True)) or
     22                (self.run_valid and not getattr(self, 'training', False)))
---> 23         if self.run and _run: getattr(self, event_name, noop)()
     24         if event_name=='after_fit': self.run=True #Reset self.run to True at each end of fit
     25 

c:\work\ml\fastai2\fastai2\callback\core.py in after_fit(self)
     93         "Concatenate all recorded tensors"
     94         if self.with_input:     self.inputs  = detuplify(to_concat(self.inputs, dim=self.concat_dim))
---> 95         if not self.save_preds: self.preds   = detuplify(to_concat(self.preds, dim=self.concat_dim))
     96         if not self.save_targs: self.targets = detuplify(to_concat(self.targets, dim=self.concat_dim))
     97         if self.with_loss:      self.losses  = to_concat(self.losses)

c:\work\ml\fastai2\fastai2\torch_core.py in to_concat(xs, dim)
    211 def to_concat(xs, dim=0):
    212     "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213     if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
    214     if isinstance(xs[0],dict):  return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
    215     #We may receives xs that are not concatenatable (inputs of a text classifier for instance),

c:\work\ml\fastai2\fastai2\torch_core.py in <listcomp>(.0)
    211 def to_concat(xs, dim=0):
    212     "Concat the element in `xs` (recursively if they are tuples/lists of tensors)"
--> 213     if is_listy(xs[0]): return type(xs[0])([to_concat([x[i] for x in xs], dim=dim) for i in range_of(xs[0])])
    214     if isinstance(xs[0],dict):  return {k: to_concat([x[k] for x in xs], dim=dim) for k in xs.keys()}
    215     #We may receives xs that are not concatenatable (inputs of a text classifier for instance),

c:\work\ml\fastai2\fastai2\torch_core.py in to_concat(xs, dim)
    217     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
    218     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
--> 219                           for i in range_of(o_)) for o_ in xs], L())
    220 
    221 # Cell

c:\work\ml\fastai2\fastai2\torch_core.py in <listcomp>(.0)
    217     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
    218     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
--> 219                           for i in range_of(o_)) for o_ in xs], L())
    220 
    221 # Cell

c:\work\ml\fastcore\fastcore\foundation.py in __call__(cls, x, *args, **kwargs)
     39             return x
     40 
---> 41         res = super().__call__(*((x,) + args), **kwargs)
     42         res._newchk = 0
     43         return res

c:\work\ml\fastcore\fastcore\foundation.py in __init__(self, items, use_list, match, *rest)
    304         if items is None: items = []
    305         if (use_list is not None) or not _is_array(items):
--> 306             items = list(items) if use_list else _listify(items)
    307         if match is not None:
    308             if is_coll(match): match = len(match)

c:\work\ml\fastcore\fastcore\foundation.py in _listify(o)
    240     if isinstance(o, list): return o
    241     if isinstance(o, str) or _is_array(o): return [o]
--> 242     if is_iter(o): return list(o)
    243     return [o]
    244 

c:\work\ml\fastai2\fastai2\torch_core.py in <genexpr>(.0)
    217     try:    return retain_type(torch.cat(xs, dim=dim), xs[0])
    218     except: return sum([L(retain_type(o_.index_select(dim, tensor(i)).squeeze(dim), xs[0])
--> 219                           for i in range_of(o_)) for o_ in xs], L())
    220 
    221 # Cell

RuntimeError: Expected object of scalar type Long but got scalar type Int for argument #3 'index' in call to _th_index_select

Fixed this by fixing the length of the second tensor my model returns. Instead of returning a tensor whose second dimension varies with the batch, I always return a tensor of shape [batch, 1000] (padded), so `torch.cat` inside `to_concat` can concatenate the predictions from all batches. Now everything lines up.
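Roughly, the padding I mean looks like this (`MAX_LEN` and `pad_to_fixed` are just illustrative names, not fastai API):

```python
import torch.nn.functional as F

MAX_LEN = 1000  # fixed second dimension so every batch can be concatenated

def pad_to_fixed(t, max_len=MAX_LEN):
    "Right-pad (or truncate) `t` of shape [batch, seq_len] to [batch, max_len]."
    seq_len = t.shape[1]
    if seq_len >= max_len:
        return t[:, :max_len]
    return F.pad(t, (0, max_len - seq_len))  # pad the last dimension on the right
```

In the model's forward I then return `score, pad_to_fixed(seq)` instead of the variable-length tensor, and `get_preds` can concatenate the predictions across batches without error.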
