I thought it might have something to do with BCEWithLogitsLossFlat, so I swapped it for BCELossFlat and added a sigmoid layer to the model. But then I get a different error.
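For reference, the two setups should be numerically equivalent, which is why I expected the swap to be harmless. This is just a minimal sketch (the tensor values are made up) comparing BCE-with-logits against sigmoid followed by plain BCE:

import torch
import torch.nn as nn

logits  = torch.randn(4, 1)                      # hypothetical raw model outputs
targets = torch.randint(0, 2, (4, 1)).float()    # hypothetical binary targets

loss_with_logits   = nn.BCEWithLogitsLoss()(logits, targets)
loss_after_sigmoid = nn.BCELoss()(torch.sigmoid(logits), targets)
print(torch.allclose(loss_with_logits, loss_after_sigmoid))  # True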
import torch
import torch.nn as nn

class NNet(nn.Module):
    def __init__(self, input_dim, output_dim, d_model=10, nhead=1, num_layers=1, ps=0.1):
        super().__init__()
        encoder_layer = nn.TransformerEncoderLayer(d_model=d_model, nhead=nhead)
        self.transformer_encoder = nn.TransformerEncoder(encoder_layer, num_layers=num_layers)
        self.layers = nn.Sequential(
            nn.Dropout(ps),
            nn.Linear(input_dim, d_model),    # project continuous features to d_model
            nn.GELU(),
            self.transformer_encoder,
            nn.utils.weight_norm(nn.Linear(d_model, output_dim)),
            nn.Sigmoid(),                     # added so BCELossFlat receives probabilities
        )

    def forward(self, cate_x, cont_x):
        # x = torch.Tensor(cont_x)
        x = cont_x.unsqueeze(0)               # add a sequence dimension for the encoder
        x = self.layers(x)
        return x
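As a quick sanity check on shapes outside of fastai (a sketch with made-up dimensions; input_dim=20, output_dim=1 and the batch size of 8 are placeholders, not my real values):

model  = NNet(input_dim=20, output_dim=1)
cate_x = torch.zeros(8, 0, dtype=torch.long)  # categorical block unused in forward
cont_x = torch.randn(8, 20)                   # batch of 8 rows, 20 continuous features
out    = model(cate_x, cont_x)
print(out.shape)                              # torch.Size([1, 8, 1]) after the unsqueeze(0)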
learn = Learner(dls, model, loss_func=BCELossFlat(),
                opt_func=Adam, cbs=CudaCallback)
dl = learn.dls.test_dl(test)
learn.get_preds(dl=dl)
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-19-83a9de402381> in <module>
----> 1 learn.get_preds(dl=dl)
/opt/conda/lib/python3.7/site-packages/fastai/learner.py in get_preds(self, ds_idx, dl, with_input, with_decoded, with_loss, act, inner, reorder, cbs, n_workers, **kwargs)
240 res[pred_i] = act(res[pred_i])
241 if with_decoded: res.insert(pred_i+2, getattr(self.loss_func, 'decodes', noop)(res[pred_i]))
--> 242 if reorder and hasattr(dl, 'get_idxs'): res = nested_reorder(res, tensor(idxs).argsort())
243 return tuple(res)
244 self._end_cleanup()
/opt/conda/lib/python3.7/site-packages/fastai/torch_core.py in nested_reorder(t, idxs)
651 "Reorder all tensors in `t` using `idxs`"
652 if isinstance(t, (Tensor,L)): return t[idxs]
--> 653 elif is_listy(t): return type(t)(nested_reorder(t_, idxs) for t_ in t)
654 if t is None: return t
655 raise TypeError(f"Expected tensor, tuple, list or L but got {type(t)}")
/opt/conda/lib/python3.7/site-packages/fastai/torch_core.py in <genexpr>(.0)
651 "Reorder all tensors in `t` using `idxs`"
652 if isinstance(t, (Tensor,L)): return t[idxs]
--> 653 elif is_listy(t): return type(t)(nested_reorder(t_, idxs) for t_ in t)
654 if t is None: return t
655 raise TypeError(f"Expected tensor, tuple, list or L but got {type(t)}")
/opt/conda/lib/python3.7/site-packages/fastai/torch_core.py in nested_reorder(t, idxs)
650 def nested_reorder(t, idxs):
651 "Reorder all tensors in `t` using `idxs`"
--> 652 if isinstance(t, (Tensor,L)): return t[idxs]
653 elif is_listy(t): return type(t)(nested_reorder(t_, idxs) for t_ in t)
654 if t is None: return t
/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in __getitem__(self, idx)
217 def _xtra(self): return None
218 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
--> 219 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
220 def copy(self): return self._new(self.items.copy())
221
/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in _get(self, i)
225 return (self.items.iloc[list(i)] if hasattr(self.items,'iloc')
226 else self.items.__array__()[(i,)] if hasattr(self.items,'__array__')
--> 227 else [self.items[i_] for i_ in i])
228
229 def __setitem__(self, idx, o):
/opt/conda/lib/python3.7/site-packages/fastcore/foundation.py in <listcomp>(.0)
225 return (self.items.iloc[list(i)] if hasattr(self.items,'iloc')
226 else self.items.__array__()[(i,)] if hasattr(self.items,'__array__')
--> 227 else [self.items[i_] for i_ in i])
228
229 def __setitem__(self, idx, o):
IndexError: list index out of range
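From the traceback, the failure is in nested_reorder: get_preds tries to re-sort the results using the dataloader's get_idxs, and one of those indices falls outside the list it is indexing (hence the IndexError). A possible workaround I have not verified against the underlying cause: skip the reordering step via the reorder argument that appears in the get_preds signature above.

dl = learn.dls.test_dl(test)
preds, _ = learn.get_preds(dl=dl, reorder=False)  # skip nested_reorder; predictions stay in dataloader order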