I have been working on the kaggle competition
mozilla.pdf (808.8 KB)
I took the liberty of implementing it using a tabular model.
But I’m getting this error message.
The whole code is attached in the spreadsheet.
RuntimeError Traceback (most recent call last)
in ()
1 learn = get_tabular_learner(data, layers=[200,100], emb_szs={'content': 300, 'title':300, 'author':300, 'domain':300, 'tags':300, 'url':300}, metrics=exp_rmspe)
----> 2 learn.fit(1, 1e-2)
~/fastai/fastai/basic_train.py in fit(self, epochs, lr, wd, callbacks)
136 callbacks = [cb(self) for cb in self.callback_fns] + listify(callbacks)
137 fit(epochs, self.model, self.loss_fn, opt=self.opt, data=self.data, metrics=self.metrics,
--> 138 callbacks=self.callbacks+callbacks)
139
140 def create_opt(self, lr:Floats, wd:Floats=0.)->None:
~/fastai/fastai/basic_train.py in fit(epochs, model, loss_fn, opt, data, callbacks, metrics)
89 except Exception as e:
90 exception = e
---> 91 raise e
92 finally: cb_handler.on_train_end(exception)
93
~/fastai/fastai/basic_train.py in fit(epochs, model, loss_fn, opt, data, callbacks, metrics)
79 for xb,yb in progress_bar(data.train_dl, parent=pbar):
80 xb, yb = cb_handler.on_batch_begin(xb, yb)
---> 81 loss = loss_batch(model, xb, yb, loss_fn, opt, cb_handler)[0]
82 if cb_handler.on_batch_end(loss): break
83
~/fastai/fastai/basic_train.py in loss_batch(model, xb, yb, loss_fn, opt, cb_handler, metrics)
21
22 if not loss_fn: return to_detach(out), yb[0].detach()
---> 23 loss = loss_fn(out, *yb)
24 mets = [f(out,*yb).detach().cpu() for f in metrics] if metrics is not None else []
25
~/anaconda3/lib/python3.7/site-packages/torch/nn/functional.py in cross_entropy(input, target, weight, size_average, ignore_index, reduce, reduction)
1644 if size_average is not None or reduce is not None:
1645 reduction = _Reduction.legacy_get_string(size_average, reduce)
-> 1646 return nll_loss(log_softmax(input, 1), target, weight, None, ignore_index, None, reduction)
1647
1648
~/anaconda3/lib/python3.7/site-packages/torch/nn/functional.py in log_softmax(input, dim, _stacklevel)
1068 if dim is None:
1069 dim = _get_softmax_dim('log_softmax', input.dim(), _stacklevel)
-> 1070 return input.log_softmax(dim)
1071
1072
RuntimeError: Dimension out of range (expected to be in range of [-1, 0], but got 1)