Quick post — I'll delete it if this gets resolved.
I tried creating a custom Learner with the Adadelta optimizer, but calling `lr_find()` raised a TypeError (full traceback below).
learn = Learner(data_bn, model, opt_func=optim.Adadelta, loss_func=F.mse_loss)
learn.lr_find()
TypeError Traceback (most recent call last)
<ipython-input-54-d81c6bd29d71> in <module>()
----> 1 learn.lr_find()
/opt/conda/lib/python3.6/site-packages/fastai/train.py in lr_find(learn, start_lr, end_lr, num_it, stop_div, wd)
30 cb = LRFinder(learn, start_lr, end_lr, num_it, stop_div)
31 epochs = int(np.ceil(num_it/len(learn.data.train_dl)))
---> 32 learn.fit(epochs, start_lr, callbacks=[cb], wd=wd)
33
34 def to_fp16(learn:Learner, loss_scale:float=None, max_noskip:int=1000, dynamic:bool=True, clip:float=None,
/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in fit(self, epochs, lr, wd, callbacks)
194 callbacks = [cb(self) for cb in self.callback_fns] + listify(callbacks)
195 if defaults.extra_callbacks is not None: callbacks += defaults.extra_callbacks
--> 196 fit(epochs, self, metrics=self.metrics, callbacks=self.callbacks+callbacks)
197
198 def create_opt(self, lr:Floats, wd:Floats=0.)->None:
/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in fit(epochs, learn, callbacks, metrics)
97 cb_handler.on_epoch_begin()
98 for xb,yb in progress_bar(learn.data.train_dl, parent=pbar):
---> 99 xb, yb = cb_handler.on_batch_begin(xb, yb)
100 loss = loss_batch(learn.model, xb, yb, learn.loss_func, learn.opt, cb_handler)
101 if cb_handler.on_batch_end(loss): break
/opt/conda/lib/python3.6/site-packages/fastai/callback.py in on_batch_begin(self, xb, yb, train)
276 self.state_dict.update(dict(last_input=xb, last_target=yb, train=train,
277 stop_epoch=False, skip_step=False, skip_zero=False, skip_bwd=False))
--> 278 self('batch_begin', mets = not self.state_dict['train'])
279 return self.state_dict['last_input'], self.state_dict['last_target']
280
/opt/conda/lib/python3.6/site-packages/fastai/callback.py in __call__(self, cb_name, call_mets, **kwargs)
248 if call_mets:
249 for met in self.metrics: self._call_and_update(met, cb_name, **kwargs)
--> 250 for cb in self.callbacks: self._call_and_update(cb, cb_name, **kwargs)
251
252 def set_dl(self, dl:DataLoader):
/opt/conda/lib/python3.6/site-packages/fastai/callback.py in _call_and_update(self, cb, cb_name, **kwargs)
238 def _call_and_update(self, cb, cb_name, **kwargs)->None:
239 "Call `cb_name` on `cb` and update the inner state."
--> 240 new = ifnone(getattr(cb, f'on_{cb_name}')(**self.state_dict, **kwargs), dict())
241 for k,v in new.items():
242 if k not in self.state_dict:
/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in on_batch_begin(self, train, **kwargs)
458 if train:
459 self.lrs.append(self.opt.lr)
--> 460 self.moms.append(self.opt.mom)
461
462 def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None:
/opt/conda/lib/python3.6/site-packages/fastai/callback.py in mom(self)
81
82 @property
---> 83 def mom(self)->float:return self._mom[-1]
84 @mom.setter
85 def mom(self, val:float)->None:
TypeError: 'NoneType' object is not subscriptable
Am I doing something wrong? From the traceback it looks like the failure is in the optimizer's `mom` property (`self._mom[-1]` with `_mom` being `None`) — presumably because Adadelta has no momentum hyperparameter for fastai's `OptimWrapper`/`LRFinder` to read. Is there a way to use Adadelta with `lr_find()`, or is this a known limitation?