Learner with Adadelta

Quick post, will delete if resolved.
I tried creating a custom Learner with Adadelta but got the following error:

learn = Learner(data_bn, model, opt_func=optim.Adadelta, loss_func=F.mse_loss)
TypeError                                 Traceback (most recent call last)
<ipython-input-54-d81c6bd29d71> in <module>()
----> 1 learn.lr_find()

/opt/conda/lib/python3.6/site-packages/fastai/train.py in lr_find(learn, start_lr, end_lr, num_it, stop_div, wd)
     30     cb = LRFinder(learn, start_lr, end_lr, num_it, stop_div)
     31     epochs = int(np.ceil(num_it/len(learn.data.train_dl)))
---> 32     learn.fit(epochs, start_lr, callbacks=[cb], wd=wd)
     34 def to_fp16(learn:Learner, loss_scale:float=None, max_noskip:int=1000, dynamic:bool=True, clip:float=None,

/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in fit(self, epochs, lr, wd, callbacks)
    194         callbacks = [cb(self) for cb in self.callback_fns] + listify(callbacks)
    195         if defaults.extra_callbacks is not None: callbacks += defaults.extra_callbacks
--> 196         fit(epochs, self, metrics=self.metrics, callbacks=self.callbacks+callbacks)
    198     def create_opt(self, lr:Floats, wd:Floats=0.)->None:

/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in fit(epochs, learn, callbacks, metrics)
     97             cb_handler.on_epoch_begin()
     98             for xb,yb in progress_bar(learn.data.train_dl, parent=pbar):
---> 99                 xb, yb = cb_handler.on_batch_begin(xb, yb)
    100                 loss = loss_batch(learn.model, xb, yb, learn.loss_func, learn.opt, cb_handler)
    101                 if cb_handler.on_batch_end(loss): break

/opt/conda/lib/python3.6/site-packages/fastai/callback.py in on_batch_begin(self, xb, yb, train)
    276         self.state_dict.update(dict(last_input=xb, last_target=yb, train=train, 
    277             stop_epoch=False, skip_step=False, skip_zero=False, skip_bwd=False))
--> 278         self('batch_begin', mets = not self.state_dict['train'])
    279         return self.state_dict['last_input'], self.state_dict['last_target']

/opt/conda/lib/python3.6/site-packages/fastai/callback.py in __call__(self, cb_name, call_mets, **kwargs)
    248         if call_mets:
    249             for met in self.metrics: self._call_and_update(met, cb_name, **kwargs)
--> 250         for cb in self.callbacks: self._call_and_update(cb, cb_name, **kwargs)
    252     def set_dl(self, dl:DataLoader):

/opt/conda/lib/python3.6/site-packages/fastai/callback.py in _call_and_update(self, cb, cb_name, **kwargs)
    238     def _call_and_update(self, cb, cb_name, **kwargs)->None:
    239         "Call `cb_name` on `cb` and update the inner state."
--> 240         new = ifnone(getattr(cb, f'on_{cb_name}')(**self.state_dict, **kwargs), dict())
    241         for k,v in new.items():
    242             if k not in self.state_dict:

/opt/conda/lib/python3.6/site-packages/fastai/basic_train.py in on_batch_begin(self, train, **kwargs)
    458         if train:
    459             self.lrs.append(self.opt.lr)
--> 460             self.moms.append(self.opt.mom)
    462     def on_backward_begin(self, smooth_loss:Tensor, **kwargs:Any)->None:

/opt/conda/lib/python3.6/site-packages/fastai/callback.py in mom(self)
     82     @property
---> 83     def mom(self)->float:return self._mom[-1]
     84     @mom.setter
     85     def mom(self, val:float)->None:

TypeError: 'NoneType' object is not subscriptable

Am I doing something wrong?

Looks like you might need to wrap it in an OptimWrapper:

class OptimWrapper():
"Basic wrapper around `opt` to simplify hyper-parameters changes."
     def __init__(self, opt:optim.Optimizer, wd:Floats=0., true_wd:bool=False, bn_wd:bool=True):

It works in the case of cnn_learner with opt_func=optim.Adadelta. But in the code of cnn_learner, the Learner object is created just like that, with opt_func passed through **kwargs and without any OptimWrapper.