I am getting a `TypeError: optimizer can only optimize Variables, but one of the params is float` when calling the `fit` function on a language model learner. Any help in this regard would be highly appreciated! The full traceback is below:
TypeError Traceback (most recent call last)
<ipython-input-13-08ddcd7c7a23> in <module>()
1 lr=1e-3
2 lrs = lr
----> 3 learner.fit(lrs/2, 1, wds=wd, use_clr=(32,2), cycle_len=1)
/usr/local/lib/python3.6/dist-packages/fastai/learner.py in fit(self, lrs, n_cycle, wds, **kwargs)
249 """
250 self.sched = None
--> 251 layer_opt = self.get_layer_opt(lrs, wds)
252 return self.fit_gen(self.model, self.data, layer_opt, n_cycle, **kwargs)
253
/usr/local/lib/python3.6/dist-packages/fastai/learner.py in get_layer_opt(self, lrs, wds)
221 An instance of a LayerOptimizer
222 """
--> 223 return LayerOptimizer(self.opt_fn, self.get_layer_groups(), lrs, wds)
224
225 def fit(self, lrs, n_cycle, wds=None, **kwargs):
/usr/local/lib/python3.6/dist-packages/fastai/layer_optimizer.py in __init__(self, opt_fn, layer_groups, lrs, wds)
15 if len(wds)==1: wds=wds*len(layer_groups)
16 self.layer_groups,self.lrs,self.wds = layer_groups,lrs,wds
---> 17 self.opt = opt_fn(self.opt_params())
18
19 def opt_params(self):
/usr/local/lib/python3.6/dist-packages/torch/optim/adam.py in __init__(self, params, lr, betas, eps, weight_decay)
27 defaults = dict(lr=lr, betas=betas, eps=eps,
28 weight_decay=weight_decay)
---> 29 super(Adam, self).__init__(params, defaults)
30
31 def step(self, closure=None):
/usr/local/lib/python3.6/dist-packages/torch/optim/optimizer.py in __init__(self, params, defaults)
37
38 for param_group in param_groups:
---> 39 self.add_param_group(param_group)
40
41 def __getstate__(self):
/usr/local/lib/python3.6/dist-packages/torch/optim/optimizer.py in add_param_group(self, param_group)
149 if not isinstance(param, Variable):
150 raise TypeError("optimizer can only optimize Variables, "
--> 151 "but one of the params is " + torch.typename(param))
152 if not param.requires_grad:
153 raise ValueError("optimizing a parameter that doesn't require gradients")
TypeError: optimizer can only optimize Variables, but one of the params is float