Issue Feeding Weight Decay into Bayesian Optimization

Hey all,

I’m attempting to use Bayesian Optimization for my Tabular Learner just like @muellerzr did in this thread: Tuning hyper-parameters with bayesian optimization

I’m receiving the error:

TypeError: object of type 'numpy.float64' has no len()

Now, I’ve narrowed this issue down a bit: I can do this if I set up other parameters such as the learning rate and emb_drop, but for some reason weight decay is the only one giving me issues. I’ll also add that running the fit_with function on its own (not through the Bayesian optimizer), I get no errors.
An example is given below. Any thoughts?

This is my function:

def fit_with(wdecay):
    bayeslearn = tabular_learner(dls, layers=[500,250], metrics=accuracy,
                                 emb_drop=.1, wd=wdecay)

    # Train the model at the specified weight decay (fixed lr)
    bayeslearn.fit_one_cycle(10, max_lr=.1)

    # Return accuracy on the validation set
    preds, targs = bayeslearn.get_preds(dl=dls.valid)
    acc = accuracy(preds, targs)

    acc = float(acc)
    return acc

Setting up the parameter bounds:
pbounds = {'wdecay': (4e-4, 0.4)}

Setting up the optimizer:
bayesoptimizer = BayesianOptimization(
    f=fit_with,
    pbounds=pbounds,
    verbose=2,
    random_state=1)

Running it:
bayesoptimizer.maximize()
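For what it’s worth, I’m calling maximize() with no arguments, so it uses the defaults (init_points=5 and n_iter=25 in my version, if I’m reading its signature right). Written out explicitly, that would be something like:

# Explicit budget: every probe calls fit_with once, i.e. one full
# 10-epoch training run, so I keep the counts small
bayesoptimizer.maximize(init_points=5, n_iter=10)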

EDIT: Full error below.

| iter | target | wdecay |


KeyError Traceback (most recent call last)
~/anaconda3/lib/python3.8/site-packages/bayes_opt/target_space.py in probe(self, params)
190 try:
---> 191 target = self._cache[_hashable(x)]
192 except KeyError:

KeyError: (0.16704199307914858,)

During handling of the above exception, another exception occurred:

TypeError Traceback (most recent call last)
in <module>
----> 1 bayesoptimizer.maximize()

~/anaconda3/lib/python3.8/site-packages/bayes_opt/bayesian_optimization.py in maximize(self, init_points, n_iter, acq, kappa, kappa_decay, kappa_decay_delay, xi, **gp_params)
183 iteration += 1
184
---> 185 self.probe(x_probe, lazy=False)
186
187 if self._bounds_transformer:

~/anaconda3/lib/python3.8/site-packages/bayes_opt/bayesian_optimization.py in probe(self, params, lazy)
114 self._queue.add(params)
115 else:
---> 116 self._space.probe(params)
117 self.dispatch(Events.OPTIMIZATION_STEP)
118

~/anaconda3/lib/python3.8/site-packages/bayes_opt/target_space.py in probe(self, params)
192 except KeyError:
193 params = dict(zip(self._keys, x))
---> 194 target = self.target_func(**params)
195 self.register(x, target)
196 return target

in fit_with(wdecay)
7 #Train the model at the specified learning rate
8 # with progress_disabled(bayeslearn) as bayeslearn:
----> 9 bayeslearn.fit_one_cycle(10,max_lr=.1)
10
11 #Save,Print, and return the model accuracy

~/anaconda3/lib/python3.8/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
54 init_args.update(log)
55 setattr(inst, 'init_args', init_args)
---> 56 return inst if to_return else f(*args, **kwargs)
57 return _f

~/anaconda3/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
111 scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
112 'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
---> 113 self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
114
115 # Cell

~/anaconda3/lib/python3.8/site-packages/fastcore/logargs.py in _f(*args, **kwargs)
54 init_args.update(log)
55 setattr(inst, 'init_args', init_args)
---> 56 return inst if to_return else f(*args, **kwargs)
57 return _f

~/anaconda3/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
202 if reset_opt or not self.opt: self.create_opt()
203 if wd is None: wd = self.wd
---> 204 if wd is not None: self.opt.set_hypers(wd=wd)
205 self.opt.set_hypers(lr=self.lr if lr is None else lr)
206 self.n_epoch = n_epoch

~/anaconda3/lib/python3.8/site-packages/fastai/optimizer.py in set_hypers(self, **kwargs)
32
33 def unfreeze(self): self.freeze_to(0)
---> 34 def set_hypers(self, **kwargs): L(kwargs.items()).starmap(self.set_hyper)
35 def set_hyper(self, k, v):
36     for v_,h in zip(v, self.hypers): h[k] = v_

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in starmap(self, f, *args, **kwargs)
293 def cycle(self): return cycle(self)
294 def map_dict(self, f=noop, *args, **kwargs): return {k:f(k, *args,**kwargs) for k in self}
---> 295 def starmap(self, f, *args, **kwargs): return self._new(itertools.starmap(partial(f,*args,**kwargs), self))
296 def zip(self, cycled=False): return self._new((zip_cycle if cycled else zip)(*self))
297 def zipwith(self, *rest, cycled=False): return self._new([self, *rest]).zip(cycled=cycled)

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in _new(self, items, *args, **kwargs)
216 @property
217 def _xtra(self): return None
---> 218 def _new(self, items, *args, **kwargs): return type(self)(items, *args, use_list=None, **kwargs)
219 def __getitem__(self, idx): return self._get(idx) if is_indexer(idx) else L(self._get(idx), use_list=None)
220 def copy(self): return self._new(self.items.copy())

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __call__(cls, x, *args, **kwargs)
197 def __call__(cls, x=None, *args, **kwargs):
198     if not args and not kwargs and x is not None and isinstance(x,cls): return x
---> 199     return super().__call__(x, *args, **kwargs)
200
201 # Cell

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __init__(self, items, use_list, match, *rest)
207 if items is None: items = []
208 if (use_list is not None) or not _is_array(items):
---> 209 items = list(items) if use_list else _listify(items)
210 if match is not None:
211 if is_coll(match): match = len(match)

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in _listify(o)
114 if isinstance(o, list): return o
115 if isinstance(o, str) or _is_array(o): return [o]
---> 116 if is_iter(o): return list(o)
117 return [o]
118

~/anaconda3/lib/python3.8/site-packages/fastai/optimizer.py in set_hyper(self, k, v)
41 else: v = [v.stop/10]*(len(self.param_lists)-1) + [v.stop]
42 v = L(v, use_list=None)
---> 43 if len(v)==1: v = v*len(self.param_lists)
44 assert len(v) == len(self.hypers), f"Trying to set {len(v)} values for {k} but there are {len(self.param_lists)} parameter groups."
45 self._set_hyper(k, v)

~/anaconda3/lib/python3.8/site-packages/fastcore/foundation.py in __len__(self)
186 "Base class for composing a list of items"
187 def __init__(self, items): self.items = items
---> 188 def __len__(self): return len(self.items)
189 def __getitem__(self, k): return self.items[list(k) if isinstance(k,CollBase) else k]
190 def __setitem__(self, k, v): self.items[list(k) if isinstance(k,CollBase) else k] = v

TypeError: object of type 'numpy.float64' has no len()
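If I’m reading the trace right, the failure is in fastcore rather than in my code: bayes_opt passes wdecay as a numpy.float64, fit() forwards it to set_hyper, which wraps it in L(v, use_list=None); because numpy.float64 has __array__, L keeps it unwrapped instead of listifying it, and the len(v) check on line 43 then raises. A minimal sketch of that reading (my interpretation, not verified against fastcore internals):

import numpy as np
from fastcore.foundation import L

v = np.float64(0.16704199307914858)  # the value bayes_opt probed with

# numpy.float64 has __array__, so L treats it as array-like and stores
# it as-is instead of wrapping it in a list...
wrapped = L(v, use_list=None)

# ...so len() falls through to len(numpy.float64) and raises
len(wrapped)  # TypeError: object of type 'numpy.float64' has no len()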

We can’t do anything to help without the complete stack trace.

Sorry!
Updated.

I faced the same problem while using Optuna. It’s related to fastcore. See this issue.
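A sketch of the workaround that worked for me, adapted to your fit_with above (I haven’t run it on your data): cast the sampled value to a plain Python float before it reaches fastai.

def fit_with(wdecay):
    # bayes_opt hands the objective a numpy.float64; casting it to a
    # plain Python float lets fastcore's L() listify it normally
    wdecay = float(wdecay)
    bayeslearn = tabular_learner(dls, layers=[500,250], metrics=accuracy,
                                 emb_drop=.1, wd=wdecay)
    bayeslearn.fit_one_cycle(10, max_lr=.1)
    preds, targs = bayeslearn.get_preds(dl=dls.valid)
    return float(accuracy(preds, targs))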


Thanks Victor!
I’ll test out the fix and mark this as the solution if it works!