How to modify default parameters in cnn_learner?

Is there a guide for experimenting with the defaults? I want to pass different parameters to cnn_learner and see what happens, but I'm not having much luck.

Here is some base code that the course provides:

from fastai.vision.all import *

path = untar_data(URLs.IMAGENETTE_160)

dataloaders = ImageDataLoaders.from_folder(
    path,
    valid='val',
    item_tfms=RandomResizedCrop(128, min_scale=0.35),
    batch_tfms=aug_transforms()
)

learner = cnn_learner(
    dls=dataloaders,
    arch=resnet34,
    metrics=error_rate
)

Then I try to modify loss_func like this:

learner = cnn_learner(
    dls=dataloaders,
    arch=resnet34,
#     opt_func=Adam(),
    loss_func=BCEWithLogitsLossFlat(),
    metrics=error_rate
)

and I get an error like this:

ValueError                                Traceback (most recent call last)
<ipython-input-245-a1d02bbd6880> in <module>
----> 1 learner.fine_tune(1)

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/callback/schedule.py in fine_tune(self, epochs, base_lr, freeze_epochs, lr_mult, pct_start, div, **kwargs)
    155     "Fine tune with `freeze` for `freeze_epochs` then with `unfreeze` from `epochs` using discriminative LR"
    156     self.freeze()
--> 157     self.fit_one_cycle(freeze_epochs, slice(base_lr), pct_start=0.99, **kwargs)
    158     base_lr /= 2
    159     self.unfreeze()

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    110     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    111               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 112     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    113 
    114 # Cell

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    203             self.opt.set_hypers(lr=self.lr if lr is None else lr)
    204             self.n_epoch = n_epoch
--> 205             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    206 
    207     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_fit(self)
    194         for epoch in range(self.n_epoch):
    195             self.epoch=epoch
--> 196             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    197 
    198     def fit(self, n_epoch, lr=None, wd=None, cbs=None, reset_opt=False):

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch(self)
    188 
    189     def _do_epoch(self):
--> 190         self._do_epoch_train()
    191         self._do_epoch_validate()
    192 

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_epoch_train(self)
    180     def _do_epoch_train(self):
    181         self.dl = self.dls.train
--> 182         self._with_events(self.all_batches, 'train', CancelTrainException)
    183 
    184     def _do_epoch_validate(self, ds_idx=1, dl=None):

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in all_batches(self)
    158     def all_batches(self):
    159         self.n_iter = len(self.dl)
--> 160         for o in enumerate(self.dl): self.one_batch(*o)
    161 
    162     def _do_one_batch(self):

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in one_batch(self, i, b)
    176         self.iter = i
    177         self._split(b)
--> 178         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    179 
    180     def _do_epoch_train(self):

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _with_events(self, f, event_type, ex, final)
    152 
    153     def _with_events(self, f, event_type, ex, final=noop):
--> 154         try:       self(f'before_{event_type}')       ;f()
    155         except ex: self(f'after_cancel_{event_type}')
    156         finally:   self(f'after_{event_type}')        ;final()

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/learner.py in _do_one_batch(self)
    163         self.pred = self.model(*self.xb)
    164         self('after_pred')
--> 165         if len(self.yb): self.loss = self.loss_func(self.pred, *self.yb)
    166         self('after_loss')
    167         if not self.training or not len(self.yb): return

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/losses.py in __call__(self, inp, targ, **kwargs)
     31         if targ.dtype in [torch.int8, torch.int16, torch.int32]: targ = targ.long()
     32         if self.flatten: inp = inp.view(-1,inp.shape[-1]) if self.is_2d else inp.view(-1)
---> 33         return self.func.__call__(inp, targ.view(-1) if self.flatten else targ, **kwargs)
     34 
     35 # Cell

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
    725             result = self._slow_forward(*input, **kwargs)
    726         else:
--> 727             result = self.forward(*input, **kwargs)
    728         for hook in itertools.chain(
    729                 _global_forward_hooks.values(),

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/nn/modules/loss.py in forward(self, input, target)
    627 
    628     def forward(self, input: Tensor, target: Tensor) -> Tensor:
--> 629         return F.binary_cross_entropy_with_logits(input, target,
    630                                                   self.weight,
    631                                                   pos_weight=self.pos_weight,

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/nn/functional.py in binary_cross_entropy_with_logits(input, target, weight, size_average, reduce, reduction, pos_weight)
   2568         tens_ops = (input, target)
   2569         if any([type(t) is not Tensor for t in tens_ops]) and has_torch_function(tens_ops):
-> 2570             return handle_torch_function(
   2571                 binary_cross_entropy_with_logits, tens_ops, input, target, weight=weight,
   2572                 size_average=size_average, reduce=reduce, reduction=reduction,

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/overrides.py in handle_torch_function(public_api, relevant_args, *args, **kwargs)
   1061         # Use `public_api` instead of `implementation` so __torch_function__
   1062         # implementations can do equality/identity comparisons.
-> 1063         result = overloaded_arg.__torch_function__(public_api, types, args, kwargs)
   1064 
   1065         if result is not NotImplemented:

~/miniconda3/envs/fastai/lib/python3.8/site-packages/fastai/torch_core.py in __torch_function__(self, func, types, args, kwargs)
    317 #         if func.__name__[0]!='_': print(func, types, args, kwargs)
    318 #         with torch._C.DisableTorchFunction(): ret = _convert(func(*args, **(kwargs or {})), self.__class__)
--> 319         ret = super().__torch_function__(func, types, args=args, kwargs=kwargs)
    320         if isinstance(ret, TensorBase): ret.set_meta(self, as_copy=True)
    321         return ret

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/tensor.py in __torch_function__(cls, func, types, args, kwargs)
    993 
    994         with _C.DisableTorchFunction():
--> 995             ret = func(*args, **kwargs)
    996             return _convert(ret, cls)
    997 

~/miniconda3/envs/fastai/lib/python3.8/site-packages/torch/nn/functional.py in binary_cross_entropy_with_logits(input, target, weight, size_average, reduce, reduction, pos_weight)
   2578 
   2579     if not (target.size() == input.size()):
-> 2580         raise ValueError("Target size ({}) must be the same as input size ({})".format(target.size(), input.size()))
   2581 
   2582     return torch.binary_cross_entropy_with_logits(input, target, weight, pos_weight, reduction_enum)

ValueError: Target size (torch.Size([64])) must be the same as input size (torch.Size([640]))
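Reading the last frame of the trace, the mismatch looks like 640 = 64 × 10, i.e. my batch size times the 10 Imagenette classes: BCEWithLogitsLossFlat flattens the [64, 10] predictions down to [640] and expects float targets of the same shape (one-hot / multi-label style), while my dataloader yields one integer label per image, shape [64]. Here is a minimal plain-PyTorch sketch of what I think is happening (the shapes are taken from the error above; the rest is my assumption):

import torch
import torch.nn.functional as F

preds = torch.randn(64, 10)          # model output: batch of 64, 10 classes
targs = torch.randint(0, 10, (64,))  # integer class labels, shape [64]

# cross_entropy accepts integer class indices directly, so the default
# CrossEntropyLossFlat works with targets shaped like these:
loss_ce = F.cross_entropy(preds, targs)

# binary_cross_entropy_with_logits wants targets with the same shape as
# the predictions; with plain integer labels it raises the same
# "Target size ... must be the same as input size" error as above:
one_hot = F.one_hot(targs, num_classes=10).float()
loss_bce = F.binary_cross_entropy_with_logits(preds, one_hot)

So is BCEWithLogitsLossFlat simply the wrong loss for this single-label setup, or is there a way to make the dataloaders produce targets it can consume?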