RuntimeError: mat1 dim 1 must match mat2 dim 0

Hi there, I can't figure out this error. Help, please.
I guess I'm not doing something correctly. Here is my setup; when I begin training I get the traceback below:

model = densenet161(pretrained = True)
model.features[0] = nn.Conv2d(1, 96, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)
model_ft.classifier = nn.Linear(1024, 22)

learn = Learner(dls, 
                model, 
                opt_func  = opt_func,
                act_cls   = act_fn,
                metrics   = [error_rate, accuracy],
                cbs       = [SaveModelCallback(monitor='accuracy')],
                loss_func = LabelSmoothingCrossEntropy())

learn.freeze()
learn.fit_one_cycle(6, lr_max=slice(3e-3))

---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-49-a5de060fe987> in <module>
      1 learn2.freeze()
----> 2 learn2.fit_one_cycle(5, lr_max=slice(3e-3))

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastcore/utils.py in _f(*args, **kwargs)
    429         init_args.update(log)
    430         setattr(inst, 'init_args', init_args)
--> 431         return inst if to_return else f(*args, **kwargs)
    432     return _f
    433 

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastai2/callback/schedule.py in fit_one_cycle(self, n_epoch, lr_max, div, div_final, pct_start, wd, moms, cbs, reset_opt)
    111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
    112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    114 
    115 # Cell

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastcore/utils.py in _f(*args, **kwargs)
    429         init_args.update(log)
    430         setattr(inst, 'init_args', init_args)
--> 431         return inst if to_return else f(*args, **kwargs)
    432     return _f
    433 

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastai2/learner.py in fit(self, n_epoch, lr, wd, cbs, reset_opt)
    201                     try:
    202                         self.epoch=epoch;          self('begin_epoch')
--> 203                         self._do_epoch_train()
    204                         self._do_epoch_validate()
    205                     except CancelEpochException:   self('after_cancel_epoch')

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastai2/learner.py in _do_epoch_train(self)
    173         try:
    174             self.dl = self.dls.train;                        self('begin_train')
--> 175             self.all_batches()
    176         except CancelTrainException:                         self('after_cancel_train')
    177         finally:                                             self('after_train')

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastai2/learner.py in all_batches(self)
    151     def all_batches(self):
    152         self.n_iter = len(self.dl)
--> 153         for o in enumerate(self.dl): self.one_batch(*o)
    154 
    155     def one_batch(self, i, b):

~/miniconda3/envs/torch/lib/python3.8/site-packages/fastai2/learner.py in one_batch(self, i, b)
    157         try:
    158             self._split(b);                                  self('begin_batch')
--> 159             self.pred = self.model(*self.xb);                self('after_pred')
    160             if len(self.yb) == 0: return
    161             self.loss = self.loss_func(self.pred, *self.yb); self('after_loss')

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
    720             result = self._slow_forward(*input, **kwargs)
    721         else:
--> 722             result = self.forward(*input, **kwargs)
    723         for hook in itertools.chain(
    724                 _global_forward_hooks.values(),

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/container.py in forward(self, input)
    115     def forward(self, input):
    116         for module in self:
--> 117             input = module(input)
    118         return input
    119 

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
    720             result = self._slow_forward(*input, **kwargs)
    721         else:
--> 722             result = self.forward(*input, **kwargs)
    723         for hook in itertools.chain(
    724                 _global_forward_hooks.values(),

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/container.py in forward(self, input)
    115     def forward(self, input):
    116         for module in self:
--> 117             input = module(input)
    118         return input
    119 

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/module.py in _call_impl(self, *input, **kwargs)
    720             result = self._slow_forward(*input, **kwargs)
    721         else:
--> 722             result = self.forward(*input, **kwargs)
    723         for hook in itertools.chain(
    724                 _global_forward_hooks.values(),

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/modules/linear.py in forward(self, input)
     89 
     90     def forward(self, input: Tensor) -> Tensor:
---> 91         return F.linear(input, self.weight, self.bias)
     92 
     93     def extra_repr(self) -> str:

~/miniconda3/envs/torch/lib/python3.8/site-packages/torch/nn/functional.py in linear(input, weight, bias)
   1672     if input.dim() == 2 and bias is not None:
   1673         # fused op is marginally faster
-> 1674         ret = torch.addmm(bias, input, weight.t())
   1675     else:
   1676         output = input.matmul(weight.t())

RuntimeError: mat1 dim 1 must match mat2 dim 0

Hola Oscar,

model_ft.classifier = nn.Linear(1024, 22)

This statement doesn't make sense: the model you defined is named model, not model_ft, and it is the model.classifier layer that needs replacing to change the number of output classes. Perhaps you meant:
model.classifier = nn.Linear(1024, 22)
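
For reference, here is a minimal sketch of the whole adaptation (my assumption: you are using torchvision's densenet161, whose classifier takes 2208 input features, not 1024; reading in_features off the existing layer avoids hard-coding the wrong size):

import torch.nn as nn
from torchvision.models import densenet161

model = densenet161(pretrained=True)

# Swap the stem conv to accept single-channel input instead of RGB.
model.features[0] = nn.Conv2d(1, 96, kernel_size=7, stride=2, padding=3, bias=False)

# densenet161's feature extractor produces 2208 features, so
# Linear(1024, 22) cannot match. Reading in_features from the
# existing classifier avoids guessing the number.
model.classifier = nn.Linear(model.classifier.in_features, 22)

If so, that 1024 vs. 2208 mismatch is exactly what torch.addmm is complaining about in your traceback.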

Whenever I run into a size mismatch between layers, I first try learn.summary() (model_summary in fastai2). It shows, via its output (or the point where it errors), the shape that each layer passes to the next. From this I can see whether my layer changes are correct.
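
If summary itself errors out before printing anything useful, a quick plain-PyTorch check works too (a sketch, assuming 224x224 single-channel inputs):

import torch

# Run a dummy single-channel batch through the feature extractor to
# see how many channels actually reach the classifier.
with torch.no_grad():
    feats = model.features(torch.randn(1, 1, 224, 224))
print(feats.shape)  # for densenet161: torch.Size([1, 2208, 7, 7])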

HTH, Malcolm

Hi @Pomo,

Thanks for responding! I didn't know about learn.summary(), thanks!

Now I am getting this error:

TypeError: forward() missing 1 required positional argument: 'input'

but it only happens with densenet201; with the other variants (densenet161, etc.) I don't get this error.

If you would post an example notebook that generates the error, I will try to help.

Hi @Pomo,

Thanks, but I can't share the code.