If resnet50 is defined like this:
def forward(self, x):
    """Run the ResNet-50 pipeline: stem, four residual stages, pooling, classifier.

    The backbone stages are applied in order, then the pooled feature map is
    flattened to (batch, features) and passed through the final linear layer.
    """
    stages = (
        self.conv1, self.bn1, self.relu, self.maxpool,
        self.layer1, self.layer2, self.layer3, self.layer4,
        self.avgpool,
    )
    for stage in stages:
        x = stage(x)
    # Flatten everything after the batch dimension before the classifier.
    return self.fc(x.view(x.size(0), -1))
I use
# Build a fastai learner from a resnet50 backbone; fastai replaces the
# original classifier with its own head.
learn = create_cnn(
data,
resnet50,
ps=0.5,  # dropout probability used in the generated head
cut=-2, #!!! presumably drops the last two children (avgpool, fc) so only the conv backbone is kept — TODO confirm against fastai's create_cnn
path=path,
metrics=[acc],
)
If senet154 is defined like this:
def features(self, x):
    """Return the convolutional feature map produced by the five SENet stages."""
    out = x
    for stage in (self.layer0, self.layer1, self.layer2,
                  self.layer3, self.layer4):
        out = stage(out)
    return out
def logits(self, x):
    """Pool, regularize, flatten, and classify the feature map."""
    pooled = self.dropout(self.avg_pool(x))
    # Collapse all dimensions after batch before the final linear layer.
    flat = pooled.view(pooled.size(0), -1)
    return self.last_linear(flat)
def forward(self, x):
    """Full SENet pass: feature extractor followed by the classification head."""
    return self.logits(self.features(x))
I use
# Build a fastai learner from the senet154 backbone.
learn = create_cnn(
data,
senet154,
ps=0.5,  # dropout probability used in the generated head
cut=-3, #!!! because of "x = self.dropout(x)" — presumably cuts before avg_pool/dropout/last_linear so only the feature extractor is kept — TODO confirm
path=path,
metrics=[acc],
)
Is this use of "cut" right? Do I need to find `self.avg_pool` in every network and then set "cut" accordingly?
I use the senet154 from "fastai/old/fastai/models/senet.py" on fastai "1.0.22".
bs = 16

# DataBunch from pre-built datasets; transforms applied to the training set
# only, images resized to 512, normalized with stats `kk` (defined elsewhere).
data = ImageDataBunch.create(
    train_ds, val_ds, test_ds=test_ds, path=path, bs=bs,
    tfms=(tfms, []), num_workers=8, size=512,
).normalize(kk)

learn = create_cnn(
    data,
    senet154,
    ps=0.5,   # dropout probability for the fastai head
    cut=-2,   # keep all children except the last two
    path=path,
    metrics=[acc],
)

# Train on multiple GPUs.
learn.model = nn.DataParallel(learn.model)

# FIX: the string quotes here were mis-encoded curly quotes
# (every=âimprovementâ), which is a SyntaxError in Python.
learn.callback_fns.append(partial(SaveModel, every='improvement', monitor='val_loss'))

# NOTE(review): the "Expected more than 1 value per channel when training"
# error typically means a batch of size 1 reached a BatchNorm layer in
# training mode (e.g. len(train_ds) % bs == 1) — consider changing bs or
# dropping the last incomplete batch; confirm against your dataset size.
learn.fit_one_cycle(5, lrs)
After 1 epoch ends, it gives:
"Expected more than 1 value per channel when training"…