@oguiza I implemented the Inception module today, it looks like this:
class InceptionModule(nn.Module):
    """One Inception block for 1-d sequences.

    Runs three parallel convolutions (kernel sizes `kss`) plus a
    MaxPool->1x1-conv branch, concatenates the four outputs on the channel
    dim, then applies BatchNorm + ReLU. Output has ``4 * nb_filters`` channels.

    Args:
        ni: number of input channels.
        use_bottleneck: if True, a 1x1 conv first reduces `ni` to
            `bottleneck_size` channels before the parallel branches.
        kss: the three kernel sizes for the parallel convolutions.
        bottleneck_size: channel count after the bottleneck conv.
        nb_filters: output channels of each of the four branches.
        stride: stride shared by the branches (also the MaxPool stride).
    """

    def __init__(self, ni, use_bottleneck=True, kss=(41, 21, 11), bottleneck_size=32, nb_filters=32, stride=1):
        super().__init__()
        if use_bottleneck:
            self.conv0 = nn.Conv1d(ni, bottleneck_size, 1, bias=False)
            branch_ni = bottleneck_size
        else:
            self.conv0 = noop
            # Bug fix: without the bottleneck the branches still see `ni`
            # channels, not `bottleneck_size` — the original always built the
            # branch convs with `bottleneck_size` inputs, which breaks whenever
            # ni != bottleneck_size (e.g. the stem layer of create_inception).
            branch_ni = ni
        self.conv1 = conv(branch_ni, nb_filters, kss[0])
        self.conv2 = conv(branch_ni, nb_filters, kss[1])
        self.conv3 = conv(branch_ni, nb_filters, kss[2])
        self.conv_bottle = nn.Sequential(nn.MaxPool1d(3, stride, padding=1),
                                         nn.Conv1d(branch_ni, nb_filters, 1, bias=False))
        self.bn_relu = nn.Sequential(nn.BatchNorm1d(4 * nb_filters),
                                     nn.ReLU())

    def forward(self, x):
        # Optional bottleneck, then concatenate the four branches channel-wise.
        x = self.conv0(x)
        return self.bn_relu(torch.cat([self.conv1(x), self.conv2(x),
                                       self.conv3(x), self.conv_bottle(x)], dim=1))
and to create the network:
def create_inception(ni, nout, kss=(41, 21, 11), stride=1, depth=6, bottleneck_size=32, nb_filters=32, head=True):
    """Build an InceptionTime-style network of `depth` Inception blocks.

    Args:
        ni: input channels.
        nout: number of output classes (size of the final Linear layer).
        kss: kernel sizes passed to every `InceptionModule`.
        stride: stride passed to every `InceptionModule`.
        depth: total number of Inception blocks.
        bottleneck_size: bottleneck channels for the non-stem blocks.
        nb_filters: per-branch filters; each block outputs 4*nb_filters channels.
        head: if True, append AdaptiveConcatPool1d + Flatten + Linear classifier.
    """
    # Stem block: no bottleneck, since the raw input has only `ni` channels.
    layers = [InceptionModule(ni, kss=kss, use_bottleneck=False, stride=stride), MergeLayer(), nn.ReLU()]
    # Bug fix: the original used `(depth-1) * [InceptionModule(...), ...]`,
    # which repeats references to ONE module instance, so every deeper stage
    # shared the same weights. Instantiate a fresh block per stage instead.
    for _ in range(depth - 1):
        layers += [InceptionModule(4 * nb_filters, kss=kss, bottleneck_size=bottleneck_size, stride=stride),
                   MergeLayer(), nn.ReLU()]
    # NOTE(review): MergeLayer adds the block's input back to its output; for
    # the stem that input has `ni` channels vs 4*nb_filters out — confirm the
    # residual broadcast/shortcut is handled, or the first merge will fail.
    head_layers = [AdaptiveConcatPool1d(), Flatten(), nn.Linear(8 * nb_filters, nout)] if head else []
    return SequentialEx(*layers, *head_layers)
I think it can be simplified a bit. @hfawaz can you check whether it is correct? From my initial testing it is not training well: the 40 epochs that are enough for the resnet barely change the InceptionTime's performance, so I probably have a bug somewhere.