Biases are None

I created a model as in lesson 11, code below:

def conv_layer(ni, nf, ks=3, stride=2, bn=True, **kwargs):
    # No bias needed if using bn
    layers = [nn.Conv2d(ni, nf, ks, padding=ks//2, stride=stride, bias=not bn),
              GeneralRelu(**kwargs)]
    if bn: layers.append(BatchNorm(nf))
    return nn.Sequential(*layers)

class Lambda(nn.Module):
    def __init__(self, func):
        super().__init__()
        self.func = func
        
    def forward(self,x):
        return self.func(x)
    
def flatten(x): return x.view(x.shape[0], -1)

import math
def prev_pow_2(x): return 2**math.floor(math.log2(x))

def get_cnn_layers(data, nfs, layer, c_in = 3, c_out = 1, **kwargs):
    def f(ni, nf, stride=2): return layer(ni, nf, 3, stride=stride, **kwargs)
    l1 = c_in
    l2 = prev_pow_2(l1*3*3)
    layers =  [f(l1  , l2  , stride=1),
               f(l2  , l2*2, stride=2),
               f(l2*2, l2*4, stride=2)]
    nfs = [l2*4] + nfs
    layers += [f(nfs[i], nfs[i+1]) for i in range(len(nfs)-1)]
    layers += [nn.AdaptiveAvgPool2d(1), Lambda(flatten), 
               nn.Linear(nfs[-1], c_out)]
    return layers

def get_cnn_model(data, nfs, layer, **kwargs):
    return nn.Sequential(*get_cnn_layers(data, nfs, layer, **kwargs)) 

However, when I tried to initialize the model with:

def init_cnn(m, uniform = False):
    f = kaiming_uniform_ if uniform else kaiming_normal_
    for l in m:
        if isinstance(l, nn.Sequential):
            f(l[0].weight, a = 0.1)
            l[0].bias.data.zero_()

I got an error saying:

AttributeError: 'NoneType' object has no attribute 'data'

Why are the biases None? Should I just add an if condition to zero the biases only if they are not None? And finally, does it matter that the biases are None?

I suspect the default in conv_layer is to use batchnorm, and hence no bias is needed: with bn=True, bias=not bn evaluates to bias=False, so the Conv2d layers are created without a bias parameter.

def conv_layer(ni, nf, ks=3, stride=2, bn=True, **kwargs):
    # No bias needed if using bn
    layers = [nn.Conv2d(ni, nf, ks, padding=ks//2, stride=stride, bias=not bn),
              GeneralRelu(**kwargs)]
    if bn: layers.append(BatchNorm(nf))
    return nn.Sequential(*layers)
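To see where the None comes from: when a Conv2d is constructed with bias=False, PyTorch stores None as its bias attribute. A quick check (not from the notebook, just an illustration):

import torch.nn as nn

conv = nn.Conv2d(3, 8, 3, padding=1, bias=False)
print(conv.bias)            # None, so conv.bias.data raises AttributeError
print(conv.bias is None)    # True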

Refer to the 07_batchnorm notebook here.
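So guarding the bias initialisation is enough. A minimal sketch of init_cnn with that check added (everything else unchanged from the code above):

import torch.nn as nn
from torch.nn.init import kaiming_normal_, kaiming_uniform_

def init_cnn(m, uniform=False):
    f = kaiming_uniform_ if uniform else kaiming_normal_
    for l in m:
        if isinstance(l, nn.Sequential):
            f(l[0].weight, a=0.1)
            # Only zero the bias when the conv layer actually has one
            if l[0].bias is not None: l[0].bias.data.zero_()

And as the comment in conv_layer says, it does not matter that these biases are None: the BatchNorm that follows each conv has its own learnable shift, which makes a conv bias redundant.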