GAN Throwing Mysterious TypeError: forward() missing 1 required positional argument: 'input'

Hey All!

I’m somewhat new to fastai, so when this error popped up I couldn’t work out why it’s occurring. I’m trying to train a GAN on spectrograms of audio signals, so that I can later pull out the GAN’s inner layers and plot how the images cluster in that embedding. I’m following the GAN tutorial in the documentation, with some adaptations for my own data.
My get_items function returns a list of path strings for every file under the provided path with ‘WAV’ in its name. My get_x function (get_cqt below) takes one of those paths and returns an image representation of the WAV file.
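I haven’t pasted the two helpers themselves, but they’re roughly along these lines (a simplified, illustrative sketch: the librosa calls and the uint8 scaling just stand in for whatever my real get_cqt does, and those details shouldn’t matter for this error):

# Simplified, illustrative sketch of the two helpers (not my exact code)
from pathlib import Path
import numpy as np
import librosa
from fastai.vision.all import PILImage

def get_items(path):
    "Return path strings for every file under `path` with 'WAV' in its name."
    return [str(p) for p in Path(path).rglob('*') if 'WAV' in p.name]

def get_cqt(fn):
    "Load a WAV file and return its constant-Q spectrogram as an image."
    y, sr = librosa.load(fn, sr=None)
    spec = librosa.amplitude_to_db(np.abs(librosa.cqt(y, sr=sr)), ref=np.max)
    spec = ((spec - spec.min()) / (spec.max() - spec.min() + 1e-8) * 255).astype(np.uint8)
    return PILImage.create(spec)

The rest of the setup follows the tutorial pretty closely: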

from fastai.vision.all import *
dest_path = "/home/jupyter/data/"
path = Path("{}egyptian_fruit_bat_annotated_tiny".format(dest_path))

dls = DataBlock(blocks = ImageBlock,
                get_items = get_items,
                get_x = get_cqt).dataloaders(path, bs = 32)

from fastai.vision.gan import *

generator = basic_generator(15, n_channels = 3, n_extra_layers = 1)
critic = basic_critic(15, n_channels = 3, n_extra_layers = 1,
                      act_cls=partial(nn.LeakyReLU, negative_slope=0.2))
learner = GANLearner.wgan(dls, generator, critic, switch_eval=False,
                          opt_func = partial(Adam, betas = (0.,0.99)), wd=0.)

learner.recorder.train_metrics = True
learner.recorder.valid_metrics = True

learner.fit_one_cycle(1, 2e-4, wd = 0.)
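
One thing that might matter: since the DataBlock only has a single ImageBlock, the batch coming out of these dataloaders is a one-element tuple (just the images, with no separate input/target pair), which you can also see in the traceback below. A quick way to confirm that (just for inspection, not part of the tutorial):

# inspect what the dataloaders actually yield
b = dls.one_batch()
print(len(b))      # 1 -- only the TensorImage batch, no target
print(b[0].shape)  # e.g. torch.Size([32, 3, H, W]), H and W being my spectrogram dimensions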

Here’s the full traceback:

 ---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-25-145922edd5f0> in <module>
  3 
  4 try:
----> 5     learner.fit_one_cycle(1, 2e-4, wd = 0.)
    global learner.fit_one_cycle = <bound method Learner.fit_one_cycle of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    global wd = undefined
  6 except Exception as exc:
  7     tb = IPython.core.ultratb.VerboseTB()

/opt/conda/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args=(<fastai.vision.gan.GANLearner object>, 1, 0.0002), **kwargs={'wd': 0.0})
 54         init_args.update(log)
 55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
    inst = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    global to_return = undefined
    global f = undefined
    args = (<fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>, 1, 0.0002)
    kwargs = {'wd': 0.0}
 57     return _f

/opt/conda/lib/python3.7/site-packages/fastai/callback/schedule.py in fit_one_cycle(self=<fastai.vision.gan.GANLearner object>, n_epoch=1, lr_max=array([0.0002]), div=25.0, div_final=100000.0, pct_start=0.25, wd=0.0, moms=None, cbs=None, reset_opt=False)
111     scheds = {'lr': combined_cos(pct_start, lr_max/div, lr_max, lr_max/div_final),
112               'mom': combined_cos(pct_start, *(self.moms if moms is None else moms))}
--> 113     self.fit(n_epoch, cbs=ParamScheduler(scheds)+L(cbs), reset_opt=reset_opt, wd=wd)
    self.fit = <bound method Learner.fit of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    n_epoch = 1
    cbs = None
    global ParamScheduler = <class 'fastai.callback.schedule.ParamScheduler'>
    scheds = {'lr': <function combine_scheds.<locals>._inner at 0x7f079ffefd40>, 'mom': <function combine_scheds.<locals>._inner at 0x7f06fa38ff80>}
    global L = <class 'fastcore.foundation.L'>
    reset_opt = False
    wd = 0.0
114 
115 # Cell

/opt/conda/lib/python3.7/site-packages/fastcore/logargs.py in _f(*args=(<fastai.vision.gan.GANLearner object>, 1), **kwargs={'cbs': (#1) [ParamScheduler], 'reset_opt': False, 'wd': 0.0})
 54         init_args.update(log)
 55         setattr(inst, 'init_args', init_args)
---> 56         return inst if to_return else f(*args, **kwargs)
    inst = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    global to_return = undefined
    global f = undefined
    args = (<fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>, 1)
    kwargs = {'cbs': (#1) [ParamScheduler], 'reset_opt': False, 'wd': 0.0}
 57     return _f

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in fit(self=<fastai.vision.gan.GANLearner object>, n_epoch=1, lr=None, wd=0.0, cbs=(#1) [ParamScheduler], reset_opt=False)
205             self.opt.set_hypers(lr=self.lr if lr is None else lr)
206             self.n_epoch = n_epoch
--> 207             self._with_events(self._do_fit, 'fit', CancelFitException, self._end_cleanup)
    self._with_events = <bound method Learner._with_events of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    self._do_fit = <bound method Learner._do_fit of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    global CancelFitException = <class 'fastcore.utils.CancelFitException'>
    self._end_cleanup = <bound method Learner._end_cleanup of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
208 
209     def _end_cleanup(self): self.dl,self.xb,self.yb,self.pred,self.loss = None,(None,),(None,),None,None

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self=<fastai.vision.gan.GANLearner object>, f=<bound method Learner._do_fit of <fastai.vision.gan.GANLearner object>>, event_type='fit', ex=<class 'fastcore.utils.CancelFitException'>, final=<bound method Learner._end_cleanup of <fastai.vision.gan.GANLearner object>>)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    self = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    f = <bound method Learner._do_fit of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_fit(self=<fastai.vision.gan.GANLearner object>)
195         for epoch in range(self.n_epoch):
196             self.epoch=epoch
--> 197             self._with_events(self._do_epoch, 'epoch', CancelEpochException)
    self._with_events = <bound method Learner._with_events of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    self._do_epoch = <bound method Learner._do_epoch of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    global CancelEpochException = <class 'fastcore.utils.CancelEpochException'>
198 
199     @log_args(but='cbs')

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self=<fastai.vision.gan.GANLearner object>, f=<bound method Learner._do_epoch of <fastai.vision.gan.GANLearner object>>, event_type='epoch', ex=<class 'fastcore.utils.CancelEpochException'>, final=<function noop>)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    self = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    f = <bound method Learner._do_epoch of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch(self=<fastai.vision.gan.GANLearner object>)
189 
190     def _do_epoch(self):
--> 191         self._do_epoch_train()
    self._do_epoch_train = <bound method Learner._do_epoch_train of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
192         self._do_epoch_validate()
193 

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_epoch_train(self=<fastai.vision.gan.GANLearner object>)
181     def _do_epoch_train(self):
182         self.dl = self.dls.train
--> 183         self._with_events(self.all_batches, 'train', CancelTrainException)
    self._with_events = <bound method Learner._with_events of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    self.all_batches = <bound method Learner.all_batches of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    global CancelTrainException = <class 'fastcore.utils.CancelTrainException'>
184 
185     def _do_epoch_validate(self, ds_idx=1, dl=None):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self=<fastai.vision.gan.GANLearner object>, f=<bound method Learner.all_batches of <fastai.vision.gan.GANLearner object>>, event_type='train', ex=<class 'fastcore.utils.CancelTrainException'>, final=<function noop>)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    self = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    f = <bound method Learner.all_batches of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in all_batches(self=<fastai.vision.gan.GANLearner object>)
159     def all_batches(self):
160         self.n_iter = len(self.dl)
--> 161         for o in enumerate(self.dl): self.one_batch(*o)
    o = (0, (TensorImage([[[[0.1961, 0.2000, 0.1176,  ..., 0.0000, 0.0000, 0.0000],
      [0.2392, 0.2078, 0.1490,  ..., 0.0000, 0.0000, 0.0000],
      [0.2784, 0.2118, 0.1765,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.1686, 0.2157, 0.2667,  ..., 0.0000, 0.0000, 0.0000],
      [0.2431, 0.2235, 0.3059,  ..., 0.0000, 0.0000, 0.0000],
      [0.2863, 0.2745, 0.3255,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.1686, 0.1725, 0.1647],
      [0.0000, 0.0000, 0.0000,  ..., 0.1294, 0.0863, 0.1608],
      [0.0000, 0.0000, 0.0000,  ..., 0.1020, 0.0431, 0.1804],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.3020, 0.2549, 0.2980],
      [0.0000, 0.0000, 0.0000,  ..., 0.2902, 0.2784, 0.3176],
      [0.0000, 0.0000, 0.0000,  ..., 0.2745, 0.3176, 0.3216]]],


    [[[0.0157, 0.0039, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0078, 0.0000, 0.0039,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0118,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.1098, 0.0902, 0.0980,  ..., 0.0000, 0.0000, 0.0000],
      [0.1098, 0.0784, 0.0824,  ..., 0.0000, 0.0000, 0.0000],
      [0.1216, 0.0706, 0.0627,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0039],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.1294, 0.0941, 0.1020],
      [0.0000, 0.0000, 0.0000,  ..., 0.1216, 0.0863, 0.1176],
      [0.0000, 0.0000, 0.0000,  ..., 0.1098, 0.0902, 0.1255]]],


    [[[0.0196, 0.1098, 0.0471,  ..., 0.0000, 0.0000, 0.0000],
      [0.0196, 0.1098, 0.0706,  ..., 0.0000, 0.0000, 0.0000],
      [0.0157, 0.0863, 0.0902,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.2157, 0.2353, 0.2431,  ..., 0.0000, 0.0000, 0.0000],
      [0.1922, 0.2314, 0.2196,  ..., 0.0000, 0.0000, 0.0000],
      [0.1686, 0.2275, 0.1922,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0745, 0.1176, 0.1137],
      [0.0000, 0.0000, 0.0000,  ..., 0.0627, 0.0980, 0.0824],
      [0.0000, 0.0000, 0.0000,  ..., 0.0549, 0.0784, 0.0588],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.2157, 0.2039, 0.1882],
      [0.0000, 0.0000, 0.0000,  ..., 0.2275, 0.2000, 0.2235],
      [0.0000, 0.0000, 0.0000,  ..., 0.2353, 0.2000, 0.2275]]],


    ...,


    [[[0.0627, 0.0824, 0.1333,  ..., 0.0000, 0.0000, 0.0000],
      [0.0588, 0.0863, 0.1255,  ..., 0.0000, 0.0000, 0.0000],
      [0.0510, 0.0980, 0.1098,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.1843, 0.2157, 0.2157,  ..., 0.0000, 0.0000, 0.0000],
      [0.1882, 0.2157, 0.2078,  ..., 0.0000, 0.0000, 0.0000],
      [0.2000, 0.2157, 0.2000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.1176, 0.1490, 0.0863],
      [0.0000, 0.0000, 0.0000,  ..., 0.1020, 0.1569, 0.0824],
      [0.0000, 0.0000, 0.0000,  ..., 0.0941, 0.1490, 0.0667],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.1843, 0.1686, 0.1922],
      [0.0000, 0.0000, 0.0000,  ..., 0.1843, 0.1647, 0.1843],
      [0.0000, 0.0000, 0.0000,  ..., 0.1882, 0.1608, 0.1725]]],


    [[[0.0431, 0.0627, 0.0510,  ..., 0.0000, 0.0000, 0.0000],
      [0.0471, 0.0784, 0.0392,  ..., 0.0000, 0.0000, 0.0000],
      [0.0392, 0.0941, 0.0392,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.1451, 0.1176, 0.1333,  ..., 0.0000, 0.0000, 0.0000],
      [0.1569, 0.1216, 0.1451,  ..., 0.0000, 0.0000, 0.0000],
      [0.1647, 0.1137, 0.1373,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.1216, 0.0431, 0.0039],
      [0.0000, 0.0000, 0.0000,  ..., 0.0902, 0.0392, 0.0078],
      [0.0000, 0.0000, 0.0000,  ..., 0.0549, 0.0392, 0.0157],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.1255, 0.1098, 0.0902],
      [0.0000, 0.0000, 0.0000,  ..., 0.1176, 0.1255, 0.0941],
      [0.0000, 0.0000, 0.0000,  ..., 0.0941, 0.1294, 0.0941]]],


    [[[0.2706, 0.1843, 0.1922,  ..., 0.0000, 0.0000, 0.0000],
      [0.2549, 0.1922, 0.2353,  ..., 0.0000, 0.0000, 0.0000],
      [0.2275, 0.2000, 0.2588,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.2000, 0.2902, 0.3098,  ..., 0.0000, 0.0000, 0.0000],
      [0.2157, 0.2118, 0.3059,  ..., 0.0000, 0.0000, 0.0000],
      [0.2588, 0.1451, 0.2980,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000],
      [0.0000, 0.0000, 0.0000,  ..., 0.0000, 0.0000, 0.0000]],

     [[0.0000, 0.0000, 0.0000,  ..., 0.0941, 0.2118, 0.1608],
      [0.0000, 0.0000, 0.0000,  ..., 0.0667, 0.2039, 0.1333],
      [0.0000, 0.0000, 0.0000,  ..., 0.0667, 0.1843, 0.1020],
      ...,
      [0.0000, 0.0000, 0.0000,  ..., 0.2667, 0.2549, 0.2902],
      [0.0000, 0.0000, 0.0000,  ..., 0.2902, 0.2431, 0.2784],
      [0.0000, 0.0000, 0.0000,  ..., 0.3020, 0.2000, 0.2588]]]]),))
    global enumerate = undefined
    self.dl = None
    self.one_batch = <bound method Learner.one_batch of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
162 
163     def _do_one_batch(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in one_batch(self=<fastai.vision.gan.GANLearner object>, i=0, b=(TensorImage([[[[0.1961, 0.2000, 0.1176,  ..., 0....0.0000, 0.0000,  ..., 0.3020, 0.2000, 0.2588]]]]),))
177         self.iter = i
178         self._split(b)
--> 179         self._with_events(self._do_one_batch, 'batch', CancelBatchException)
    self._with_events = <bound method Learner._with_events of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    self._do_one_batch = <bound method Learner._do_one_batch of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
    global CancelBatchException = <class 'fastcore.utils.CancelBatchException'>
180 
181     def _do_epoch_train(self):

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _with_events(self=<fastai.vision.gan.GANLearner object>, f=<bound method Learner._do_one_batch of <fastai.vision.gan.GANLearner object>>, event_type='batch', ex=<class 'fastcore.utils.CancelBatchException'>, final=<function noop>)
153 
154     def _with_events(self, f, event_type, ex, final=noop):
--> 155         try:       self(f'before_{event_type}')       ;f()
    self = <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>
    f = <bound method Learner._do_one_batch of <fastai.vision.gan.GANLearner object at 0x7f06e8a11b50>>
156         except ex: self(f'after_cancel_{event_type}')
157         finally:   self(f'after_{event_type}')        ;final()

/opt/conda/lib/python3.7/site-packages/fastai/learner.py in _do_one_batch(self=<fastai.vision.gan.GANLearner object>)
162 
163     def _do_one_batch(self):
--> 164         self.pred = self.model(*self.xb)
    self.pred = None
    self.model = GANModule(
  (generator): Sequential(
(0): AddChannels()
(1): ConvLayer(
  (0): ConvTranspose2d(100, 128, kernel_size=(4, 4), stride=(1, 1), bias=False)
  (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(2): ConvLayer(
  (0): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(3): ConvLayer(
  (0): ConvTranspose2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(4): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
(5): Tanh()
  )
  (critic): Sequential(
(0): ConvLayer(
  (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
  (1): LeakyReLU(negative_slope=0.2)
)
(1): ConvLayer(
  (0): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): LeakyReLU(negative_slope=0.2)
)
(2): ConvLayer(
  (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
  (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): LeakyReLU(negative_slope=0.2)
)
(3): Conv2d(128, 1, kernel_size=(4, 4), stride=(1, 1))
(4): Flatten(full=False)
  )
)
    self.xb = (None,)
165         self('after_pred')
166         if len(self.yb): self.loss = self.loss_func(self.pred, *self.yb)

/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self=GANModule(
  (generator): Sequential(
(0): A...stride=(1, 1))
(4): Flatten(full=False)
  )
), *input=(), **kwargs={})
720             result = self._slow_forward(*input, **kwargs)
721         else:
--> 722             result = self.forward(*input, **kwargs)
    result = undefined
    self.forward = <bound method GANModule.forward of GANModule(
  (generator): Sequential(
(0): AddChannels()
(1): ConvLayer(
  (0): ConvTranspose2d(100, 128, kernel_size=(4, 4), stride=(1, 1), bias=False)
  (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(2): ConvLayer(
  (0): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(3): ConvLayer(
  (0): ConvTranspose2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): ReLU()
)
(4): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
(5): Tanh()
  )
  (critic): Sequential(
(0): ConvLayer(
  (0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
  (1): LeakyReLU(negative_slope=0.2)
)
(1): ConvLayer(
  (0): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
  (1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): LeakyReLU(negative_slope=0.2)
)
(2): ConvLayer(
  (0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
  (1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
  (2): LeakyReLU(negative_slope=0.2)
)
(3): Conv2d(128, 1, kernel_size=(4, 4), stride=(1, 1))
(4): Flatten(full=False)
  )
)>
    input = ()
    kwargs = {}
723         for hook in itertools.chain(
724                 _global_forward_hooks.values(),

/opt/conda/lib/python3.7/site-packages/fastai/vision/gan.py in forward(self=GANModule(
  (generator): Sequential(
(0): A...stride=(1, 1))
(4): Flatten(full=False)
  )
), *args=())
 19 
 20     def forward(self, *args):
---> 21         return self.generator(*args) if self.gen_mode else self.critic(*args)
    self.generator = Sequential(
  (0): AddChannels()
  (1): ConvLayer(
(0): ConvTranspose2d(100, 128, kernel_size=(4, 4), stride=(1, 1), bias=False)
(1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): ReLU()
  )
  (2): ConvLayer(
(0): ConvTranspose2d(128, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
(1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): ReLU()
  )
  (3): ConvLayer(
(0): ConvTranspose2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): ReLU()
  )
  (4): ConvTranspose2d(64, 3, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
  (5): Tanh()
)
    args = ()
    self.gen_mode = False
    self.critic = Sequential(
  (0): ConvLayer(
(0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
(1): LeakyReLU(negative_slope=0.2)
  )
  (1): ConvLayer(
(0): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): LeakyReLU(negative_slope=0.2)
  )
  (2): ConvLayer(
(0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
(1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): LeakyReLU(negative_slope=0.2)
  )
  (3): Conv2d(128, 1, kernel_size=(4, 4), stride=(1, 1))
  (4): Flatten(full=False)
)
 22 
 23     def switch(self, gen_mode=None):

/opt/conda/lib/python3.7/site-packages/torch/nn/modules/module.py in _call_impl(self=Sequential(
  (0): ConvLayer(
(0): Conv2d(3,..., 4), stride=(1, 1))
  (4): Flatten(full=False)
), *input=(), **kwargs={})
720             result = self._slow_forward(*input, **kwargs)
721         else:
--> 722             result = self.forward(*input, **kwargs)
    result = undefined
    self.forward = <bound method Sequential.forward of Sequential(
  (0): ConvLayer(
(0): Conv2d(3, 64, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1))
(1): LeakyReLU(negative_slope=0.2)
  )
  (1): ConvLayer(
(0): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)
(1): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): LeakyReLU(negative_slope=0.2)
  )
  (2): ConvLayer(
(0): Conv2d(64, 128, kernel_size=(4, 4), stride=(2, 2), padding=(1, 1), bias=False)
(1): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)
(2): LeakyReLU(negative_slope=0.2)
  )
  (3): Conv2d(128, 1, kernel_size=(4, 4), stride=(1, 1))
  (4): Flatten(full=False)
)>
    input = ()
    kwargs = {}
723         for hook in itertools.chain(
724                 _global_forward_hooks.values(),

TypeError: forward() missing 1 required positional argument: 'input'

I suspect the error stems from my dataloaders object: in the traceback the critic ends up being called with no input at all (args = () and self.xb = (None,)), but I just can’t figure out how to fix this!
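
For comparison, the WGAN tutorial in the docs builds its DataBlock with two blocks, a noise input plus the real images as the target, roughly like this (reproduced from memory, so the exact arguments may be a bit off):

# DataBlock from the fastai WGAN tutorial, roughly as I remember it
from fastai.vision.all import *
from fastai.vision.gan import *

dblock = DataBlock(blocks = (TransformBlock, ImageBlock),
                   get_x = generate_noise,       # noise vector as the input
                   get_items = get_image_files,  # real images become the target
                   splitter = IndexSplitter([]),
                   item_tfms = Resize(64))

Is dropping down to a single ImageBlock (and using get_x to build the image) what’s breaking things here, or am I missing something else? Any pointers would be much appreciated!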