Fastai Learner for time series data

I am trying to wrap my custom time series model, built on LSTM cells, in a fastai Learner, which requires a fastai DataLoader for training.

How would you convert the small example I have put together below? (My own rough attempt at the fastai wiring is at the end of the post.)

import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.optim as optim

np.random.seed(2)

T = 20    # time constant of the sine (period = 2*pi*T samples)
L = 1000  # length of each sequence
N = 100   # number of sequences

# Each row is an integer ramp with a random offset, turned into a sine wave.
x = np.empty((N, L), 'int64')
x[:] = np.array(range(L)) + np.random.randint(-4 * T, 4 * T, N).reshape(N, 1)
y = np.sin(x / T).astype(np.float32)

plt.plot(np.arange(L), y[0, :])


class Sequence(nn.Module):
    def __init__(self):
        super().__init__()
        self.lstm1 = nn.LSTMCell(1, 51)
        self.lstm2 = nn.LSTMCell(51, 51)
        self.linear = nn.Linear(51, 1)

    def forward(self, input, future=0):
        outputs = []
        # Initialise hidden/cell states on the same device as the input,
        # so the model also works if a Learner moves it to the GPU.
        h_t = torch.zeros(input.size(0), 51, dtype=torch.float32, device=input.device)
        c_t = torch.zeros(input.size(0), 51, dtype=torch.float32, device=input.device)
        h_t2 = torch.zeros(input.size(0), 51, dtype=torch.float32, device=input.device)
        c_t2 = torch.zeros(input.size(0), 51, dtype=torch.float32, device=input.device)

        # Teacher-forced pass over the observed sequence, one step at a time.
        for input_t in input.split(1, dim=1):
            h_t, c_t = self.lstm1(input_t, (h_t, c_t))
            h_t2, c_t2 = self.lstm2(h_t, (h_t2, c_t2))
            output = self.linear(h_t2)
            outputs += [output]
        # Optionally keep rolling forward on the model's own predictions.
        for i in range(future):
            h_t, c_t = self.lstm1(output, (h_t, c_t))
            h_t2, c_t2 = self.lstm2(h_t, (h_t2, c_t2))
            output = self.linear(h_t2)
            outputs += [output]
        return torch.cat(outputs, dim=1)


# Hold out the first three sequences for testing; the target is the
# input shifted one step ahead (next-value prediction).
train_input = torch.from_numpy(y[3:, :-1])
train_target = torch.from_numpy(y[3:, 1:])
test_input = torch.from_numpy(y[:3, :-1])
test_target = torch.from_numpy(y[:3, 1:])


model = Sequence()
criterion = nn.MSELoss()
optimizer = optim.Adam(model.parameters(), lr=0.01)

n_step = 20

# A plain training loop; the closure pattern from the original PyTorch
# example is only needed for optimizers like LBFGS, not for Adam.
for i in range(n_step):
    print(f"taking step {i}")
    optimizer.zero_grad()
    out = model(train_input)
    loss = criterion(out, train_target)
    print(f"Loss function {loss}")
    loss.backward()
    optimizer.step()


# Evaluate on the held-out sequences and roll 1000 steps into the future.
with torch.no_grad():
    future = 1000
    pred = model(test_input, future=future)
    # Only the non-future part of the prediction has ground truth.
    loss = criterion(pred[:, :-future], test_target)
    print("test:", loss.item())
    pred_np = pred.detach().numpy()  # new name so the dataset y is not overwritten
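
To make the question concrete, here is the kind of wiring I have been attempting: a minimal sketch assuming fastai v2, where DataLoaders.from_dsets and Learner are the entry points I found in the docs, and the batch size of 32 is an arbitrary choice. Is something along these lines the right approach?

from torch.utils.data import TensorDataset
from fastai.data.core import DataLoaders
from fastai.learner import Learner

# Wrap the existing tensors in plain PyTorch datasets.
train_ds = TensorDataset(train_input, train_target)
valid_ds = TensorDataset(test_input, test_target)

# fastai can build its DataLoaders directly from PyTorch datasets.
dls = DataLoaders.from_dsets(train_ds, valid_ds, bs=32)

# The Learner ties together the data, the model, and the loss function;
# fastai's default optimizer is Adam, matching the loop above.
learn = Learner(dls, Sequence(), loss_func=nn.MSELoss())
learn.fit(n_step, lr=0.01)

What I am still unsure about is how to reproduce the future=1000 evaluation through the Learner, since learn.predict and learn.get_preds do not obviously accept the extra future argument of my forward method.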