Initial Commit

Victor Mylle
2023-11-07 18:00:20 +00:00
commit 56c763a6f4
41 changed files with 358954 additions and 0 deletions

src/models/__init__.py Normal file

@@ -0,0 +1,3 @@
from .linear_regression import LinearRegression
from .complex_model import TimeSeriesModel
from .non_linear_regression import NonLinearRegression
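
Because __init__.py re-exports all three classes, downstream code can import them from the package directly. A minimal sketch, assuming the repository root is on the import path:

from src.models import LinearRegression, NonLinearRegression, TimeSeriesModel

model = LinearRegression(input_size=10, output_size=1)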

src/models/complex_model.py Normal file

@@ -0,0 +1,29 @@
import torch
import torch.nn as nn


class TimeSeriesModel(nn.Module):
    def __init__(self, input_size, output_size, num_layers=5, hidden_size=128, dropout_rate=0.3):
        super().__init__()
        self.output_size = output_size
        # Build a stack of fully connected blocks:
        # Linear -> BatchNorm1d -> Dropout -> ReLU.
        layers = []
        for i in range(num_layers):
            if i == 0:
                layers.append(nn.Linear(input_size, hidden_size))
            else:
                layers.append(nn.Linear(hidden_size, hidden_size))
            layers.append(nn.BatchNorm1d(hidden_size))
            layers.append(nn.Dropout(dropout_rate))
            layers.append(nn.ReLU())
        self.layers = nn.ModuleList(layers)
        self.output = nn.Linear(hidden_size, output_size)

    def forward(self, x):
        # Drop a trailing singleton dimension so inputs shaped
        # (batch, input_size, 1) become (batch, input_size).
        x = torch.squeeze(x, -1)
        for layer in self.layers:
            x = layer(x)
        x = self.output(x)
        return x
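
A rough usage sketch with illustrative sizes (not taken from the repository). Because of the BatchNorm1d layers, a training-mode forward pass needs a batch size greater than 1; eval mode avoids that constraint here:

import torch
from src.models import TimeSeriesModel

model = TimeSeriesModel(input_size=24, output_size=1)
x = torch.randn(8, 24, 1)  # (batch, input_size, 1); trailing dim is squeezed
model.eval()               # BatchNorm1d uses running statistics in eval mode
with torch.no_grad():
    y = model(x)
print(y.shape)             # torch.Size([8, 1])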

src/models/linear_regression.py Normal file

@@ -0,0 +1,14 @@
import torch


class LinearRegression(torch.nn.Module):
    def __init__(self, input_size, output_size):
        super().__init__()
        self.input_size = input_size
        self.output_size = output_size
        # A single affine layer: y = x @ W.T + b.
        self.linear = torch.nn.Linear(input_size, output_size)

    def forward(self, x):
        # Drop a trailing singleton dimension, e.g.
        # (batch, input_size, 1) -> (batch, input_size).
        x = torch.squeeze(x, -1)
        out = self.linear(x)
        return out
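
A minimal forward-pass sketch, with made-up dimensions, using the snake_case signature above:

import torch
from src.models import LinearRegression

model = LinearRegression(input_size=10, output_size=1)
x = torch.randn(4, 10, 1)  # trailing singleton dim is removed in forward()
y = model(x)
print(y.shape)             # torch.Size([4, 1])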

src/models/non_linear_regression.py Normal file

@@ -0,0 +1,31 @@
import torch


class NonLinearRegression(torch.nn.Module):
    def __init__(self, input_size, output_size, hidden_size=128, num_layers=2):
        super().__init__()
        self.input_size = input_size
        self.output_size = output_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # num_layers counts the linear layers in total (input, hidden, output
        # projections), so it must be >= 2; values below 2 still yield the
        # minimal two-layer network.
        self.layers = torch.nn.ModuleList()
        self.layers.append(torch.nn.Linear(input_size, hidden_size))
        for _ in range(num_layers - 2):
            self.layers.append(torch.nn.Linear(hidden_size, hidden_size))
        self.layers.append(torch.nn.Linear(hidden_size, output_size))
        self.relu = torch.nn.ReLU()

    def forward(self, x):
        # Drop a trailing singleton dimension so (batch, input_size, 1)
        # becomes (batch, input_size).
        x = torch.squeeze(x, -1)
        # ReLU after every layer except the last, which stays linear
        # so the model can produce unbounded regression outputs.
        for layer in self.layers[:-1]:
            x = self.relu(layer(x))
        out = self.layers[-1](x)
        return out
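
With the defaults (hidden_size=128, num_layers=2) the network reduces to Linear -> ReLU -> Linear. A hypothetical instantiation with illustrative sizes:

import torch
from src.models import NonLinearRegression

model = NonLinearRegression(input_size=10, output_size=1, hidden_size=64, num_layers=3)
# layers: Linear(10, 64) -> ReLU -> Linear(64, 64) -> ReLU -> Linear(64, 1)
x = torch.randn(4, 10)
y = model(x)
print(y.shape)  # torch.Size([4, 1])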