# NOTE(review): "Newer" / "Older" look like diff-viewer labels pasted in by
# accident; kept here as a comment — as bare names they would raise NameError
# at import time.
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
import torch.nn as nn
class LinearRegression(nn.Module):
    """Fully connected regression network: input -> 7 hidden ReLU layers -> 1 output.

    NOTE(review): the trailing ReLU clamps predictions to >= 0 — confirm the
    regression targets are non-negative, otherwise remove that final activation.
    """

    _NUM_HIDDEN = 7  # number of hidden Linear+ReLU pairs in the original stack

    def __init__(self, cfg, input_size):
        """Build the regressor from the ``cfg["LinearRegression"]`` section.

        Args:
            cfg: nested mapping; reads ``Bias`` (bool, or truthy string such as
                "True"/"1"/"yes") and ``HiddenSize`` (int-convertible) from
                ``cfg["LinearRegression"]``.
            input_size: number of input features per sample.
        """
        super(LinearRegression, self).__init__()
        self.input_size = input_size

        bias = cfg["LinearRegression"]["Bias"]
        # Bug fix: config values are often strings (HiddenSize is int()-converted
        # below), and a raw string like "False" is truthy — parse it explicitly.
        if isinstance(bias, str):
            bias = bias.strip().lower() in ("true", "1", "yes")
        self.bias = bias

        self.hidden_size = int(cfg["LinearRegression"]["HiddenSize"])

        # Same layer stack as before: 1 input layer + 6 hidden-to-hidden layers
        # (7 hidden Linear+ReLU pairs total) + 1 output layer, all ReLU-activated.
        layers = [nn.Linear(input_size, self.hidden_size, self.bias), nn.ReLU()]
        for _ in range(self._NUM_HIDDEN - 1):
            layers += [nn.Linear(self.hidden_size, self.hidden_size, self.bias),
                       nn.ReLU()]
        layers += [nn.Linear(self.hidden_size, 1, self.bias), nn.ReLU()]
        self.regressor = nn.Sequential(*layers)

    def forward(self, x):
        """Run a batch through the stack; returns a tensor of shape (..., 1)."""
        # Bug fix: nn.Module subclasses must define forward(); without it,
        # calling the model raises NotImplementedError.
        return self.regressor(x)
# NOTE(review): the body of build_model is not visible in this view (the paste
# appears to have lost indentation and possibly lines) — it presumably
# constructs and returns LinearRegression(cfg, input_size); confirm against
# the full file before relying on it.
def build_model(cfg, input_size):
class ModelCheckpoint:
    """Persist model weights to disk whenever the observed loss improves."""

    def __init__(self, filepath, model):
        # Best loss seen so far; None means nothing has been saved yet.
        self.min_loss = None
        self.filepath = filepath
        self.model = model

    def update(self, loss):
        """Save the model's state_dict if *loss* beats the best loss so far."""
        # Guard clause: bail out unless this is the first update or a new best.
        if self.min_loss is not None and loss >= self.min_loss:
            return
        print("Saving a better model")
        torch.save(self.model.state_dict(), self.filepath)
        self.min_loss = loss