diff --git a/main.py b/main.py
index 9b2efb85ac3b5f81c7f7edc00f4704ddbc7b06a8..3acf9795c9ed532404c8a86edf90975dc7913905 100644
--- a/main.py
+++ b/main.py
@@ -107,7 +107,7 @@ if __name__ == "__main__":
         print(" Validation : Loss : {:.4f}".format(val_loss))
 
 
-    create_submission.create_submission(network, None)
+    create_submission.create_submission(network, None, device)  # pass the training device through to submission generation
     """
     logdir = generate_unique_logpath(top_logdir, "linear")
     print("Logging to {}".format(logdir))
diff --git a/model.py b/model.py
index a40839c7fea2ec3d932515f24fff463ec3f7365a..9a73117b06126bd7792739e846aa606f75300fc5 100644
--- a/model.py
+++ b/model.py
@@ -11,11 +11,30 @@ class LinearRegression(nn.Module):
         super(LinearRegression, self).__init__()
         self.input_size = input_size
         self.bias = cfg["LinearRegression"]["Bias"]
-        self.regressor = nn.Linear(input_size, 1, self.bias)
-        self.activate = nn.ReLU()
+        self.hidden_size = int(cfg["LinearRegression"]["HiddenSize"])
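+        # Despite the class name, the regressor is now an MLP: an input projection,
+        # six hidden layers and a scalar output head, each followed by a ReLU.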
+        self.regressor = nn.Sequential(
+            nn.Linear(input_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, self.hidden_size, self.bias),
+            nn.ReLU(),
+            nn.Linear(self.hidden_size, 1, self.bias),
+            nn.ReLU(),
+        )
+
     def forward(self, x):
-        y = self.regressor(x).view((x.shape[0],-1))
-        return self.activate(y)
+        return self.regressor(x)  # (batch_size, 1); non-negative because of the final ReLU
 
 def build_model(cfg, input_size):    
     return eval(f"{cfg['Model']['Name']}(cfg, input_size)")
diff --git a/train.py b/train.py
index cb594f89e13edbbdb4795011b549deaf81288da7..a09e78acf486e87a2393aa4a52de6150990b0a19 100644
--- a/train.py
+++ b/train.py
@@ -41,14 +41,14 @@ def train(model, loader, f_loss, optimizer, device):
         
         Y = list(model.parameters())[0].grad.cpu().tolist()
         
-        gradients.append(np.mean(Y))
-        tar.append(np.mean(outputs.cpu().tolist()))
-        out.append(np.mean(targets.cpu().tolist()))
+        #gradients.append(np.mean(Y))
+        #tar.append(np.mean(outputs.cpu().tolist()))
+        #out.append(np.mean(targets.cpu().tolist()))
         
         optimizer.step()
-    visualize_gradients(gradients)
-    visualize_gradients(tar)
-    visualize_gradients(out)
+    #visualize_gradients(gradients)
+    #visualize_gradients(tar)
+    #visualize_gradients(out)
 
 def visualize_gradients(gradients):
     print(gradients)