feat: replace batchnorm with layernorm in neural network.
ChristianFredrikJohnsen committed Apr 25, 2024
1 parent 1868b7f commit 15ab4ec
Showing 1 changed file with 2 additions and 6 deletions.
8 changes: 2 additions & 6 deletions src/neuralnet/neural_network.py
@@ -1,5 +1,4 @@
-import os
 
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
@@ -10,10 +9,10 @@ def __init__(self, hidden_dim):
         super().__init__()
         self.conv_block = nn.Sequential(
             nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1),
-            nn.BatchNorm2d(hidden_dim),
+            nn.LayerNorm([hidden_dim, 3, 3]),
             nn.ReLU(),
             nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, padding=1),
-            nn.BatchNorm2d(hidden_dim),
+            nn.LayerNorm([hidden_dim, 3, 3]),
         )
 
     def forward(self, x):
@@ -46,11 +45,8 @@ def __init__(
                 padding=1,
             ),  # Convolution matrix
             nn.LayerNorm([self.hidden_dimension, 3, 3]),  # Layer normalization
-            # nn.BatchNorm2d(self.hidden_dimension),  # Batch normalization
             nn.ReLU(),  # Activation function
         )
 
-
-
         self.residual_blocks = nn.ModuleList(
             [ResBlock(hidden_dimension) for _ in range(self.res_blocks)]
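The practical difference behind this swap: nn.BatchNorm2d normalizes each channel using statistics computed across the batch, so its behavior changes between train() and eval() and degrades at small batch sizes, while nn.LayerNorm over [C, H, W] normalizes each sample independently and behaves identically in both modes. A minimal sketch (not from the repository; hidden_dim and the batch size are hypothetical, the 3x3 spatial size is taken from the diff):

    # Sketch contrasting the removed and added normalization layers.
    import torch
    import torch.nn as nn

    hidden_dim = 64                            # hypothetical channel count
    x = torch.randn(8, hidden_dim, 3, 3)       # (batch, channels, height, width)

    # BatchNorm2d: per-channel statistics across the batch dimension;
    # uses running averages in eval mode.
    bn = nn.BatchNorm2d(hidden_dim)

    # LayerNorm over [C, H, W]: per-sample statistics, no running averages,
    # but the 3x3 spatial size is baked into the module.
    ln = nn.LayerNorm([hidden_dim, 3, 3])

    print(bn(x).shape, ln(x).shape)  # both preserve the shape: (8, 64, 3, 3)

One trade-off implied by the diff: because LayerNorm's normalized_shape fixes the spatial dimensions, this network now only accepts 3x3 feature maps, whereas the BatchNorm2d version was shape-agnostic.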
