
Commit

fix
priyakasimbeg committed Dec 7, 2023
1 parent 46f5a14 commit 6c9c86f
Showing 1 changed file with 4 additions and 4 deletions.
@@ -146,11 +146,11 @@ def __init__(self,
     activation_fn = nn.LeakyReLU(negative_slope=0.2, inplace=True)
     self.conv_layers = nn.Sequential(
         nn.Conv2d(in_chans, out_chans, kernel_size=3, padding=1, bias=False),
-        norm_layer(out_chans),
+        nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6),
         activation_fn,
         nn.Dropout2d(dropout_rate),
         nn.Conv2d(out_chans, out_chans, kernel_size=3, padding=1, bias=False),
-        norm_layer(out_chans),
+        nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6),
         activation_fn,
         nn.Dropout2d(dropout_rate),
     )
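
For context, here is a minimal, self-contained sketch of what this block looks like after the change. The class name ConvBlock, the forward method, and the test shapes are assumptions for illustration; the diff only shows the __init__ body. With num_groups=1, nn.GroupNorm normalizes each sample over all channels and spatial positions jointly, i.e. it behaves like LayerNorm with per-channel affine parameters.

import torch
from torch import nn

class ConvBlock(nn.Module):  # class name assumed; not shown in the diff
  """Two Conv2d -> GroupNorm -> LeakyReLU -> Dropout2d stages, per the hunk above."""

  def __init__(self, in_chans, out_chans, dropout_rate):
    super().__init__()
    activation_fn = nn.LeakyReLU(negative_slope=0.2, inplace=True)
    self.conv_layers = nn.Sequential(
        nn.Conv2d(in_chans, out_chans, kernel_size=3, padding=1, bias=False),
        # num_groups=1 puts every channel in a single group, so normalization
        # statistics are computed over (C, H, W) per sample.
        nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6),
        activation_fn,
        nn.Dropout2d(dropout_rate),
        nn.Conv2d(out_chans, out_chans, kernel_size=3, padding=1, bias=False),
        nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6),
        activation_fn,
        nn.Dropout2d(dropout_rate),
    )

  def forward(self, x):
    return self.conv_layers(x)

# Smoke test with hypothetical shapes (N, C, H, W):
block = ConvBlock(in_chans=1, out_chans=32, dropout_rate=0.0)
print(block(torch.randn(2, 1, 64, 64)).shape)  # torch.Size([2, 32, 64, 64])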
@@ -173,7 +173,7 @@ def __init__(self,
     super().__init__()
     if use_layer_norm:
       size = int(size)
-      norm_layer = partial(nn.GroupNorm, num_groups=1, eps=1e-6)
+      norm_layer = nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6)
     else:
       norm_layer = nn.InstanceNorm2d
     if use_tanh:
@@ -183,7 +183,7 @@ def __init__(self,
     self.layers = nn.Sequential(
         nn.ConvTranspose2d(
             in_chans, out_chans, kernel_size=2, stride=2, bias=False),
-        norm_layer(out_chans),
+        nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6),
         activation_fn,
     )
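
These two hunks show why the old pattern failed: nn.GroupNorm's signature is GroupNorm(num_groups, num_channels, eps=1e-05, affine=True), so the call norm_layer(out_chans) passed out_chans positionally into num_groups, which the partial had already bound as a keyword, raising a TypeError at construction time. A small sketch reproducing the failure and the fixed call (the out_chans value is arbitrary here):

from functools import partial
from torch import nn

out_chans = 32  # arbitrary channel count for illustration

# Old pattern: the positional out_chans collides with the keyword num_groups.
norm_layer = partial(nn.GroupNorm, num_groups=1, eps=1e-6)
try:
  norm_layer(out_chans)
except TypeError as e:
  print(e)  # ... got multiple values for argument 'num_groups'

# Fixed pattern: construct the module directly with num_channels named.
norm = nn.GroupNorm(num_groups=1, num_channels=out_chans, eps=1e-6)
print(norm)  # GroupNorm(1, 32, eps=1e-06, affine=True)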
