feat: ✨ Add DropOut to AttentiveScaleNet
rhoadesScholar committed Apr 16, 2024
Parent: 29e42ac · Commit: 5674d58
Showing 1 changed file with 15 additions and 0 deletions.
src/leibnetz/nets/attentive_scalenet.py (15 additions, 0 deletions)
@@ -19,6 +19,9 @@ def build_subnet(
     output_nc=1,
     base_nc=12,
     nc_increase_factor=2,
+    norm_layer=None,
+    residual=False,
+    dropout_prob=None,
 ):
     # define downsample nodes
     downsample_factors = np.array(downsample_factors)
@@ -36,6 +39,9 @@ def build_subnet(
                 base_nc * nc_increase_factor**i,
                 kernel_sizes,
                 identifier=output_key,
+                norm_layer=norm_layer,
+                residual=residual,
+                dropout_prob=dropout_prob,
             ),
         )
         c += 1
@@ -67,6 +73,9 @@ def build_subnet(
                 base_nc * nc_increase_factor ** (i + 1),
                 kernel_sizes,
                 identifier=output_key,
+                norm_layer=norm_layer,
+                residual=residual,
+                dropout_prob=dropout_prob,
             )
         )
         input_key = output_key
@@ -108,6 +117,9 @@ def build_subnet(
                 base_nc * nc_increase_factor**i,
                 kernel_sizes,
                 identifier=output_key,
+                norm_layer=norm_layer,
+                residual=residual,
+                dropout_prob=dropout_prob,
             )
         )
         input_key = output_key
@@ -122,6 +134,9 @@ def build_subnet(
             # kernel_sizes,
             [(1,) * len(top_resolution)],
             identifier=f"{subnet_id}_output",
+            norm_layer=norm_layer,
+            residual=residual,
+            dropout_prob=dropout_prob,
         )
     )
     outputs = {
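The change is uniform: build_subnet gains three optional arguments (norm_layer, residual, dropout_prob) and forwards them unchanged to each of the five node constructors it calls, so the defaults (None, False, None) leave existing callers unaffected. As an illustration only, the minimal PyTorch sketch below shows what a dropout_prob parameter of this kind typically controls inside a convolutional block; the conv_block helper, its layer ordering, and the 3D shapes are assumptions for illustration, not code from this repository (residual wiring is omitted for brevity).

    import torch
    import torch.nn as nn

    def conv_block(in_nc, out_nc, norm_layer=None, dropout_prob=None):
        """Toy conv block: optional normalization and dropout threaded through,
        mirroring the keyword-forwarding pattern in this commit (assumed layout)."""
        layers = [nn.Conv3d(in_nc, out_nc, kernel_size=3, padding=1)]
        if norm_layer is not None:
            layers.append(norm_layer(out_nc))  # e.g. nn.BatchNorm3d
        layers.append(nn.ReLU(inplace=True))
        if dropout_prob is not None:
            # Skipped entirely when None, matching the new default's no-op behavior.
            layers.append(nn.Dropout(p=dropout_prob))
        return nn.Sequential(*layers)

    block = conv_block(12, 24, norm_layer=nn.BatchNorm3d, dropout_prob=0.1)
    print(block(torch.randn(1, 12, 8, 8, 8)).shape)  # torch.Size([1, 24, 8, 8, 8])

Because every insertion site is a pure addition of keyword arguments with inert defaults, the commit is backward compatible: dropout only activates when a caller passes a non-None dropout_prob.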
