Skip to content

Commit

Permalink
Merge pull request #651 from mrT23/master
Browse files Browse the repository at this point in the history
mixer_b16_224 with miil pretraining
  • Loading branch information
rwightman authored May 20, 2021
2 parents cf4ce2f + dc1a4ef commit b4ebf92
Showing 1 changed file with 26 additions and 0 deletions.
26 changes: 26 additions & 0 deletions timm/models/mlp_mixer.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,15 @@ def _cfg(url='', **kwargs):
url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth',
num_classes=21843
),
# Mixer ImageNet-21K-P pretraining
mixer_b16_224_miil_in21k=_cfg(
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil_in21k.pth',
mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221,
),
mixer_b16_224_miil=_cfg(
url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil.pth',
mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear',
),

gmixer_12_224=_cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
gmixer_24_224=_cfg(mean=IMAGENET_DEFAULT_MEAN, std=IMAGENET_DEFAULT_STD),
Expand Down Expand Up @@ -365,6 +374,23 @@ def mixer_l16_224_in21k(pretrained=False, **kwargs):
model = _create_mixer('mixer_l16_224_in21k', pretrained=pretrained, **model_args)
return model

@register_model
def mixer_b16_224_miil(pretrained=False, **kwargs):
    """ Mixer-B/16 224x224. ImageNet-1k fine-tuned weights (from ImageNet-21K-P pretraining).
    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K

    NOTE: the matching default cfg does not override ``num_classes``, so this is the
    1000-class (ImageNet-1k) head; the 21k-class variant is ``mixer_b16_224_miil_in21k``.

    Args:
        pretrained (bool): load the MIIL pretrained checkpoint if True.
        **kwargs: forwarded to the model constructor via ``_create_mixer``.

    Returns:
        The constructed MLP-Mixer model.
    """
    model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, **kwargs)
    model = _create_mixer('mixer_b16_224_miil', pretrained=pretrained, **model_args)
    return model

@register_model
def mixer_b16_224_miil_in21k(pretrained=False, **kwargs):
    """ Mixer-B/16 224x224. ImageNet-21K-P pretrained weights (11221 classes).
    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K

    NOTE: the matching default cfg sets ``num_classes=11221`` (the cleaned
    ImageNet-21K-P label set), so this is the 21k-pretrained head; the
    ImageNet-1k fine-tuned variant is ``mixer_b16_224_miil``.

    Args:
        pretrained (bool): load the MIIL pretrained checkpoint if True.
        **kwargs: forwarded to the model constructor via ``_create_mixer``.

    Returns:
        The constructed MLP-Mixer model.
    """
    model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, **kwargs)
    model = _create_mixer('mixer_b16_224_miil_in21k', pretrained=pretrained, **model_args)
    return model

@register_model
def gmixer_12_224(pretrained=False, **kwargs):
Expand Down

0 comments on commit b4ebf92

Please sign in to comment.