
Commit 240e667

Author: talrid (committed)
Revert "mixer_b16_224_miil"
1 parent 5bcf686 commit 240e667

File tree

1 file changed: +0 / -27 lines

timm/models/mlp_mixer.py

Lines changed: 0 additions & 27 deletions
@@ -60,15 +60,6 @@ def _cfg(url='', **kwargs):
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-vitjx/jx_mixer_l16_224_in21k-846aa33c.pth',
         num_classes=21843
     ),
-    # Mixer ImageNet-21K-P pretraining
-    mixer_b16_224_miil_in21k=_cfg(
-        url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil_in21k.pth',
-        mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear', num_classes=11221,
-    ),
-    mixer_b16_224_miil=_cfg(
-        url='https://miil-public-eu.oss-eu-central-1.aliyuncs.com/model-zoo/ImageNet_21K_P/models/timm/mixer_b16_224_miil.pth',
-        mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, interpolation='bilinear',
-    ),
 )
@@ -264,21 +255,3 @@ def mixer_l16_224_in21k(pretrained=False, **kwargs):
     model_args = dict(patch_size=16, num_blocks=24, hidden_dim=1024, tokens_dim=512, channels_dim=4096, **kwargs)
     model = _create_mixer('mixer_l16_224_in21k', pretrained=pretrained, **model_args)
     return model
-
-@register_model
-def mixer_b16_224_miil(pretrained=False, **kwargs):
-    """ Mixer-B/16 224x224. ImageNet-21k pretrained weights.
-    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
-    """
-    model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs)
-    model = _create_mixer('mixer_b16_224_miil', pretrained=pretrained, **model_args)
-    return model
-
-@register_model
-def mixer_b16_224_miil_in21k(pretrained=False, **kwargs):
-    """ Mixer-B/16 224x224. ImageNet-1k pretrained weights.
-    Weights taken from: https://github.com/Alibaba-MIIL/ImageNet21K
-    """
-    model_args = dict(patch_size=16, num_blocks=12, hidden_dim=768, tokens_dim=384, channels_dim=3072, **kwargs)
-    model = _create_mixer('mixer_b16_224_miil_in21k', pretrained=pretrained, **model_args)
-    return model
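For context, a minimal sketch (not part of this commit) of how the reverted @register_model entries would be looked up through timm's public factory API; model availability below assumes a build that includes this revert:

# Sketch: after this revert the two MIIL Mixer names are no longer registered,
# so they should not appear in the registry and create_model() would fail for them.
import timm

# list_models() reports names added via @register_model; filter to Mixer-B/16 variants.
available = timm.list_models('mixer_b16*')
print(available)  # expected to exclude the 'mixer_b16_224_miil*' names after the revert

name = 'mixer_b16_224_miil'
if name in available:
    # create_model() resolves the name in the registry; pretrained=True pulls the
    # checkpoint from the URL stored in the model's default _cfg entry.
    model = timm.create_model(name, pretrained=True)
else:
    # Fall back to a Mixer-B/16 variant that is still registered in mlp_mixer.py.
    model = timm.create_model('mixer_b16_224', pretrained=False)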

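The reverted _cfg entries also carried non-default preprocessing (mean=(0, 0, 0), std=(1, 1, 1), crop_pct=0.875, bilinear interpolation). As a hedged sketch, not tied to this commit, of how such per-model defaults are typically turned into an input pipeline with timm's data helpers, using a still-registered Mixer variant purely for illustration:

# Sketch only: building eval preprocessing from a model's default_cfg fields
# (mean, std, crop_pct, interpolation) via timm's data helpers.
import timm
from timm.data import resolve_data_config, create_transform

# Any registered Mixer variant serves for illustration.
model = timm.create_model('mixer_b16_224', pretrained=False)

# resolve_data_config() reads the same kind of fields the reverted _cfg entries set.
config = resolve_data_config({}, model=model)
transform = create_transform(**config)
print(config['mean'], config['std'], config['crop_pct'], config['interpolation'])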