@@ -121,6 +121,9 @@ def _cfg(url='', **kwargs):
     'maxvit_rmlp_nano_rw_256': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_nano_rw_256_sw-c17bb0d6.pth',
         input_size=(3, 256, 256), pool_size=(8, 8)),
+    'maxvit_rmlp_tiny_rw_256': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-weights-maxx/maxvit_rmlp_tiny_rw_256_sw-2da819a5.pth',
+        input_size=(3, 256, 256), pool_size=(8, 8)),
     'maxvit_tiny_pm_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
     'maxxvit_nano_rw_256': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8)),
 
@@ -515,6 +518,13 @@ def _next_cfg(
         stem_width=(32, 64),
         **_rw_max_cfg(rel_pos_type='mlp'),
     ),
+    maxvit_rmlp_tiny_rw_256=MaxxVitCfg(
+        embed_dim=(64, 128, 256, 512),
+        depths=(2, 2, 5, 2),
+        block_type=('M',) * 4,
+        stem_width=(32, 64),
+        **_rw_max_cfg(rel_pos_type='mlp'),
+    ),
     maxvit_tiny_pm_256=MaxxVitCfg(
         embed_dim=(64, 128, 256, 512),
         depths=(2, 2, 5, 2),
@@ -1721,6 +1731,11 @@ def maxvit_rmlp_nano_rw_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_rmlp_nano_rw_256', pretrained=pretrained, **kwargs)
 
 
+@register_model
+def maxvit_rmlp_tiny_rw_256(pretrained=False, **kwargs):
+    return _create_maxxvit('maxvit_rmlp_tiny_rw_256', pretrained=pretrained, **kwargs)
+
+
 @register_model
 def maxvit_tiny_pm_256(pretrained=False, **kwargs):
     return _create_maxxvit('maxvit_tiny_pm_256', pretrained=pretrained, **kwargs)
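For context, a minimal usage sketch (not part of the diff) showing how the newly registered model can be instantiated through timm's factory, assuming a build that includes this change; the 256x256 input matches the `input_size` declared in the new `_cfg` entry.

```python
import timm
import torch

# 'maxvit_rmlp_tiny_rw_256' is resolved via the @register_model entry added above.
# pretrained=True would instead download the maxvit_rmlp_tiny_rw_256_sw-2da819a5.pth
# weights from the URL in the new default cfg.
model = timm.create_model('maxvit_rmlp_tiny_rw_256', pretrained=False)
model.eval()

# The cfg declares input_size=(3, 256, 256) with an 8x8 final feature map.
x = torch.randn(1, 3, 256, 256)
with torch.no_grad():
    logits = model(x)
print(logits.shape)  # torch.Size([1, 1000]) for the default ImageNet-1k head
```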