@@ -34,10 +34,15 @@ def _cfg(url='', **kwargs):
 default_cfgs = {
     # GPU-Efficient (ResNet) weights
     'botnet26t_256': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/botnet26t_256-a0e6c3b1.pth',
+        url='',
+        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
+    'botnet50t_256': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/botnet50t_256-a0e6c3b1.pth',
         fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
-    'botnet50ts_256': _cfg(url='', fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
     'eca_botnext26ts_256': _cfg(
+        url='',
+        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
+    'eca_botnext50ts_256': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_botnext26ts_256-fb3bf984.pth',
         fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
 
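For context, each of these default_cfg entries pins a fixed 256x256 input because the bottleneck self-attention blocks use position embeddings sized for one feature-map resolution. A minimal usage sketch, assuming a timm build that includes this commit (model names are the ones registered here):

    import torch
    import timm

    # fixed_input_size=True means the self-attn position embeddings are tied to
    # input_size=(3, 256, 256), so these models expect 256x256 images
    model = timm.create_model('botnet26t_256', pretrained=False)
    model.eval()

    x = torch.randn(1, 3, 256, 256)
    with torch.no_grad():
        out = model(x)
    print(out.shape)  # torch.Size([1, 1000])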
@@ -60,6 +65,20 @@ def _cfg(url='', **kwargs):
 model_cfgs = dict(
 
     botnet26t=ByoModelCfg(
+        blocks=(
+            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=0, br=0.25),
+            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=0, br=0.25),
+            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),
+            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=0, br=0.25),
+        ),
+        stem_chs=64,
+        stem_type='tiered',
+        stem_pool='maxpool',
+        fixed_input_size=True,
+        self_attn_layer='bottleneck',
+        self_attn_kwargs=dict()
+    ),
+    botnet50t=ByoModelCfg(
         blocks=(
             ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),
             ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
@@ -73,22 +92,23 @@ def _cfg(url='', **kwargs):
         self_attn_layer='bottleneck',
         self_attn_kwargs=dict()
     ),
-    botnet50ts=ByoModelCfg(
+    eca_botnext26ts=ByoModelCfg(
         blocks=(
-            ByoBlockCfg(type='bottle', d=3, c=256, s=2, gs=0, br=0.25),
-            interleave_blocks(types=('bottle', 'self_attn'), d=4, c=512, s=2, gs=0, br=0.25),
-            interleave_blocks(types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25),
-            interleave_blocks(types=('bottle', 'self_attn'), d=3, c=2048, s=1, gs=0, br=0.25),
+            ByoBlockCfg(type='bottle', d=2, c=256, s=1, gs=16, br=0.25),
+            ByoBlockCfg(type='bottle', d=2, c=512, s=2, gs=16, br=0.25),
+            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=16, br=0.25),
+            ByoBlockCfg(type='self_attn', d=2, c=2048, s=2, gs=16, br=0.25),
         ),
         stem_chs=64,
         stem_type='tiered',
-        stem_pool='',
+        stem_pool='maxpool',
         fixed_input_size=True,
         act_layer='silu',
+        attn_layer='eca',
         self_attn_layer='bottleneck',
         self_attn_kwargs=dict()
     ),
-    eca_botnext26ts=ByoModelCfg(
+    eca_botnext50ts=ByoModelCfg(
         blocks=(
             ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=16, br=0.25),
             ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=16, br=0.25),
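The interleave_blocks() calls above are what place self-attention inside a stage: the helper expands a single stage spec into per-block ByoBlockCfg entries, alternating the two types. A small sketch of the expansion, assuming the helper is imported from timm's byobnet module as byoanet.py itself does:

    from timm.models.byobnet import ByoBlockCfg, interleave_blocks  # import path assumed

    stage = interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25)
    for blk in stage:
        print(blk.type, blk.d, blk.c, blk.s)
    # With d=2 and default settings this yields two single-block configs,
    # 'bottle' then 'self_attn', so the stage ends in a self-attention block.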
@@ -208,27 +228,37 @@ def _create_byoanet(variant, cfg_variant=None, pretrained=False, **kwargs):
 @register_model
 def botnet26t_256(pretrained=False, **kwargs):
     """ Bottleneck Transformer w/ ResNet26-T backbone. Bottleneck attn in final two stages.
+    FIXME 26t variant was mixed up with 50t arch cfg, retraining and determining why so low
     """
     kwargs.setdefault('img_size', 256)
     return _create_byoanet('botnet26t_256', 'botnet26t', pretrained=pretrained, **kwargs)
 
 
 @register_model
-def botnet50ts_256(pretrained=False, **kwargs):
-    """ Bottleneck Transformer w/ ResNet50-T backbone, silu act. Bottleneck attn in final two stages.
+def botnet50t_256(pretrained=False, **kwargs):
+    """ Bottleneck Transformer w/ ResNet50-T backbone. Bottleneck attn in final two stages.
     """
     kwargs.setdefault('img_size', 256)
-    return _create_byoanet('botnet50ts_256', 'botnet50ts', pretrained=pretrained, **kwargs)
+    return _create_byoanet('botnet50t_256', 'botnet50t', pretrained=pretrained, **kwargs)
 
 
 @register_model
 def eca_botnext26ts_256(pretrained=False, **kwargs):
     """ Bottleneck Transformer w/ ResNet26-T backbone, silu act, Bottleneck attn in final two stages.
+    FIXME 26ts variant was mixed up with 50ts arch cfg, retraining and determining why so low
     """
     kwargs.setdefault('img_size', 256)
     return _create_byoanet('eca_botnext26ts_256', 'eca_botnext26ts', pretrained=pretrained, **kwargs)
 
 
+@register_model
+def eca_botnext50ts_256(pretrained=False, **kwargs):
+    """ Bottleneck Transformer w/ ResNet50-T backbone, silu act, Bottleneck attn in final two stages.
+    """
+    kwargs.setdefault('img_size', 256)
+    return _create_byoanet('eca_botnext50ts_256', 'eca_botnext50ts', pretrained=pretrained, **kwargs)
+
+
 @register_model
 def halonet_h1(pretrained=False, **kwargs):
     """ HaloNet-H1. Halo attention in all stages as per the paper.