@@ -36,22 +36,22 @@ def _cfg(url='', **kwargs):
     'botnet26t_256': _cfg(
         url='',
         fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
-    'botnet50t_256': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/botnet50t_256-a0e6c3b1.pth',
+    'botnet50ts_256': _cfg(
+        url='',
         fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
     'eca_botnext26ts_256': _cfg(
         url='',
         fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
-    'eca_botnext50ts_256': _cfg(
-        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_botnext26ts_256-fb3bf984.pth',
-        fixed_input_size=True, input_size=(3, 256, 256), pool_size=(8, 8)),
 
     'halonet_h1': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'halonet26t': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/halonet26t_256-9b4bf0b3.pth',
         input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
-    'sehalonet33ts': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
-    'halonet50ts': _cfg(url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
+    'sehalonet33ts': _cfg(
+        url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/sehalonet33ts_256-87e053f9.pth',
+        input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256), crop_pct=0.94),
+    'halonet50ts': _cfg(
+        url='', input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
     'eca_halonext26ts': _cfg(
         url='https://github.com/rwightman/pytorch-image-models/releases/download/v0.1-attn-weights/eca_halonext26ts_256-1e55880b.pth',
         input_size=(3, 256, 256), pool_size=(8, 8), min_input_size=(3, 256, 256)),
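For reference, `_cfg` (the enclosing function shown in the hunk header) follows the usual timm pattern: it returns a pretrained-config dict of shared defaults with per-model overrides merged on top, and an empty `url` means no pretrained weights have been published for that variant yet. A minimal sketch of the pattern; the exact default keys and values below are assumptions for illustration, not this file's verbatim code:

def _cfg(url='', **kwargs):
    # Shared defaults; each default_cfgs entry above overrides what it needs
    # (input_size, pool_size, fixed_input_size, min_input_size, crop_pct, ...).
    return {
        'url': url, 'num_classes': 1000, 'input_size': (3, 224, 224), 'pool_size': (7, 7),
        'crop_pct': 0.95, 'interpolation': 'bicubic',
        'mean': (0.485, 0.456, 0.406), 'std': (0.229, 0.224, 0.225),  # ImageNet stats
        **kwargs,  # per-model overrides win
    }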
@@ -78,16 +78,17 @@ def _cfg(url='', **kwargs):
         self_attn_layer='bottleneck',
         self_attn_kwargs=dict()
     ),
-    botnet50t=ByoModelCfg(
+    botnet50ts=ByoModelCfg(
         blocks=(
             ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=0, br=0.25),
-            ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=0, br=0.25),
-            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=0, br=0.25),
-            ByoBlockCfg(type='self_attn', d=3, c=2048, s=2, gs=0, br=0.25),
+            interleave_blocks(types=('bottle', 'self_attn'), every=4, d=4, c=512, s=2, gs=0, br=0.25),
+            interleave_blocks(types=('bottle', 'self_attn'), d=6, c=1024, s=2, gs=0, br=0.25),
+            interleave_blocks(types=('bottle', 'self_attn'), d=3, c=2048, s=2, gs=0, br=0.25),
         ),
         stem_chs=64,
         stem_type='tiered',
         stem_pool='maxpool',
+        act_layer='silu',
         fixed_input_size=True,
         self_attn_layer='bottleneck',
         self_attn_kwargs=dict()
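The reworked botnet50ts config leans on `interleave_blocks`, which expands into a tuple of single-block `ByoBlockCfg` entries mixing the two types within one stage: with `every=4, d=4` the stage should come out as three 'bottle' blocks followed by one 'self_attn' block. A standalone sketch of that layout logic (illustrative only, not timm's exact implementation):

def layout(types, d, every):
    # Place the second block type at every `every`-th position; if `every`
    # exceeds the stage depth, fall back to a single one at the end.
    attn_at = set(range(every - 1, d, every)) or {d - 1}
    return [types[1] if i in attn_at else types[0] for i in range(d)]

print(layout(('bottle', 'self_attn'), d=4, every=4))  # ['bottle', 'bottle', 'bottle', 'self_attn']
print(layout(('bottle', 'self_attn'), d=6, every=2))  # ['bottle', 'self_attn'] * 3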
@@ -108,22 +109,6 @@ def _cfg(url='', **kwargs):
         self_attn_layer='bottleneck',
         self_attn_kwargs=dict()
     ),
-    eca_botnext50ts=ByoModelCfg(
-        blocks=(
-            ByoBlockCfg(type='bottle', d=3, c=256, s=1, gs=16, br=0.25),
-            ByoBlockCfg(type='bottle', d=4, c=512, s=2, gs=16, br=0.25),
-            interleave_blocks(types=('bottle', 'self_attn'), d=2, c=1024, s=2, gs=16, br=0.25),
-            ByoBlockCfg(type='self_attn', d=3, c=2048, s=2, gs=16, br=0.25),
-        ),
-        stem_chs=64,
-        stem_type='tiered',
-        stem_pool='maxpool',
-        fixed_input_size=True,
-        act_layer='silu',
-        attn_layer='eca',
-        self_attn_layer='bottleneck',
-        self_attn_kwargs=dict()
-    ),
 
     halonet_h1=ByoModelCfg(
         blocks=(
@@ -227,38 +212,31 @@ def _create_byoanet(variant, cfg_variant=None, pretrained=False, **kwargs):
 
 @register_model
 def botnet26t_256(pretrained=False, **kwargs):
-    """ Bottleneck Transformer w/ ResNet26-T backbone. Bottleneck attn in final two stages.
-    FIXME 26t variant was mixed up with 50t arch cfg, retraining and determining why so low
+    """ Bottleneck Transformer w/ ResNet26-T backbone.
+    NOTE: this isn't performing well, may remove
     """
     kwargs.setdefault('img_size', 256)
     return _create_byoanet('botnet26t_256', 'botnet26t', pretrained=pretrained, **kwargs)
 
 
 @register_model
-def botnet50t_256(pretrained=False, **kwargs):
-    """ Bottleneck Transformer w/ ResNet50-T backbone. Bottleneck attn in final two stages.
+def botnet50ts_256(pretrained=False, **kwargs):
+    """ Bottleneck Transformer w/ ResNet50-T backbone, silu act.
+    NOTE: this isn't performing well, may remove
     """
     kwargs.setdefault('img_size', 256)
-    return _create_byoanet('botnet50t_256', 'botnet50t', pretrained=pretrained, **kwargs)
+    return _create_byoanet('botnet50ts_256', 'botnet50ts', pretrained=pretrained, **kwargs)
 
 
 @register_model
 def eca_botnext26ts_256(pretrained=False, **kwargs):
-    """ Bottleneck Transformer w/ ResNet26-T backbone, silu act, Bottleneck attn in final two stages.
-    FIXME 26ts variant was mixed up with 50ts arch cfg, retraining and determining why so low
+    """ Bottleneck Transformer w/ ResNet26-T backbone, silu act.
+    NOTE: this isn't performing well, may remove
     """
     kwargs.setdefault('img_size', 256)
     return _create_byoanet('eca_botnext26ts_256', 'eca_botnext26ts', pretrained=pretrained, **kwargs)
 
 
-@register_model
-def eca_botnext50ts_256(pretrained=False, **kwargs):
-    """ Bottleneck Transformer w/ ResNet26-T backbone, silu act, Bottleneck attn in final two stages.
-    """
-    kwargs.setdefault('img_size', 256)
-    return _create_byoanet('eca_botnext50ts_256', 'eca_botnext50ts', pretrained=pretrained, **kwargs)
-
-
 @register_model
 def halonet_h1(pretrained=False, **kwargs):
     """ HaloNet-H1. Halo attention in all stages as per the paper.
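With the rename in place, the variants are built through the standard timm factory. A quick smoke test, assuming a timm build that includes this commit:

import timm
import torch

# botnet50ts_256 has no released weights at this point (url=''), so
# pretrained=True would fail; halonet26t does ship weights in this diff.
model = timm.create_model('botnet50ts_256', pretrained=False)
model.eval()

# fixed_input_size=True: these configs expect 256x256 inputs
x = torch.randn(1, 3, 256, 256)
with torch.no_grad():
    out = model(x)
print(out.shape)  # torch.Size([1, 1000])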