@@ -256,8 +256,9 @@ def forward(self, x):
 class EvoNorm2dS1(nn.Module):
     def __init__(
             self, num_features, groups=32, group_size=None,
-            apply_act=True, act_layer=nn.SiLU, eps=1e-5, **_):
+            apply_act=True, act_layer=None, eps=1e-5, **_):
         super().__init__()
+        act_layer = act_layer or nn.SiLU
         self.apply_act = apply_act  # apply activation (non-linearity)
         if act_layer is not None and apply_act:
             self.act = create_act_layer(act_layer)
@@ -290,7 +291,7 @@ def forward(self, x):
 class EvoNorm2dS1a(EvoNorm2dS1):
     def __init__(
             self, num_features, groups=32, group_size=None,
-            apply_act=True, act_layer=nn.SiLU, eps=1e-3, **_):
+            apply_act=True, act_layer=None, eps=1e-3, **_):
         super().__init__(
             num_features, groups=groups, group_size=group_size, apply_act=apply_act, act_layer=act_layer, eps=eps)

@@ -305,8 +306,9 @@ def forward(self, x):
 class EvoNorm2dS2(nn.Module):
     def __init__(
             self, num_features, groups=32, group_size=None,
-            apply_act=True, act_layer=nn.SiLU, eps=1e-5, **_):
+            apply_act=True, act_layer=None, eps=1e-5, **_):
         super().__init__()
+        act_layer = act_layer or nn.SiLU
         self.apply_act = apply_act  # apply activation (non-linearity)
         if act_layer is not None and apply_act:
             self.act = create_act_layer(act_layer)
@@ -338,7 +340,7 @@ def forward(self, x):
 class EvoNorm2dS2a(EvoNorm2dS2):
     def __init__(
             self, num_features, groups=32, group_size=None,
-            apply_act=True, act_layer=nn.SiLU, eps=1e-3, **_):
+            apply_act=True, act_layer=None, eps=1e-3, **_):
         super().__init__(
             num_features, groups=groups, group_size=group_size, apply_act=apply_act, act_layer=act_layer, eps=eps)

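The net effect of this change, for both the S1 and S2 variants, is that the activation default moves out of the constructor signature and is resolved inside __init__, so a call that leaves act_layer unset still ends up with nn.SiLU, while an explicit act_layer continues to override it. A minimal usage sketch of that behavior follows; the timm.layers import path and the nn.GELU override are assumptions for illustration, not part of this commit:

import torch
import torch.nn as nn
from timm.layers import EvoNorm2dS1  # import path is an assumption; older releases expose it via timm.models.layers

x = torch.randn(2, 64, 8, 8)

norm = EvoNorm2dS1(num_features=64)             # act_layer=None -> resolved to nn.SiLU inside __init__
y = norm(x)                                     # output shape stays (2, 64, 8, 8)

norm_gelu = EvoNorm2dS1(64, act_layer=nn.GELU)  # an explicit act_layer still overrides the default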