
Commit 0f3db97

Mish is now from PyTorch
1 parent 0fbd5e6 commit 0f3db97

File tree

2 files changed: +11 -8 lines changed


model_constructor/activations.py

Lines changed: 10 additions & 7 deletions
@@ -2,6 +2,7 @@
 import torch
 from torch import nn as nn
 from torch.nn import functional as F
+from torch.nn import Mish


 __all__ = ['mish', 'Mish', 'mish_jit', 'MishJit', 'mish_jit_fwd', 'mish_jit_bwd', 'MishJitAutoFn', 'mish_me', 'MishMe',
@@ -16,14 +17,16 @@ def mish(x, inplace: bool = False):
     return x.mul(F.softplus(x).tanh())


-class Mish(nn.Module):
-    """Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681"""
-    def __init__(self, inplace: bool = False):
-        """NOTE: inplace variant not working """
-        super(Mish, self).__init__()
+# from torch v 1.9 Mish is in pytorch.

-    def forward(self, x):
-        return mish(x)
+# class Mish(nn.Module):
+#     """Mish: A Self Regularized Non-Monotonic Neural Activation Function - https://arxiv.org/abs/1908.08681"""
+#     def __init__(self, inplace: bool = False):
+#         """NOTE: inplace variant not working """
+#         super(Mish, self).__init__()
+
+#     def forward(self, x):
+#         return mish(x)


 @torch.jit.script
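
For reference, torch.nn.Mish has shipped with PyTorch since v1.9 and computes the same x * tanh(softplus(x)) as the removed module, so the import swap is a drop-in replacement. A minimal sketch (assuming PyTorch >= 1.9 is installed) comparing the built-in module against the formula the removed class used:

import torch
import torch.nn.functional as F
from torch.nn import Mish

x = torch.randn(4, 8)

# Built-in activation module, available since PyTorch 1.9.
built_in = Mish()(x)

# The formula the removed custom class delegated to: x * tanh(softplus(x)).
hand_written = x.mul(F.softplus(x).tanh())

assert torch.allclose(built_in, hand_written)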

model_constructor/yaresnet.py

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@
 from collections import OrderedDict
 from .layers import SEBlock, ConvLayer, act_fn, noop, SimpleSelfAttention
 from .net import Net
-from .activations import Mish
+from torch.nn import Mish


 __all__ = ['YaResBlock', 'yaresnet_parameters', 'yaresnet34', 'yaresnet50']
