Contents
Summary
How to use the Mish function in PyTorch
How to use the Mish activation function in Keras
Summary
A recent paper by Diganta Misra, "Mish: A Self Regularized Non-Monotonic Neural Activation Function", introduces a new deep learning activation function that improves final accuracy over both Swish (+0.494%) and ReLU (+1.671%).
The formula is as follows:

Mish(x) = x * tanh(softplus(x)) = x * tanh(ln(1 + e^x))
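To make the curve concrete, here is a quick evaluation of the formula at a few points (a minimal NumPy sketch, not from the paper):

import numpy as np

def mish_np(x):
    # x * tanh(softplus(x)); log1p(exp(x)) is softplus
    return x * np.tanh(np.log1p(np.exp(x)))

print(mish_np(np.array([-2.0, 0.0, 2.0])))  # ≈ [-0.2525, 0.0, 1.9440]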
- How to use the Mish function in PyTorch
Define the Mish function:
import torch

class Mish(torch.nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        # x * tanh(softplus(x))
        x = x * (torch.tanh(torch.nn.functional.softplus(x)))
        return x
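As a side note, PyTorch 1.9 and later ship this activation built in, so on those releases the hand-rolled module above can be replaced directly:

import torch.nn as nn

act = nn.Mish()  # built-in since PyTorch 1.9, equivalent to the module above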
Call the function:
class Path1_64(nn.Module):
    # ConvBN and ResBlock are custom blocks defined elsewhere in the project
    def __init__(self):
        super().__init__()
        self.conv1 = ConvBN(32, 64, 3)
        self.conv2 = ConvBN(64, 64, [1, 9])
        self.conv3 = ConvBN(64, 64, [9, 1])
        self.conv4 = ConvBN(64, 64, 1)
        self.resBlock = ResBlock(ch=64, nblocks=2)
        self.conv5 = ConvBN(64, 64, [1, 7])
        self.conv6 = ConvBN(64, 64, [7, 1])
        self.conv7 = ConvBN(64, 64, 1)
        self.relu = Mish()

    def forward(self, input):
        x1 = self.conv1(input)
        x2 = self.conv2(x1)
        x3 = self.conv3(x2)
        x4 = self.conv4(x3)
        r1 = self.resBlock(x4)
        x5 = self.conv5(r1)
        x6 = self.conv6(x5)
        x7 = self.conv7(x6)
        x7 = self.relu(x7 + x4)  # Mish applied to the residual sum
        return x7
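The ConvBN and ResBlock blocks referenced above come from the surrounding project and are not defined here. For completeness, a ConvBN of that shape might look like the following (a hypothetical minimal sketch, not the project's actual code):

import torch.nn as nn

class ConvBN(nn.Module):
    # hypothetical Conv2d + BatchNorm + Mish block, for illustration only
    def __init__(self, in_ch, out_ch, kernel_size, stride=1):
        super().__init__()
        if isinstance(kernel_size, int):
            kernel_size = (kernel_size, kernel_size)
        padding = (kernel_size[0] // 2, kernel_size[1] // 2)  # 'same' padding for odd kernels
        self.conv = nn.Conv2d(in_ch, out_ch, kernel_size, stride, padding, bias=False)
        self.bn = nn.BatchNorm2d(out_ch)
        self.act = Mish()

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))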
Calling the Mish activation function works just like calling any other activation function: call it directly.
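For example (a minimal check using the Mish module defined above):

import torch

act = Mish()
x = torch.randn(2, 3)
y = act(x)  # applied elementwise; output shape matches the input
print(y.shape)  # torch.Size([2, 3])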
- How to use the Mish activation function in Keras
Define the Mish activation function:
import tensorflow as tf
from tensorflow.keras.layers import *
from tensorflow.keras.layers import Activation
from tensorflow.keras.utils import get_custom_objects

class Mish(Activation):
    def __init__(self, activation, **kwargs):
        super(Mish, self).__init__(activation, **kwargs)
        self.__name__ = 'Mish'

def mish(inputs):
    # x * tanh(softplus(x))
    return inputs * tf.math.tanh(tf.math.softplus(inputs))

# register 'Mish' so it can be used by name, e.g. Activation('Mish')
get_custom_objects().update({'Mish': Mish(mish)})
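Once registered, the string 'Mish' should resolve like a built-in activation name (a quick sanity check, assuming the registration above):

layer = Activation('Mish')
x = tf.constant([[-2.0, 0.0, 2.0]])
print(layer(x).numpy())  # ≈ [[-0.2525, 0.0, 1.9440]]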
Call the activation function:
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam

def bn_prelu(x):
    # BatchNormalization followed by the Mish activation registered above
    x = BatchNormalization(epsilon=1e-5)(x)
    x = Activation('Mish')(x)
    return x

def build_model(out_dims, input_shape=(100, 100, 3)):
    inputs_dim = Input(input_shape)
    x = Conv2D(32, (3, 3), strides=(2, 2), padding='same')(inputs_dim)
    x = bn_prelu(x)
    x = Conv2D(32, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = MaxPooling2D(pool_size=(2, 2))(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = Conv2D(64, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = MaxPooling2D(pool_size=(2, 2))(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = Conv2D(128, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = MaxPooling2D(pool_size=(2, 2))(x)
    x = Conv2D(256, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = Conv2D(256, (3, 3), strides=(1, 1), padding='same')(x)
    x = bn_prelu(x)
    x = GlobalAveragePooling2D()(x)
    dp_1 = Dropout(0.5)(x)
    fc2 = Dense(out_dims)(dp_1)
    fc2 = Activation('softmax')(fc2)  # note: the output activation here is softmax
    model = Model(inputs=inputs_dim, outputs=fc2)
    return model
model = build_model(2)  # build the model
optimizer = Adam(learning_rate=1e-3)  # create the optimizer and set its learning rate
model.compile(optimizer=optimizer, loss='sparse_categorical_crossentropy', metrics=['accuracy'])
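To confirm that everything wires up, a quick smoke test with random data (hypothetical shapes matching the (100, 100, 3) input and the 2 output classes above):

import numpy as np

x_dummy = np.random.rand(8, 100, 100, 3).astype('float32')
y_dummy = np.random.randint(0, 2, size=(8,))  # sparse integer labels
model.fit(x_dummy, y_dummy, epochs=1, batch_size=4)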