# D:/workplace/python
# -*- coding: utf-8 -*-
# @File    : mobilenet_base.py
# @Author  : Guido LuXiaohao
# @Date    : 2020/4/8
# @Software: PyCharm
from keras import backend as K
from keras.layers import (Activation, Add, BatchNormalization, Conv2D, Dense,
                          DepthwiseConv2D, GlobalAveragePooling2D, Multiply,
                          Reshape)
class MobileNetBase:
    """Shared building blocks for MobileNetV3-style architectures.

    Concrete architectures subclass this and assemble the network in
    ``build``; the helpers below implement the conv-BN-activation stem,
    the squeeze-and-excite block and the inverted-residual bottleneck.
    """

    def __init__(self, shape, n_class, alpha=1.0):
        """Init.

        # Arguments
            shape: An integer or tuple/list of 3 integers, shape
                of input tensor.
            n_class: Integer, number of classes.
            alpha: Float, width multiplier applied to the bottleneck
                output channels.
        """
        self.shape = shape
        self.n_class = n_class
        self.alpha = alpha

    def _relu6(self, x):
        """ReLU6 activation: min(max(x, 0), 6)."""
        return K.relu(x, max_value=6.0)

    def _hard_swish(self, x):
        """Hard swish: x * ReLU6(x + 3) / 6, a cheap swish approximation."""
        return x * K.relu(x + 3.0, max_value=6.0) / 6.0

    def _return_activation(self, x, nl):
        """Apply the chosen nonlinearity.

        # Arguments
            x: Tensor, input tensor.
            nl: String or None. 'HS' for hard swish, 'RE' for ReLU6,
                None for plain ReLU.

        # Returns
            Output tensor.
        """
        # The three cases are mutually exclusive, so chain with elif;
        # `is None` replaces the non-idiomatic `== None` comparison.
        if nl == 'HS':
            x = Activation(self._hard_swish)(x)
        elif nl == 'RE':
            x = Activation(self._relu6)(x)
        elif nl is None:
            x = Activation('relu')(x)
        return x

    def _conv_block(self, inputs, filters, kernel, strides, nl):
        """2D convolution followed by batch normalization and activation.

        # Arguments
            inputs: Tensor, input tensor of conv layer.
            filters: Integer, the dimensionality of the output space.
            kernel: An integer or tuple/list of 2 integers, specifying the
                width and height of the 2D convolution window.
            strides: An integer or tuple/list of 2 integers, specifying
                the strides of the convolution along the width and height.
                Can be a single integer to specify the same value for
                all spatial dimensions.
            nl: String, nonlinearity activation type
                (see `_return_activation`).

        # Returns
            Output tensor.
        """
        channel_axis = 1 if K.image_data_format() == 'channels_first' else -1

        x = Conv2D(filters, kernel, padding='same', strides=strides)(inputs)
        x = BatchNormalization(axis=channel_axis)(x)
        return self._return_activation(x, nl)

    def _squeeze(self, inputs):
        """Squeeze-and-excitation block.

        Gates each channel of `inputs` by a learned weight in [0, 1]
        computed from the globally pooled features.

        # Arguments
            inputs: Tensor, input tensor (channels-last is assumed by the
                `(1, 1, C)` Reshape target below).

        # Returns
            Channel-reweighted tensor with the same shape as `inputs`.
        """
        input_channels = int(inputs.shape[-1])

        x = GlobalAveragePooling2D()(inputs)
        x = Dense(input_channels, activation='relu')(x)
        x = Dense(input_channels, activation='hard_sigmoid')(x)
        x = Reshape((1, 1, input_channels))(x)
        return Multiply()([inputs, x])

    def _bottleneck(self, inputs, filters, kernel, e, s, squeeze, nl):
        """Inverted-residual bottleneck: expand -> depthwise -> project.

        # Arguments
            inputs: Tensor, input tensor of conv layer.
            filters: Integer, the dimensionality of the output space
                (before the `alpha` width multiplier).
            kernel: An integer or tuple/list of 2 integers, specifying the
                width and height of the 2D convolution window.
            e: Integer, number of channels after the 1x1 expansion conv.
            s: Integer, stride of the depthwise convolution.
            squeeze: Boolean, whether to insert a squeeze-and-excite block.
            nl: String, nonlinearity activation type.

        # Returns
            Output tensor.
        """
        channel_axis = 1 if K.image_data_format() == 'channels_first' else -1
        input_shape = K.int_shape(inputs)

        tchannel = int(e)                     # expanded (hidden) channels
        cchannel = int(self.alpha * filters)  # projected output channels

        # The residual shortcut is only valid when the spatial size is kept
        # (stride 1) and the channel counts match.  The original compared
        # input_shape[3] against the unscaled `filters`, which breaks for
        # channels_first data and for alpha != 1; compare the actual output
        # channel count on the actual channel axis instead.
        r = s == 1 and input_shape[channel_axis] == cchannel

        # 1x1 expansion.
        x = self._conv_block(inputs, tchannel, (1, 1), (1, 1), nl)

        # Depthwise spatial convolution.
        x = DepthwiseConv2D(kernel, strides=(s, s), depth_multiplier=1,
                            padding='same')(x)
        x = BatchNormalization(axis=channel_axis)(x)
        x = self._return_activation(x, nl)

        if squeeze:
            x = self._squeeze(x)

        # 1x1 linear projection (deliberately no activation).
        x = Conv2D(cchannel, (1, 1), strides=(1, 1), padding='same')(x)
        x = BatchNormalization(axis=channel_axis)(x)

        if r:
            x = Add()([x, inputs])

        return x

    def build(self):
        """Assemble the network; implemented by subclasses."""
        pass