- Notifications
You must be signed in to change notification settings - Fork 1.6k
Closed
Description
class GroupConv(Layer):
    """Grouped 2-D convolution layer (Caffe/AlexNet-style group convolution).

    Splits the input channels into ``groups`` groups, convolves each group
    with its own filter bank, and concatenates the per-group results along
    the channel axis. With ``groups == 1`` this is an ordinary convolution.

    Parameters
    ----------
    layer : Layer
        Previous layer; its ``outputs`` tensor is used as the input
        (assumed NHWC layout — channels last).
    filters : int
        Total number of output channels; must be divisible by ``groups``.
    size : int
        Spatial kernel size (square kernel, ``size x size``).
    stride : int
        Spatial stride, applied in both height and width ('VALID' padding).
    groups : int
        Number of channel groups; must evenly divide both the input
        channel count and ``filters``.
    act : callable
        Activation applied to the biased convolution output.
    name : str
        Variable-scope / op name for the layer.

    Raises
    ------
    ValueError
        If the input channel count or ``filters`` is not divisible by
        ``groups``.
    """

    def __init__(
            self,
            layer=None,
            filters=0,
            size=3,
            stride=2,
            groups=2,
            act=tf.identity,
            name='groupconv',
    ):
        Layer.__init__(self, name=name)
        self.inputs = layer.outputs

        # Plain VALID-padded conv2d shared by the single-group and grouped paths.
        groupConv = lambda i, k: tf.nn.conv2d(
            i, k, strides=[1, stride, stride, 1], padding='VALID')

        channels = int(self.inputs.get_shape()[-1])

        # Fail early with a clear message instead of a cryptic TF shape error
        # from tf.split / tf.get_variable further down.
        if channels % groups != 0:
            raise ValueError(
                "Input channels (%d) must be divisible by groups (%d)"
                % (channels, groups))
        if filters % groups != 0:
            raise ValueError(
                "filters (%d) must be divisible by groups (%d)"
                % (filters, groups))

        with tf.variable_scope(name) as vs:
            # BUG FIX: use integer (floor) division. In Python 3,
            # `channels / groups` is a float and tf.get_variable rejects
            # non-integer entries in `shape`.
            We = tf.get_variable(
                name='weights',
                shape=[size, size, channels // groups, filters],
                initializer=tf.truncated_normal_initializer(stddev=0.03),
                dtype=tf.float32,
                trainable=True)
            bi = tf.get_variable(
                name='biases',
                shape=[filters, ],
                initializer=tf.constant_initializer(value=0.1),
                dtype=tf.float32,
                trainable=True)

            if groups == 1:
                conv = groupConv(self.inputs, We)
            else:
                # Split both the input and the kernel bank along the channel
                # axis, convolve each group independently, then re-join the
                # per-group outputs along channels.
                inputGroups = tf.split(
                    axis=3, num_or_size_splits=groups, value=self.inputs)
                weightsGroups = tf.split(
                    axis=3, num_or_size_splits=groups, value=We)
                convGroups = [
                    groupConv(i, k) for i, k in zip(inputGroups, weightsGroups)]
                conv = tf.concat(axis=3, values=convGroups)

            # Attach the layer name to the bias-add op when a name was given.
            if name is not None:
                conv = tf.add(conv, bi, name=name)
            else:
                conv = tf.add(conv, bi)

            self.outputs = act(conv)

        # Propagate bookkeeping lists from the previous layer (TensorLayer
        # convention) and register this layer's output and parameters.
        self.all_layers = list(layer.all_layers)
        self.all_params = list(layer.all_params)
        self.all_drop = dict(layer.all_drop)
        self.all_layers.extend([self.outputs])
        self.all_params.extend([We, bi])
This is an implementation of Caffe's grouped convolution — a convolution design that helps reduce overfitting. I previously used it in a Siamese-network paper implementation.
Metadata
Metadata
Assignees
Labels
No labels