import tensorflow as tf


def ReLU(name='ReLU'):
    """Layer factory returning a function that applies a ReLU activation."""
    def layer(x, is_training=True):
        # is_training is accepted for interface consistency with other layer
        # factories but is unused: ReLU behaves the same at train and test time.
        with tf.variable_op_scope([x], None, name):
            return tf.nn.relu(x)
    return layer
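# Minimal usage sketch of the factory pattern above, assuming a TF 1.x-style
# graph; the `inputs` placeholder and its shape are hypothetical.
#
#     relu = ReLU(name='relu1')                          # build the layer fn once
#     inputs = tf.placeholder(tf.float32, [None, 128])   # hypothetical input tensor
#     activated = relu(inputs, is_training=True)         # apply it to the tensor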