def __init__(self, rng, input, filter_shape, image_shape, activation,
             padding, W=None, b=None, b_v=0., stride=(1, 1)):
    """Implement a convolution layer. No pooling.

    Parameters
    ----------
    rng : numpy.random.RandomState or None
        RNG for weight initialization; a fixed-seed RNG (23455) is
        created when None.
    input : Theano symbolic 4D tensor
        Input minibatch, expected to match ``image_shape``.
    filter_shape : tuple/list of 4 ints
        (n_filters, n_input_feature_maps, filter_height, filter_width).
    image_shape : tuple/list of 4 ints
        (batch_size, n_input_feature_maps, image_height, image_width).
    activation : callable or None
        Elementwise non-linearity applied to the convolution output;
        identity when None.
    padding : str or tuple
        Forwarded to ``conv2d`` as ``border_mode``.
    W, b : Theano shared variables or None
        Pre-existing parameters; fresh ones are initialized when None.
    b_v : float
        Initial bias value, used only when ``b`` is None.
    stride : tuple of 2 ints
        Forwarded to ``conv2d`` as ``subsample``.
    """
    # Number of input feature maps must agree between image and filters.
    assert image_shape[1] == filter_shape[1]
    self.input = input
    self.x = input
    # fan_in / fan_out drive a Glorot-style uniform init bound:
    # each unit receives fan_in inputs; each lower-layer unit feeds
    # fan_out gradient paths.
    fan_in = numpy.prod(filter_shape[1:])
    fan_out = (filter_shape[0] * numpy.prod(filter_shape[2:]))
    # initialize weights with random weights
    W_bound = numpy.sqrt(6. / (fan_in + fan_out))
    if rng is None:
        # Deterministic fallback so runs are reproducible by default.
        rng = numpy.random.RandomState(23455)
    if W is None:
        W = theano.shared(
            numpy.asarray(
                rng.uniform(low=-W_bound, high=W_bound, size=filter_shape),
                dtype=theano.config.floatX
            ),
            name="w_conv",
            borrow=True
        )
    if b is None:
        # One bias per output feature map, all initialized to b_v.
        b_v = (
            numpy.ones(
                (filter_shape[0],)) * b_v).astype(theano.config.floatX)
        b = theano.shared(value=b_v, name="b_conv", borrow=True)
    self.W = W
    self.b = b
    conv_out = conv2d(
        input=self.x,
        filters=self.W,
        input_shape=image_shape,
        filter_shape=filter_shape,
        border_mode=padding,
        subsample=stride
    )
    # Broadcast the per-feature-map bias over batch and both spatial dims.
    linear = conv_out + self.b.dimshuffle('x', 0, 'x', 'x')
    self.output = activation(linear) if activation is not None else linear
    self.params = [self.W, self.b]