def get_output_for(self, input, **kwargs):
    # Flatten anything beyond the batch dimension into a single feature axis.
    if input.ndim > 2:
        input = input.flatten(2)
    # Apply the autoregressive mask elementwise before the affine transform,
    # so masked-out connections contribute nothing to the activation.
    activation = T.dot(input, self.W * self.weights_mask)
    if self.b is not None:
        activation = activation + self.b.dimshuffle('x', 0)
    return self.nonlinearity(activation)
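For intuition, here is a minimal sketch of what multiplying by `self.weights_mask` accomplishes (not from the original code; plain NumPy, with a hypothetical strictly upper-triangular mask): output unit j only receives input dimensions that come before it in the ordering, which is the autoregressive property MADE relies on.

import numpy as np

rng = np.random.RandomState(0)
W = rng.randn(4, 4)                    # 4 inputs -> 4 output units
mask = np.triu(np.ones((4, 4)), k=1)   # mask[i, j] = 1 iff i < j (illustrative)
x = rng.randn(2, 4)                    # batch of 2 samples

activation = x.dot(W * mask)           # masked connections contribute nothing
# Column 0 is all zeros: the first unit sees no inputs, as required
# for the first dimension of an autoregressive model.
assert np.allclose(activation[:, 0], 0.0)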
# Conditioning Masked Layer
# Currently not used.
# class CML(MaskedLayer):
#     def __init__(self, incoming, num_units, mask_generator, use_cond_mask=False,
#                  U=lasagne.init.GlorotUniform(), W=lasagne.init.GlorotUniform(),
#                  b=lasagne.init.Constant(0.), nonlinearity=lasagne.nonlinearities.rectify,
#                  **kwargs):
#         super(CML, self).__init__(incoming, num_units, mask_generator, W,
#                                   b, nonlinearity, **kwargs)
#         self.use_cond_mask = use_cond_mask
#         if use_cond_mask:
#             num_inputs = int(np.prod(self.input_shape[1:]))
#             self.U = self.add_param(spec=U,
#                                     shape=(num_inputs, num_units),
#                                     name='U',
#                                     trainable=True,
#                                     regularizable=False)
#
#     def get_output_for(self, input, **kwargs):
#         lin = self.lin_output = T.dot(input, self.W * self.weights_mask) + self.b
#         if self.use_cond_mask:
#             # Adds a learned, mask-respecting per-unit bias (see the sketch below).
#             lin = lin + T.dot(T.ones_like(input), self.U * self.weights_mask)
#         return lin if self.nonlinearity is None else self.nonlinearity(lin)
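Although CML is unused, its conditioning term is worth unpacking: `T.dot(T.ones_like(input), self.U * self.weights_mask)` produces the same row, the column sums of the masked U, repeated for every sample, so it acts as a learned per-unit bias that respects the mask's connectivity. A quick NumPy check (a sketch, not part of the original code):

import numpy as np

U = np.arange(12.).reshape(3, 4)       # 3 inputs, 4 units
mask = np.triu(np.ones((3, 4)), k=1)
x = np.random.randn(5, 3)              # any batch of 5 samples
cond = np.ones_like(x).dot(U * mask)   # shape (5, 4)
# Every row equals the column sums of the masked U.
assert np.allclose(cond, (U * mask).sum(axis=0))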
# MADE layer, adapted from M. Germain