def __init__(self, incoming, num_units,
             W=init.GlorotUniform(), b=init.Constant(0.),
             nonlinearity=nonlinearities.rectify, name=None, **kwargs):
"""
An extention of a regular dense layer, enables the sharing of weight between two tied hidden layers. In order
to tie two layers, the first should be initialized with an initialization function for the weights, the other
should get the weight matrix of the first at input
:param incoming: the input layer of this layer
:param num_units: output size
:param W: weight initialization, can be a initialization function or a given matrix
:param b: bias initialization
:param nonlinearity: non linearity function
:param name: string
:param kwargs:
"""
    super(TiedDenseLayer, self).__init__(incoming, num_units, W, b, nonlinearity,
                                         name=name, **kwargs)
    # If W was passed as an existing matrix rather than an initializer, the weights are
    # shared with (and owned by) another layer, so exclude them from this layer's
    # trainable and regularizable parameters.
    if not isinstance(W, lasagne.init.Initializer):
        self.params[self.W].remove('trainable')
        self.params[self.W].remove('regularizable')
    # Likewise for a bias vector that was passed in instead of being initialized here.
    if self.b is not None and not isinstance(b, lasagne.init.Initializer):
        self.params[self.b].remove('trainable')
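
A minimal usage sketch of the tying described in the docstring, wiring a small tied-weight autoencoder. The layer sizes and the use of `l_enc.W.T` as the decoder's weights are illustrative assumptions, and passing a Theano expression as `W` requires a Lasagne version whose parameter creation accepts expressions:

# --- usage sketch (illustrative, not part of the layer itself) ---
import lasagne
from lasagne.layers import InputLayer

# Encoder: W is created from the default GlorotUniform initializer, so it stays trainable.
l_in = InputLayer(shape=(None, 784))
l_enc = TiedDenseLayer(l_in, num_units=128)

# Decoder: reuses the encoder's weight matrix (transposed), so the weights are tied.
# Because W is not an Initializer here, it is dropped from the trainable/regularizable tags.
l_dec = TiedDenseLayer(l_enc, num_units=784, W=l_enc.W.T)

# Only the encoder's W and the two biases should be reported as trainable.
print(lasagne.layers.get_all_params(l_dec, trainable=True))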