# assumes module-level imports: `import lasagne` and
# `from lasagne.layers import batch_norm`; `Deconv2DLayer` is sketched below
def create_model(self, X, Z, n_dim, n_out, n_chan=1):
    # hyper-parameters
    n_lat = 100      # number of latent variables
    n_g_hid1 = 1024  # size of hidden layer in generator layer 1
    n_g_hid2 = 128   # number of feature maps in generator layer 2
    n_out = n_dim * n_dim * n_chan  # total dimensionality of output
    if self.model == 'gaussian':
        raise Exception('Gaussian variables currently not supported in GAN')
    # create the generator network: project the latent code to a 7x7
    # feature map, then upsample twice with stride-2 deconvolutions
    l_g_in = lasagne.layers.InputLayer(shape=(None, n_lat), input_var=Z)
    l_g_hid1 = batch_norm(lasagne.layers.DenseLayer(l_g_in, n_g_hid1))
    l_g_hid2 = batch_norm(lasagne.layers.DenseLayer(l_g_hid1, n_g_hid2*7*7))
    l_g_hid2 = lasagne.layers.ReshapeLayer(l_g_hid2, ([0], n_g_hid2, 7, 7))
    l_g_dc1 = batch_norm(Deconv2DLayer(l_g_hid2, 64, 5, stride=2, pad=2))
    l_g = Deconv2DLayer(l_g_dc1, n_chan, 5, stride=2, pad=2,
                        nonlinearity=lasagne.nonlinearities.sigmoid)
    print("Generator output:", l_g.output_shape)
    # create the discriminator network: two stride-2 convolutions that
    # mirror the generator, a dense layer, and a single sigmoid unit
    lrelu = lasagne.nonlinearities.LeakyRectify(0.2)
    l_d_in = lasagne.layers.InputLayer(shape=(None, n_chan, n_dim, n_dim),
                                       input_var=X)
    l_d_hid1 = batch_norm(lasagne.layers.Conv2DLayer(
        l_d_in, num_filters=64, filter_size=5, stride=2, pad=2,
        nonlinearity=lrelu, name='l_d_hid1'))
    l_d_hid2 = batch_norm(lasagne.layers.Conv2DLayer(
        l_d_hid1, num_filters=128, filter_size=5, stride=2, pad=2,
        nonlinearity=lrelu, name='l_d_hid2'))
    l_d_hid3 = batch_norm(lasagne.layers.DenseLayer(l_d_hid2, 1024,
                                                    nonlinearity=lrelu))
    l_d = lasagne.layers.DenseLayer(l_d_hid3, 1,
                                    nonlinearity=lasagne.nonlinearities.sigmoid)
    print("Discriminator output:", l_d.output_shape)
    return l_g, l_d
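
`Deconv2DLayer` is not part of Lasagne's built-in layers; the snippet assumes a helper defined elsewhere in the file. A compatible sketch, adapted from the transposed-convolution layer in Lasagne's DCGAN example (it expresses the deconvolution as the gradient of a convolution via Theano's AbstractConv2d_gradInputs), might look as follows; the Orthogonal initializer and the output-shape formula are assumptions carried over from that example:

import lasagne
import theano.tensor as T

class Deconv2DLayer(lasagne.layers.Layer):
    """Strided transposed convolution ('deconvolution')."""
    def __init__(self, incoming, num_filters, filter_size, stride=1, pad=0,
                 nonlinearity=lasagne.nonlinearities.rectify, **kwargs):
        super(Deconv2DLayer, self).__init__(incoming, **kwargs)
        self.num_filters = num_filters
        self.filter_size = lasagne.utils.as_tuple(filter_size, 2, int)
        self.stride = lasagne.utils.as_tuple(stride, 2, int)
        self.pad = lasagne.utils.as_tuple(pad, 2, int)
        self.W = self.add_param(
            lasagne.init.Orthogonal(),
            (self.input_shape[1], num_filters) + self.filter_size, name='W')
        self.b = self.add_param(
            lasagne.init.Constant(0), (num_filters,), name='b')
        self.nonlinearity = (nonlinearity if nonlinearity is not None
                             else lasagne.nonlinearities.identity)

    def get_output_shape_for(self, input_shape):
        # inverse of the strided-convolution shape formula; with
        # filter_size=5, stride=2, pad=2 this maps 7x7 -> 14x14 -> 28x28
        shape = tuple(i * s - 2 * p + f - 1
                      for i, s, p, f in zip(input_shape[2:], self.stride,
                                            self.pad, self.filter_size))
        return (input_shape[0], self.num_filters) + shape

    def get_output_for(self, input, **kwargs):
        # the transposed convolution is the gradient of a forward
        # convolution with respect to its inputs
        op = T.nnet.abstract_conv.AbstractConv2d_gradInputs(
            imshp=self.output_shape,
            kshp=(self.input_shape[1], self.num_filters) + self.filter_size,
            subsample=self.stride, border_mode=self.pad,
            filter_flip=False)
        conved = op(self.W, input, self.output_shape[2:])
        if self.b is not None:
            conved += self.b.dimshuffle('x', 0, 'x', 'x')
        return self.nonlinearity(conved)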
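
For reference, a minimal sketch of how the two returned networks might be wired into the usual GAN objectives; `model` (an instance of the class that owns create_model) and the MNIST sizing n_dim=28 are assumptions for illustration:

import theano
import theano.tensor as T
import lasagne

X = T.tensor4('X')  # real images, shape (batch, n_chan, n_dim, n_dim)
Z = T.matrix('Z')   # latent noise, shape (batch, n_lat)

# `model` is a hypothetical instance of the class defining create_model
l_g, l_d = model.create_model(X, Z, n_dim=28, n_out=784, n_chan=1)

# discriminator probabilities on real and generated batches
p_real = lasagne.layers.get_output(l_d)
samples = lasagne.layers.get_output(l_g)
p_fake = lasagne.layers.get_output(l_d, inputs=samples)

# standard GAN losses (non-saturating generator objective)
loss_g = -T.log(p_fake).mean()
loss_d = -(T.log(p_real) + T.log(1. - p_fake)).mean()

# Adam with the DCGAN settings (lr=2e-4, beta1=0.5)
updates_g = lasagne.updates.adam(
    loss_g, lasagne.layers.get_all_params(l_g, trainable=True),
    learning_rate=2e-4, beta1=0.5)
updates_d = lasagne.updates.adam(
    loss_d, lasagne.layers.get_all_params(l_d, trainable=True),
    learning_rate=2e-4, beta1=0.5)

train_g = theano.function([Z], loss_g, updates=updates_g)
train_d = theano.function([X, Z], loss_d, updates=updates_d)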