def _build(self, input_shape):
    # Encoder/decoder layer stacks and two Gumbel-Softmax activations:
    # one for the main encoding path, one for re-encoding decoded outputs.
    _encoder = self.build_encoder(input_shape)
    _decoder = self.build_decoder(input_shape)
    self.gs = self.build_gs()
    self.gs2 = self.build_gs()

    # x -> z: encode an input into an N x M categorical latent code,
    # then z -> y: decode it back to the input space.
    x = Input(shape=input_shape)
    z = Sequential([flatten, *_encoder, self.gs])(x)
    y = Sequential(_decoder)(flatten(z))

    # z2 is a separate latent input, so arbitrary codes can be decoded (y2)
    # and then re-encoded (w2), giving a decode -> encode cycle.
    z2 = Input(shape=(self.parameters['N'], self.parameters['M']))
    y2 = Sequential(_decoder)(flatten(z2))
    w2 = Sequential([*_encoder, self.gs2])(flatten(y2))
    data_dim = np.prod(input_shape)

    def rec(x, y):
        # Reconstruction loss: binary cross-entropy over the flattened input.
        # return K.mean(K.binary_crossentropy(x, y))
        return bce(K.reshape(x, (K.shape(x)[0], data_dim)),
                   K.reshape(y, (K.shape(x)[0], data_dim)))

    def loss(x, y):
        # Total loss: reconstruction plus the Gumbel-Softmax layer's own
        # regularization term.
        return rec(x, y) + self.gs.loss()
    # Anneal the Gumbel-Softmax temperature after every epoch and log it.
    self.callbacks.append(LambdaCallback(on_epoch_end=self.gs.cool))
    self.callbacks.append(LambdaCallback(on_epoch_end=self.gs2.cool))
    self.custom_log_functions['tau'] = lambda: K.get_value(self.gs.tau)

    self.loss = loss
    self.metrics.append(rec)
    self.encoder = Model(x, z)
    self.decoder = Model(z2, y2)
    self.autoencoder = Model(x, y)
    self.autodecoder = Model(z2, w2)
    self.net = self.autoencoder
    # A 2x2 max-pooled view of the decoder output, and the encoder features
    # taken before the last two encoder layers.
    y2_downsample = Sequential([
        Reshape((*input_shape, 1)),
        MaxPooling2D((2, 2))
    ])(y2)
    shape = K.int_shape(y2_downsample)[1:3]
    self.decoder_downsample = Model(z2, Reshape(shape)(y2_downsample))
    self.features = Model(x, Sequential([flatten, *_encoder[:-2]])(x))
    # Step learning-rate schedule: keep the base lr until
    # full_epoch * lr_epoch (default 0.5) epochs, then divide it by 10.
    if 'lr_epoch' in self.parameters:
        ratio = self.parameters['lr_epoch']
    else:
        ratio = 0.5
    self.callbacks.append(
        LearningRateScheduler(
            lambda epoch: self.parameters['lr']
            if epoch < self.parameters['full_epoch'] * ratio
            else self.parameters['lr'] * 0.1))
    self.custom_log_functions['lr'] = lambda: K.get_value(self.net.optimizer.lr)
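
# ----------------------------------------------------------------------
# For reference only: a minimal, standalone sketch of Gumbel-Softmax
# sampling with temperature annealing, illustrating what the `gs`/`gs2`
# layers and their `cool` callbacks above presumably implement. This is an
# assumption for illustration, not the model's actual code; the names
# `sample_gumbel_softmax` and `anneal_tau` and the default temperatures
# are hypothetical.
import numpy as np

def sample_gumbel_softmax(logits, tau, rng=None):
    """Draw a relaxed one-hot sample for each of N categorical units.

    logits: array of shape (N, M), unnormalized log-probabilities.
    tau:    temperature; lower values give harder, more discrete samples.
    """
    rng = rng or np.random.default_rng()
    gumbel = -np.log(-np.log(rng.uniform(size=logits.shape) + 1e-20) + 1e-20)
    y = (logits + gumbel) / tau
    y = np.exp(y - y.max(axis=-1, keepdims=True))   # numerically stable softmax
    return y / y.sum(axis=-1, keepdims=True)

def anneal_tau(epoch, tau_max=5.0, tau_min=0.7, full_epoch=100):
    """One common schedule: exponential decay from tau_max to tau_min."""
    rate = np.log(tau_max / tau_min) / full_epoch
    return max(tau_min, tau_max * np.exp(-rate * epoch))

# Example usage: N=4 categorical units with M=2 classes each (arbitrary sizes).
# logits = np.zeros((4, 2))
# print(sample_gumbel_softmax(logits, tau=anneal_tau(epoch=0)))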