def _get_batch_normalization_weights(self,layer_name):
beta = '%s/batch_normalization/beta:0'%(layer_name)
gamma = '%s/batch_normalization/gamma:0'%(layer_name)
mean = '%s/batch_normalization/moving_mean:0'%(layer_name)
variance = '%s/batch_normalization/moving_variance:0'%(layer_name)
if self.weights is None or beta not in self.weights:
print('{:>23} {:>23}'.format(beta, 'using default initializer'))
return None, None, None, None
else:
betax = self.weights[beta]
gammax = self.weights[gamma]
meanx = self.weights[mean]
variancex = self.weights[variance]
self.loaded_weights[beta]=1
self.loaded_weights[gamma]=1
self.loaded_weights[mean]=1
self.loaded_weights[variance]=1
#print('{:>23} {:>23}'.format(beta, 'load from %s'%self.flags.load_path))
return betax,gammax,meanx,variancex
# NOTE(review): removed stray blog-page artifacts ("评论列表" = "comment list",
# "文章目录" = "article table of contents") pasted in from a web page; they
# were not code and broke parsing.