def _init_variables(self):
""" Create the initialization operation for the variables """
# Adam optimizer uses two variables that can only be accessed through the use of a protected
# function since the variables aren't scoped in anyway. Trying to add a tf.variable_scope
# around apply_gradients where the variables are created did not help.
var_list = set(self.optimizer._get_beta_accumulators()) # pylint: disable=protected-access
    slot_names = self.optimizer.get_slot_names()
    for tower in self.towers:
        variables = tower.global_variables
        var_list.update(variables)
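        # Also collect the optimizer's slot variables (e.g. Adam's "m" and "v"
        # moment accumulators) so they are initialized along with the model variables.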
        for slot_name in slot_names:
            for variable in variables:
                slot = self.optimizer.get_slot(variable, slot_name)
                if slot is not None:
                    var_list.add(slot)
    # Initialize all the collected variables
    self.initialization_operation = tf.group(
        tf.variables_initializer(var_list),
        # Local variables are not part of the 'global' variables collection; they
        # back things like tf.metrics and need their own initializer.
        tf.local_variables_initializer())
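
# A minimal usage sketch, assuming `trainer` is an instance of the (hypothetical)
# multi-tower trainer class this method belongs to, executed TF1-style:
#
#     trainer._init_variables()
#     with tf.Session() as sess:
#         sess.run(trainer.initialization_operation)  # runs every collected initializer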