Python variables_initializer() usage examples
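
For orientation, here is a minimal standalone sketch (not taken from any of the projects below): in the TF 1.x API, tf.variables_initializer(var_list) returns an op that initializes only the variables passed in, unlike tf.global_variables_initializer(), which initializes every global variable.

import tensorflow as tf  # TF 1.x API

v1 = tf.Variable(0.0, name="v1")
v2 = tf.Variable(1.0, name="v2")

with tf.Session() as sess:
    # Initializes only v1; v2 stays uninitialized until its own initializer runs.
    sess.run(tf.variables_initializer([v1]))
    print(sess.run(v1))        # 0.0
    sess.run(v2.initializer)   # now initialize v2 as well
    print(sess.run(v2))        # 1.0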

dqn_utils.py (project: deep-q-learning, author: alvinwan)
def initialize_interdependent_variables(session, vars_list, feed_dict):
    """Initialize a list of variables one at a time, which is useful if
    initialization of some variables depends on initialization of the others.
    """
    vars_left = vars_list
    while len(vars_left) > 0:
        new_vars_left = []
        for v in vars_left:
            try:
                # If using an older version of TensorFlow, uncomment the line
                # below and comment out the line after it.
                # session.run(tf.initialize_variables([v]), feed_dict)
                session.run(tf.variables_initializer([v]), feed_dict)
            except tf.errors.FailedPreconditionError:
                new_vars_left.append(v)
        if len(new_vars_left) >= len(vars_left):
            # This can happen if the variables all depend on each other, or more likely if
            # there's another variable outside of the list that still needs to be initialized.
            # This could be detected here, but life's finite.
            raise Exception("Cycle in variable dependencies, or external precondition unsatisfied.")
        else:
            vars_left = new_vars_left
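
A hypothetical usage sketch for the helper above (variable names are illustrative, not from the project): two variables whose initializers depend on a placeholder feed and on each other, initialized in whatever order succeeds.

import numpy as np
import tensorflow as tf

x_ph = tf.placeholder(tf.float32, shape=[4], name="x")
a = tf.Variable(x_ph * 2.0, name="a")   # initializer needs the feed_dict
b = tf.Variable(a + 1.0, name="b")      # initializer needs `a` to be initialized first

sess = tf.Session()
# Passing [b, a]: the first pass fails on `b` (FailedPreconditionError) and succeeds on `a`;
# the second pass then initializes `b`.
initialize_interdependent_variables(sess, [b, a], {x_ph: np.ones(4, np.float32)})
print(sess.run(b))  # [3. 3. 3. 3.]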
utils_tf.py (project: cleverhans, author: tensorflow)
def initialize_uninitialized_global_variables(sess):
    """
    Only initializes the variables of a TensorFlow session that were not
    already initialized.
    :param sess: the TensorFlow session
    :return:
    """
    # List all global variables
    global_vars = tf.global_variables()

    # Find initialized status for all variables
    is_var_init = [tf.is_variable_initialized(var) for var in global_vars]
    is_initialized = sess.run(is_var_init)

    # List all variables that were not initialized previously
    not_initialized_vars = [var for (var, init) in
                            zip(global_vars, is_initialized) if not init]

    # Initialize all uninitialized variables found, if any
    if len(not_initialized_vars):
        sess.run(tf.variables_initializer(not_initialized_vars))
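
A minimal sketch of calling the helper above (illustrative names): v1 is initialized by hand, so the helper should pick up only v2.

import tensorflow as tf

v1 = tf.Variable(1.0, name="v1")
v2 = tf.Variable(2.0, name="v2")

sess = tf.Session()
sess.run(v1.initializer)                         # v1 is already initialized
initialize_uninitialized_global_variables(sess)  # initializes only v2
print(sess.run([v1, v2]))                        # [1.0, 2.0]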
tensorflow_backend.py (project: keras, author: GeekLiB)
def _initialize_variables():
    if hasattr(tf, 'global_variables'):
        variables = tf.global_variables()
    else:
        variables = tf.all_variables()

    uninitialized_variables = []
    for v in variables:
        if not hasattr(v, '_keras_initialized') or not v._keras_initialized:
            uninitialized_variables.append(v)
            v._keras_initialized = True
    if uninitialized_variables:
        sess = get_session()
        if hasattr(tf, 'variables_initializer'):
            sess.run(tf.variables_initializer(uninitialized_variables))
        else:
            sess.run(tf.initialize_variables(uninitialized_variables))
dqn_utils.py (project: rl_algorithms, author: DanielTakeshi)
def initialize_interdependent_variables(session, vars_list, feed_dict):
    """Initialize a list of variables one at a time, which is useful if
    initialization of some variables depends on initialization of the others.
    """
    vars_left = vars_list
    while len(vars_left) > 0:
        new_vars_left = []
        for v in vars_left:
            try:
                # If using an older version of TensorFlow, uncomment the line
                # below and comment out the line after it.
                # session.run(tf.initialize_variables([v]), feed_dict)
                session.run(tf.variables_initializer([v]), feed_dict)
            except tf.errors.FailedPreconditionError:
                new_vars_left.append(v)
        if len(new_vars_left) >= len(vars_left):
            # This can happen if the variables all depend on each other, or more likely if
            # there's another variable outside of the list that still needs to be initialized.
            # This could be detected here, but life's finite.
            raise Exception("Cycle in variable dependencies, or external precondition unsatisfied.")
        else:
            vars_left = new_vars_left
conv_test.py (project: sonnet, author: deepmind)
def testComputationSame(self, use_bias):
    """Run through for something with a known answer using SAME padding."""
    conv1 = snt.Conv2D(
        output_channels=1,
        kernel_shape=3,
        stride=1,
        padding=snt.SAME,
        name="conv1",
        use_bias=use_bias,
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 1], dtype=np.float32)))
    expected_out = np.array([[5, 7, 7, 7, 5],
                             [7, 10, 10, 10, 7],
                             [7, 10, 10, 10, 7],
                             [7, 10, 10, 10, 7],
                             [5, 7, 7, 7, 5]])
    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(np.reshape(out.eval(), [5, 5]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputationValid(self, use_bias):
    """Run through for something with a known answer using snt.VALID padding."""
    conv1 = snt.Conv2D(
        output_channels=1,
        kernel_shape=3,
        stride=1,
        padding=snt.VALID,
        name="conv1",
        use_bias=use_bias,
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 1], dtype=np.float32)))
    expected_output = np.array([[10, 10, 10],
                                [10, 10, 10],
                                [10, 10, 10]])
    if not use_bias:
      expected_output -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(np.reshape(out.eval(), [3, 3]), expected_output)
conv_test.py (project: sonnet, author: deepmind)
def testMask2D(self):
    """2D Masks are applied properly."""

    # This mask, applied on an image filled with 1, should result in an image
    # filled with 8 (since we sum 4 elements per channel and there are 2 input
    # channels).
    mask = np.array([[1, 1, 1],
                     [1, 0, 0],
                     [0, 0, 0]], dtype=np.float32)
    inputs = tf.constant(1.0, shape=(1, 5, 5, 2))
    conv1 = snt.Conv2D(
        output_channels=1,
        kernel_shape=3,
        mask=mask,
        padding=snt.VALID,
        use_bias=False,
        initializers=create_constant_initializers(1.0, 0.0, use_bias=False))
    out = conv1(inputs)
    expected_out = np.array([[8] * 3] * 3)
    with self.test_session():
      tf.variables_initializer([conv1.w]).run()
      self.assertAllClose(np.reshape(out.eval(), [3, 3]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testMask4D(self):
    """4D Masks are applied properly."""

    # This mask, applied on an image filled with 1, should result in an image
    # filled with 17, as there are 18 weights but we zero out one of them.
    mask = np.ones([3, 3, 2, 1], dtype=np.float32)
    mask[0, 0, 0, :] = 0
    inputs = tf.constant(1.0, shape=(1, 5, 5, 2))
    conv1 = snt.Conv2D(
        output_channels=1,
        kernel_shape=3,
        mask=mask,
        padding=snt.VALID,
        use_bias=False,
        initializers=create_constant_initializers(1.0, 0.0, use_bias=False))
    out = conv1(inputs)
    expected_out = np.array([[17] * 3] * 3)
    with self.test_session():
      tf.variables_initializer([conv1.w]).run()
      self.assertAllClose(np.reshape(out.eval(), [3, 3]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputationValid(self, use_bias):
    """Run through for something with a known answer using snt.VALID padding."""
    conv1 = snt.Conv1D(
        output_channels=1,
        kernel_shape=3,
        stride=1,
        padding=snt.VALID,
        use_bias=use_bias,
        name="conv1",
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 1], dtype=np.float32)))
    expected_out = np.asarray([4, 4, 4])
    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(np.reshape(out.eval(), [3]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputation(self, use_bias):
    """Run through for something with a known answer."""
    conv1 = snt.CausalConv1D(
        output_channels=1,
        kernel_shape=3,
        stride=1,
        use_bias=use_bias,
        name="conv1",
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 1], dtype=np.float32)))
    expected_out = np.reshape(np.array([1, 2, 3, 3, 3]), [1, 5, 1])
    if use_bias:
      expected_out += 1

    init_op = tf.variables_initializer(
        [conv1.w, conv1.b] if use_bias else [conv1.w])
    with self.test_session() as sess:
      sess.run(init_op)
      actual_out = sess.run(out)

    self.assertAllClose(actual_out, expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputationStrided(self, use_bias):
    """Run through for something with a known answer."""
    conv1 = snt.CausalConv1D(
        output_channels=1,
        kernel_shape=3,
        stride=2,
        use_bias=use_bias,
        name="conv1",
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 1], dtype=np.float32)))
    expected_out = np.reshape(np.array([1, 3, 3]), [1, 3, 1])
    if use_bias:
      expected_out += 1

    init_op = tf.variables_initializer(
        [conv1.w, conv1.b] if use_bias else [conv1.w])
    with self.test_session() as sess:
      sess.run(init_op)
      actual_out = sess.run(out)

    self.assertAllClose(actual_out, expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testSharing(self, use_bias):
    """Sharing is working."""

    conv1 = snt.InPlaneConv2D(kernel_shape=3, use_bias=use_bias)
    x = np.random.randn(1, 5, 5, 1)
    x1 = tf.constant(x, dtype=np.float32)
    x2 = tf.constant(x, dtype=np.float32)
    out1 = conv1(x1)
    out2 = conv1(x2)

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()
      self.assertAllClose(out1.eval(), out2.eval())

      w = np.random.randn(3, 3, 1, 1)  # Now change the weights.
      conv1.w.assign(w).eval()
      self.assertAllClose(out1.eval(), out2.eval())
conv_test.py (project: sonnet, author: deepmind)
def testShapesNotKnown(self, use_bias):
    """Test that the generated shapes are correct when input shape not known."""

    inputs = tf.placeholder(
        tf.float32, shape=[None, None, None, self.in_channels], name="inputs")

    conv1 = snt.DepthwiseConv2D(
        channel_multiplier=self.channel_multiplier,
        kernel_shape=self.kernel_shape,
        padding=snt.SAME,
        stride=1,
        use_bias=use_bias)
    output = conv1(inputs)

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()
      output_eval = output.eval({inputs: np.zeros(self.input_shape)})
      self.assertEqual(output_eval.shape, tuple(self.output_shape))
conv_test.py (project: sonnet, author: deepmind)
def testComputationSame(self, use_bias):
    """Run through for something with a known answer using SAME padding."""
    conv1 = snt.DepthwiseConv2D(
        channel_multiplier=1,
        kernel_shape=[3, 3],
        stride=1,
        padding=snt.SAME,
        use_bias=use_bias,
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 1], dtype=np.float32)))
    expected_out = np.array([[5, 7, 7, 7, 5],
                             [7, 10, 10, 10, 7],
                             [7, 10, 10, 10, 7],
                             [7, 10, 10, 10, 7],
                             [5, 7, 7, 7, 5]])
    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(np.reshape(out.eval(), [5, 5]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputationValidMultiChannel(self, use_bias):
    """Run through for something with a known answer using snt.VALID padding."""
    conv1 = snt.DepthwiseConv2D(
        channel_multiplier=1,
        kernel_shape=[3, 3],
        stride=1,
        padding=snt.VALID,
        use_bias=use_bias,
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 3], dtype=np.float32)))
    expected_out = np.array([[[10] * 3] * 3] * 3)
    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(
          np.reshape(out.eval(), [3, 3, 3]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testSharing(self, use_bias):
    """Sharing is working."""
    conv1 = snt.DepthwiseConv2D(
        channel_multiplier=3, kernel_shape=3, stride=1, padding=snt.SAME,
        use_bias=use_bias)

    x = np.random.randn(1, 5, 5, 1)
    x1 = tf.constant(x, dtype=np.float32)
    x2 = tf.constant(x, dtype=np.float32)

    out1 = conv1(x1)
    out2 = conv1(x2)

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()
      self.assertAllClose(out1.eval(), out2.eval())

      # Kernel shape was set to 3, which is expanded to [3, 3].
      # Input channels are 1; output channels := in_channels * channel_multiplier,
      # with channel_multiplier == 3. So the weight layout must be (3, 3, 1, 3).
      w = np.random.randn(3, 3, 1, 3)  # Now change the weights.
      conv1.w.assign(w).eval()
      self.assertAllClose(out1.eval(), out2.eval())
conv_test.py (project: sonnet, author: deepmind)
def testShapesNotKnown(self, use_bias):
    """Test that the generated shapes are correct when input shape not known."""

    inputs = tf.placeholder(
        tf.float32, shape=[None, None, None, self.in_channels], name="inputs")

    conv1 = snt.SeparableConv2D(
        output_channels=self.out_channels_dw,
        channel_multiplier=1,
        kernel_shape=self.kernel_shape,
        padding=snt.SAME,
        use_bias=use_bias)
    output = conv1(inputs)

    with self.test_session():
      tf.variables_initializer(
          [conv1.w_dw, conv1.w_pw, conv1.b] if use_bias else
          [conv1.w_dw, conv1.w_pw]).run()
      output_eval = output.eval({inputs: np.zeros(self.input_shape)})
      self.assertEqual(output_eval.shape, tuple(self.output_shape))
conv_test.py (project: sonnet, author: deepmind)
def testComputationValidMultiChannel(self, use_bias):
    """Run through for something with a known answer using snt.VALID padding."""

    conv1 = snt.SeparableConv2D(
        output_channels=3,
        channel_multiplier=1,
        kernel_shape=[3, 3],
        padding=snt.VALID,
        use_bias=use_bias,
        initializers=create_separable_constant_initializers(
            1.0, 1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 3], dtype=np.float32)))
    expected_out = np.array([[[28] * 3] * 3] * 3)
    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w_dw, conv1.w_pw, conv1.b] if use_bias else
          [conv1.w_dw, conv1.w_pw]).run()

      self.assertAllClose(np.reshape(out.eval(), [3, 3, 3]), expected_out)
conv_test.py (project: sonnet, author: deepmind)
def testComputationValid(self, use_bias):
    """Run through for something with a known answer using snt.VALID padding."""

    conv1 = snt.Conv3D(
        output_channels=1,
        kernel_shape=3,
        stride=1,
        padding=snt.VALID,
        name="conv1",
        use_bias=use_bias,
        initializers=create_constant_initializers(1.0, 1.0, use_bias))

    out = conv1(tf.constant(np.ones([1, 5, 5, 5, 1], dtype=np.float32)))
    expected_out = np.asarray([28] * 27).reshape((3, 3, 3))

    if not use_bias:
      expected_out -= 1

    with self.test_session():
      tf.variables_initializer(
          [conv1.w, conv1.b] if use_bias else [conv1.w]).run()

      self.assertAllClose(
          np.reshape(out.eval(), [3, 3, 3]), expected_out)
hyper_gradients.py (project: RFHO, author: lucfra)
def initialize(self, session=None):
        """
        Helper for initializing all the variables. Builds and runs model variables and global step initializers.
        Note that dual variables are initialized only when calling `backward`.

        :param session: optional tensorflow session (if None default session is used) 

        :return: None
        """
        ss = session or tf.get_default_session()
        assert ss, 'No default tensorflow session!'
        if isinstance(self.w, MergedVariable):
            self.w.initialize(session=session)
        else:
            ss.run(tf.variables_initializer([self.w]))
        ss.run(tf.variables_initializer(self.hyper_gradient_vars + [self.global_step.var]))
hyper_gradients.py (project: RFHO, author: lucfra)
def initialize(self, session=None):
        """
        Helper for initializing all the variables. Builds and runs model variables, 
        Zs and global step initializers.

        :param session: optional tensorflow session (if None default session is used) 

        :return: True
        """
        ss = session or tf.get_default_session()
        assert ss, 'No default tensorflow session!'
        if isinstance(self.w, MergedVariable):
            self.w.initialize(session=session)
        else:
            ss.run(tf.variables_initializer([self.w]))  # never tested
        ss.run(tf.variables_initializer(self.hyper_gradient_vars + [self.global_step.var]))
        [z.initializer().run() for z in self.zs]
        return True
model.py (project: AM-GAN, author: ZhimingZhou)
def model_initilization(self, cfg):

        ############################################################################################################################################
        def initialization():
            var_list = tf.global_variables()
            for var in var_list:
                self.sess.run(tf.variables_initializer([var]), feed_dict={self.z: self.sample_z[:cfg.iBatchSize], self.images_lab: self.sample_images[:cfg.iBatchSize], self.fInputNoise: cfg.fInputNoise})
                print(var.op.name)

            #self.sess.run(tf.initialize_all_tables(), feed_dict={self.z: self.sample_z[:cfg.iBatchSize], self.images_lab: self.sample_images[:cfg.iBatchSize], self.fInputNoise: cfg.fInputNoiseBiG})

        print('optimizer initialization')

        if cfg.bLoadCheckpoint:
            if self.load(cfg):
                print(" [*] Load SUCCESS")
            else:
                print(" [!] Load failed...")
                initialization()
        else:
            initialization()
tensorflow_backend.py (project: deep-learning-keras-projects, author: jasmeetsb)
def _initialize_variables():
    if hasattr(tf, 'global_variables'):
        variables = tf.global_variables()
    else:
        variables = tf.all_variables()

    uninitialized_variables = []
    for v in variables:
        if not hasattr(v, '_keras_initialized') or not v._keras_initialized:
            uninitialized_variables.append(v)
            v._keras_initialized = True
    if uninitialized_variables:
        sess = get_session()
        if hasattr(tf, 'variables_initializer'):
            sess.run(tf.variables_initializer(uninitialized_variables))
        else:
            sess.run(tf.initialize_variables(uninitialized_variables))
net_utils.py (project: tensorflow_yolo2, author: wenxichen)
def restore_inception_resnet_variables_from_weight(sess, weights_path):

    adam_vars = [var for var in tf.global_variables()
                 if 'Adam' in var.name or
                 'beta1_power' in var.name or
                 'beta2_power' in var.name]
    uninit_vars = tf.get_collection(
        tf.GraphKeys.GLOBAL_VARIABLES, scope='InceptionResnetV2/Conv2d_1a_3x3') + adam_vars
    init_op = tf.variables_initializer(uninit_vars)

    variables_to_restore = slim.get_variables_to_restore(
        exclude=['InceptionResnetV2/Conv2d_1a_3x3'])
    for var in uninit_vars:
        if var in variables_to_restore:
            variables_to_restore.remove(var)
    saver = tf.train.Saver(variables_to_restore)

    print('Initializing new variables to train from downloaded inception resnet weights')
    sess.run(init_op)
    saver.restore(sess, weights_path)

    return 0
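
The same scope-filtered pattern in isolation (a self-contained sketch with illustrative scope names, not the project's actual scopes): collect only the newly added variables by scope, build a targeted init op, and restore everything else from a checkpoint.

import tensorflow as tf

with tf.variable_scope("new_head"):
    w = tf.get_variable("w", shape=[3, 3], initializer=tf.zeros_initializer())

new_vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope="new_head")
init_new = tf.variables_initializer(new_vars)

sess = tf.Session()
sess.run(init_new)  # only the new head is initialized; pretrained weights would be restored via tf.train.Saver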
tensorflow_backend.py (project: keras-customized, author: ambrite)
def _initialize_variables():
    if hasattr(tf, 'global_variables'):
        variables = tf.global_variables()
    else:
        variables = tf.all_variables()

    uninitialized_variables = []
    for v in variables:
        if not hasattr(v, '_keras_initialized') or not v._keras_initialized:
            uninitialized_variables.append(v)
            v._keras_initialized = True
    if uninitialized_variables:
        sess = get_session()
        if hasattr(tf, 'variables_initializer'):
            sess.run(tf.variables_initializer(uninitialized_variables))
        else:
            sess.run(tf.initialize_variables(uninitialized_variables))
utils.py (project: lang2program, author: kelvinguu)
def guarantee_initialized_variables(session, variables=None):
    """Guarantee that all the specified variables are initialized.

    If a variable is already initialized, leave it alone. Otherwise, initialize it.

    If no variables are specified, checks all variables in the default graph.

    Args:
        variables (list[tf.Variable])
    """
    name_to_var = {v.op.name: v for v in tf.global_variables() + tf.local_variables()}
    uninitialized_variables = list(name_to_var[name] for name in
                                   session.run(tf.report_uninitialized_variables(variables)))
    init_op = tf.variables_initializer(uninitialized_variables)
    session.run(init_op)
    return uninitialized_variables
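
A hedged usage sketch for the helper above (illustrative names; assumes a TF 1.x version where report_uninitialized_variables returns names that match the op-name keys built above): with v1 initialized beforehand, the call should report and initialize only v2.

import tensorflow as tf

v1 = tf.Variable(1.0, name="v1")
v2 = tf.Variable(2.0, name="v2")

sess = tf.Session()
sess.run(v1.initializer)
# Only v2 is uninitialized at this point, so only v2 should come back.
newly_init = guarantee_initialized_variables(sess)
print([v.op.name for v in newly_init])  # ['v2']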
utils.py (project: lang2program, author: kelvinguu)
def guarantee_initialized_variables(session, variables=None):
    """Guarantee that all the specified variables are initialized.

    If a variable is already initialized, leave it alone. Otherwise, initialize it.

    If no variables are specified, checks all variables in the default graph.

    Args:
        variables (list[tf.Variable])
    """
    name_to_var = {v.op.name: v for v in tf.global_variables() + tf.local_variables()}
    uninitialized_variables = list(name_to_var[name] for name in
                                   session.run(tf.report_uninitialized_variables(variables)))
    init_op = tf.variables_initializer(uninitialized_variables)
    session.run(init_op)
    return uninitialized_variables
utils.py (project: image-segmentation-fcn, author: ljanyst)
def initialize_uninitialized_variables(sess):
    """
    Only initialize the weights that have not yet been initialized by other
    means, such as importing a metagraph and a checkpoint. It's useful when
    extending an existing model.
    """
    uninit_vars    = []
    uninit_tensors = []
    for var in tf.global_variables():
        uninit_vars.append(var)
        uninit_tensors.append(tf.is_variable_initialized(var))
    uninit_bools = sess.run(uninit_tensors)
    uninit = zip(uninit_bools, uninit_vars)
    uninit = [var for init, var in uninit if not init]
    sess.run(tf.variables_initializer(uninit))

#-------------------------------------------------------------------------------
trainer.py (project: dvae, author: dojoteef)
def _initialize_metrics(self):
        """ Initialize the model metrics """
        self.metrics = {}
        self.metric_values = {}
        self.update_metrics = {}
        self.reset_metrics = {}
        for data_scope in (Data.TRAIN, Data.VALIDATE, Data.TEST):
            metrics = self.collect_metrics(data_scope)
            self.metrics[data_scope] = metrics

            self.metric_values[data_scope] = {
                name: metric['scalar']
                for name, metric in iteritems(metrics)}

            self.update_metrics[data_scope] = [
                metric['update_op']
                for metric in itervalues(metrics)]

            metric_variables = []
            with stats_utils.metric_scope(data_scope, graph=self.graph) as scope:
                for local in tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope):
                    metric_variables.append(local)
            self.reset_metrics[data_scope] = tf.variables_initializer(metric_variables)
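
A standalone sketch of the reset pattern used above (illustrative names): tf.metrics ops store their running totals in local variables, so re-running tf.variables_initializer over those variables resets the metric between data scopes.

import numpy as np
import tensorflow as tf

labels = tf.placeholder(tf.float32, [None])
preds = tf.placeholder(tf.float32, [None])
mae, update_mae = tf.metrics.mean_absolute_error(labels, preds)

# tf.metrics create local variables (total/count); re-initializing them resets the metric.
metric_vars = tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES)
reset_op = tf.variables_initializer(metric_vars)

sess = tf.Session()
sess.run(tf.local_variables_initializer())
sess.run(update_mae, {labels: np.array([1., 2.], np.float32), preds: np.array([1., 1.], np.float32)})
print(sess.run(mae))  # 0.5
sess.run(reset_op)    # accumulators back to zero for the next data split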
trainer.py (project: dvae, author: dojoteef)
def _init_variables(self):
        """ Create the initialization operation for the variables """
        # The Adam optimizer uses two variables that can only be accessed through a protected
        # function, since they aren't scoped in any way. Adding a tf.variable_scope
        # around apply_gradients, where the variables are created, did not help.
        var_list = set(self.optimizer._get_beta_accumulators()) # pylint: disable=protected-access
        slot_names = self.optimizer.get_slot_names()
        for tower in self.towers:
            variables = tower.global_variables
            var_list.update(variables)

            for slot_name in slot_names:
                for variable in variables:
                    slot = self.optimizer.get_slot(variable, slot_name)
                    if slot is not None:
                        var_list.add(slot)

        # Initialize all the variables
        self.initialization_operation = tf.group(
            tf.variables_initializer(var_list),

            # Apparently local variables are not part of 'all' variables... go figure
            # This is needed for metrics for example
            tf.local_variables_initializer())
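
A simplified, self-contained sketch of the same idea (single variable, single optimizer, illustrative rather than the project's multi-tower code): gather the Adam slot variables and beta-power accumulators alongside the model variable and initialize them in one grouped op.

import tensorflow as tf

x = tf.Variable(3.0, name="x")
loss = tf.square(x)
optimizer = tf.train.AdamOptimizer(0.1)
train_op = optimizer.minimize(loss)  # creates the Adam slots for x

slot_vars = [optimizer.get_slot(x, name) for name in optimizer.get_slot_names()]
init_op = tf.group(
    tf.variables_initializer([x] + [s for s in slot_vars if s is not None]),
    tf.variables_initializer(optimizer._get_beta_accumulators()),  # pylint: disable=protected-access
    tf.local_variables_initializer())

sess = tf.Session()
sess.run(init_op)
sess.run(train_op)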

