learningv2.py source code

python

Project: tefla    Author: openAGI
def _process_towers_grads(self, dataset, opt, model, is_training=True, reuse=None,
                          loss_type='cross_entropy', is_classification=True):
    """Builds one training tower per GPU; returns averaged gradients and summed loss."""
    tower_grads = []
    tower_loss = []
    # Target class distribution for data balancing; defaults to uniform.
    self.target_probs = tf.placeholder_with_default(
        tf.convert_to_tensor([1 / float(self.num_classes) for _ in range(self.num_classes)]),
        shape=[self.num_classes, ], name="target_probs")
    with tf.variable_scope(tf.get_variable_scope()):
        for i in range(self.cnf.get('num_gpus', 1)):
            with tf.device('/gpu:%d' % i):
                with tf.name_scope('%s_%d' % (self.cnf.get('TOWER_NAME', 'tower'), i)) as scope:
                    # Each tower pulls its own batch from the input pipeline.
                    images, labels = distorted_inputs(
                        dataset, self.cnf['tfrecords_im_size'], self.cnf.get('crop_size'),
                        batch_size=self.cnf['batch_size_train'],
                        num_preprocess_threads=32, num_readers=8,
                        target_probs=self.target_probs,
                        init_probs=tf.convert_to_tensor(self.cnf['init_probs']),
                        image_preprocessing=self.preprocessor.preprocess_image,
                        data_balancing=self.data_balancing)
                    labels = self._adjust_ground_truth(labels)
                    loss = self._tower_loss(scope, model, images, labels,
                                            is_training=is_training, reuse=i > 0,
                                            is_classification=is_classification,
                                            gpu_id=i, loss_type=loss_type)

                    # Share variables across towers: only tower 0 creates them.
                    tf.get_variable_scope().reuse_variables()
                    if self.clip_by_global_norm:
                        grads_and_vars = self._clip_grad_global_norms(
                            tf.trainable_variables(), loss, opt,
                            global_norm=self.norm_threshold, gradient_noise_scale=0.0)
                    else:
                        grads_and_vars = opt.compute_gradients(loss)
                    tower_grads.append(grads_and_vars)
                    tower_loss.append(loss)

    # Synchronous training: average gradients across towers, sum the losses.
    grads_and_vars = self._average_gradients(tower_grads)

    return grads_and_vars, sum(tower_loss)
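
The `_average_gradients` helper is called above but its body is not shown on this page. For reference, a minimal sketch of the classic TF 1.x per-variable averaging scheme such a helper typically implements (a hypothetical stand-in, not necessarily tefla's actual code):

import tensorflow as tf

def average_gradients(tower_grads):
    # tower_grads: one list of (gradient, variable) pairs per tower, as
    # returned by opt.compute_gradients() for each GPU above.
    average_grads = []
    for grads_and_vars in zip(*tower_grads):
        # grads_and_vars holds one (grad, var) pair per tower, all for the
        # same variable, because every tower builds the same graph.
        grads = [tf.expand_dims(g, 0) for g, _ in grads_and_vars]
        grad = tf.reduce_mean(tf.concat(grads, axis=0), axis=0)
        # Variables are shared via reuse_variables(), so the first tower's
        # variable reference stands in for all of them.
        average_grads.append((grad, grads_and_vars[0][1]))
    return average_grads

The zip(*tower_grads) transpose is valid because every tower computes gradients over the identical shared variable list, so the i-th entry of each per-tower list refers to the same variable.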