Example source code for the Python class FIFOQueue()
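
All of the snippets below come from open-source projects and use TensorFlow 1.x's tf.FIFOQueue, a first-in-first-out queue of tensors that is filled by enqueue ops (often driven by queue runners) and drained by dequeue ops. As a point of reference, here is a minimal, self-contained sketch of the API; it is not taken from any of the projects below and assumes TF 1.x graph mode:

import tensorflow as tf

# A FIFO queue holding up to three scalar int32 elements.
q = tf.FIFOQueue(capacity=3, dtypes=[tf.int32], shapes=[[]])
enqueue = q.enqueue_many([[1, 2, 3]])   # push three elements at once
value = q.dequeue()                     # pop one element in FIFO order

with tf.Session() as sess:
    sess.run(enqueue)
    print(sess.run(value))  # -> 1
    print(sess.run(value))  # -> 2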

input.py (project: reslearn, author: mackcmillion)
def validation_inputs():
    fps, labels = _load_validation_labelmap()
    filepaths = tf.constant(fps)
    labels = tf.constant(labels, dtype=tf.int32)

    # First-stage queue: (filename, label) pairs, filled once by a single queue runner.
    filename_queue = tf.FIFOQueue(len(fps), [tf.string, tf.int32], name='validation_filename_queue')
    enqueue_op = filename_queue.enqueue_many([filepaths, labels])
    qr = tf.train.QueueRunner(filename_queue, [enqueue_op])
    tf.train.add_queue_runner(qr)

    # Second-stage queue: preprocessed (10-crop image, label) examples, filled by several
    # consumer threads. Capacity must be a Python int, so the original list length is used.
    example_queue = tf.FIFOQueue(len(fps), [tf.float32, tf.int32], name='validation_example_queue')
    enqueue_op_ex = example_queue.enqueue(_read_and_preprocess_image_for_validation(filename_queue))
    qr_ex = tf.train.QueueRunner(example_queue, [enqueue_op_ex] * FLAGS.num_consuming_threads)
    tf.train.add_queue_runner(qr_ex)

    image_10crop, label = example_queue.dequeue()
    # do not one-hot-encode label here
    return image_10crop, label
cifar10_input_test.py (project: Tamp, author: ColumbiaDVMM)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
cifar10_input_test.py (project: tensor_flow, author: eecrazy)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
base.py (project: TensorBase, author: dancsalo)
def queue_setup(filename, mode, batch_size, num_readers, min_examples):
        """ Sets up the queue runners for data input """
        filename_queue = tf.train.string_input_producer([filename], shuffle=True, capacity=16)
        if mode == "train":
            examples_queue = tf.RandomShuffleQueue(capacity=min_examples + 3 * batch_size,
                                                   min_after_dequeue=min_examples, dtypes=[tf.string])
        else:
            examples_queue = tf.FIFOQueue(capacity=min_examples + 3 * batch_size, dtypes=[tf.string])
        enqueue_ops = list()
        for _ in range(num_readers):
            reader = tf.TFRecordReader()
            _, value = reader.read(filename_queue)
            enqueue_ops.append(examples_queue.enqueue([value]))
        tf.train.queue_runner.add_queue_runner(tf.train.queue_runner.QueueRunner(examples_queue, enqueue_ops))
        example_serialized = examples_queue.dequeue()
        return example_serialized
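
The serialized examples dequeued above still need to be parsed. A hypothetical downstream step could look like the following; the feature keys, shapes, and the module-level call are assumptions, not code from TensorBase:

# Hypothetical consumer of queue_setup's output; feature keys are made up.
example_serialized = queue_setup('train.tfrecord', 'train',
                                 batch_size=64, num_readers=4, min_examples=1000)
features = tf.parse_single_example(
    example_serialized,
    features={'image_raw': tf.FixedLenFeature([], tf.string),
              'label': tf.FixedLenFeature([], tf.int64)})
image = tf.decode_raw(features['image_raw'], tf.uint8)
label = tf.cast(features['label'], tf.int32)
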
speech_input.py (project: speechT, author: timediv)
def __init__(self, input_size, batch_size, data_generator_creator, max_steps=None):

    super().__init__(input_size)
    self.batch_size = batch_size
    self.data_generator_creator = data_generator_creator
    self.steps_left = max_steps

    with tf.device("/cpu:0"):
      # Define input and label placeholders
      # inputs is of dimension [batch_size, max_time, input_size]
      self.inputs = tf.placeholder(tf.float32, [batch_size, None, input_size], name='inputs')
      self.sequence_lengths = tf.placeholder(tf.int32, [batch_size], name='sequence_lengths')
      self.labels = tf.sparse_placeholder(tf.int32, name='labels')

      # Queue for inputs and labels
      self.queue = tf.FIFOQueue(dtypes=[tf.float32, tf.int32, tf.string],
                                capacity=100)

      # queues do not support sparse tensors yet, we need to serialize...
      serialized_labels = tf.serialize_many_sparse(self.labels)

      self.enqueue_op = self.queue.enqueue([self.inputs,
                                            self.sequence_lengths,
                                            serialized_labels])
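
The class above only defines the enqueue side. A rough sketch of a matching dequeue method, which is not part of the original file and whose name is an assumption, would reverse the sparse-label serialization:

def dequeue(self):
    # Hypothetical counterpart to the enqueue op above: pull one queued element
    # and rebuild the sparse label tensor from its serialized form.
    inputs, sequence_lengths, serialized_labels = self.queue.dequeue()
    labels = tf.deserialize_many_sparse(serialized_labels, dtype=tf.int32)
    return inputs, sequence_lengths, labels
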
cifar10_input_test.py (project: TensorFlowOnSpark, author: yahoo)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
cifar100.py (project: splitnet-wrn, author: dalgu90)
def __init__(self, pkl_path, shuffle=False, distort=False,
                 capacity=2000, image_per_thread=16):
        self._shuffle = shuffle
        self._distort = distort
        with open(pkl_path, 'rb') as fd:
            data = pickle.load(fd)
        self._images = data['data'].reshape([-1, 3, 32, 32]).transpose((0, 2, 3, 1)).copy(order='C')
        self._labels = data['labels']  # numpy 1-D array
        self.size = len(self._labels)

        self.queue = tf.FIFOQueue(shapes=[[32,32,3], []],
                                  dtypes=[tf.float32, tf.int32],
                                  capacity=capacity)
        # self.queue = tf.RandomShuffleQueue(shapes=[[32,32,3], []],
                                           # dtypes=[tf.float32, tf.int32],
                                           # capacity=capacity,
                                           # min_after_dequeue=min_after_dequeue)
        self.dataX = tf.placeholder(dtype=tf.float32, shape=[None,32,32,3])
        self.dataY = tf.placeholder(dtype=tf.int32, shape=[None,])
        self.enqueue_op = self.queue.enqueue_many([self.dataX, self.dataY])
        self.image_per_thread = image_per_thread

        self._image_summary_added = False
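
The constructor only builds the graph ops; the queue would actually be filled from a Python thread running the enqueue op with a feed_dict. A hypothetical feeding loop follows; the method name and sampling policy are assumptions, not code from splitnet-wrn:

import numpy as np

def _feed_loop(self, sess):
    # Hypothetical feeder: push image_per_thread random examples per iteration.
    while True:
        idx = np.random.randint(0, self.size, self.image_per_thread)
        sess.run(self.enqueue_op,
                 feed_dict={self.dataX: self._images[idx].astype(np.float32),
                            self.dataY: self._labels[idx]})
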
cifar10_input_test.py (project: TensorFlowFinance, author: mingyue312)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
cifar10_input_test.py (project: hops-tensorflow, author: hopshadoop)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), tf.compat.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
trainer.py (project: ternarynet, author: czhu95)
def __init__(self, config, input_queue=None, predict_tower=None):
        """
        :param config: a `TrainConfig` instance
        :param input_queue: a `tf.QueueBase` instance used to buffer datapoints.
            Defaults to a FIFO queue of size 50.
        :param predict_tower: list of relative GPU indices to run prediction on. Defaults to [0].
            Use -1 for the CPU.
        """
        super(QueueInputTrainer, self).__init__(config)
        self.input_vars = self.model.get_input_vars()
        # use a smaller queue size for now, to avoid https://github.com/tensorflow/tensorflow/issues/2942
        if input_queue is None:
            self.input_queue = tf.FIFOQueue(
                    50, [x.dtype for x in self.input_vars], name='input_queue')
        else:
            self.input_queue = input_queue

        # by default, use the first training gpu for prediction
        self.predict_tower = predict_tower or [0]
        self.dequed_inputs = None
BoxLoader.py (project: RFCN-tensorflow, author: xdever)
def __init__(self, sources=[], initOnStart=True):
        self.totalCount = 0
        self.counts=[]
        self.sources=[]
        self.initDone=False
        self.initOnStart=initOnStart

        with tf.name_scope('dataset') as scope:
            self.queue = tf.FIFOQueue(dtypes=[tf.float32, tf.float32, tf.uint8],
                    capacity=self.QUEUE_CAPACITY)

            self.image = tf.placeholder(dtype=tf.float32, shape=[None, None, 3], name="image")
            self.boxes = tf.placeholder(dtype=tf.float32, shape=[None,4], name="boxes")
            self.classes = tf.placeholder(dtype=tf.uint8, shape=[None], name="classes")

            self.enqueueOp = self.queue.enqueue([self.image, self.boxes, self.classes])

        self.sources=sources[:]
__init__.py (project: tensorflow-extenteten, author: raviqqe)
def monitored_queue(*tensors,
                    capacity,
                    metric_name="items_in_queue",
                    return_queue=False):
    queue = tf.FIFOQueue(capacity, dtypes(*tensors))
    collections.add_metric(queue.size(), metric_name)

    add_queue_runner(queue, [queue.enqueue(tensors)])

    if return_queue:
        return queue

    results = queue.dequeue()

    for tensor, result \
            in zip(tensors, results if isinstance(results, list) else [results]):
        result.set_shape(tensor.get_shape())

    return results
cifar10_input_test.py (project: tensorflow.cifar10, author: yhlleo)
def testSimple(self):
    labels = [9, 3, 0]
    records = [self._record(labels[0], 0, 128, 255),
               self._record(labels[1], 255, 0, 1),
               self._record(labels[2], 254, 255, 0)]
    contents = b"".join([record for record, _ in records])
    expected = [expected for _, expected in records]
    filename = os.path.join(self.get_temp_dir(), "cifar")
    open(filename, "wb").write(contents)

    with self.test_session() as sess:
      q = tf.FIFOQueue(99, [tf.string], shapes=())
      q.enqueue([filename]).run()
      q.close().run()
      result = cifar10_input.read_cifar10(q)

      for i in range(3):
        key, label, uint8image = sess.run([
            result.key, result.label, result.uint8image])
        self.assertEqual("%s:%d" % (filename, i), cp.as_text(key))
        self.assertEqual(labels[i], label)
        self.assertAllEqual(expected[i], uint8image)

      with self.assertRaises(tf.errors.OutOfRangeError):
        sess.run([result.key, result.uint8image])
benchmark_cnn.py (project: benchmarks, author: tensorflow)
def add_sync_queues_and_barrier(self, name_prefix, enqueue_after_list):
    """Adds ops to enqueue on all worker queues.

    Args:
      name_prefix: prefix for the shared_name of the per-worker sync queues.
      enqueue_after_list: ops that the enqueue ops should run after (added as control dependencies).

    Returns:
      An op that should be used as a control dependency before starting the next step.
    """
    self.sync_queue_counter += 1
    with tf.device(self.sync_queue_devices[(
        self.sync_queue_counter % len(self.sync_queue_devices))]):
      sync_queues = [
          tf.FIFOQueue(self.num_workers, [tf.bool], shapes=[[]],
                       shared_name='%s%s' % (name_prefix, i))
          for i in range(self.num_workers)]
      queue_ops = []
      # For each other worker, add an entry in a queue, signaling that it can
      # finish this step.
      token = tf.constant(False)
      with tf.control_dependencies(enqueue_after_list):
        for i, q in enumerate(sync_queues):
          if i == self.task_index:
            queue_ops.append(tf.no_op())
          else:
            queue_ops.append(q.enqueue(token))

      # Drain tokens off queue for this worker, one for each other worker.
      queue_ops.append(
          sync_queues[self.task_index].dequeue_many(len(sync_queues) - 1))

      return tf.group(*queue_ops)
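
For context, the returned group op is meant to gate the next step. A hypothetical call site, with names that are assumptions rather than code from the benchmarks project, could look like this:

# Hypothetical usage: all workers must pass the barrier before the next step starts.
barrier = self.add_sync_queues_and_barrier('sync_queues_step_end_', [train_op])
with tf.control_dependencies([barrier]):
    next_step_op = tf.no_op(name='start_next_step')
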
DataFeeder.py (project: TFCommon, author: MU94W)
def __init__(self, coordinator, placeholders, meta, batch_size=32, split_nums=None, is_validation=False):
        """

        :param coordinator: 
        :param placeholders:
        :param meta: 
        :param batch_size: 
        :param split_nums: 
        :param is_validation: 
        """
        super(BaseFeeder, self).__init__()
        queue = tf.FIFOQueue(capacity=math.ceil(batch_size/4), dtypes=[item.dtype for item in placeholders])
        self.queue = queue  # for buf inspect
        self.enqueue_op = queue.enqueue(placeholders)
        self.fed_holders = [None] * len(placeholders)   # None placeholder for dequeue
        self.fed_holders = queue.dequeue()
        for idx in range(len(placeholders)):
            self.fed_holders[idx].set_shape(placeholders[idx].shape)
        self._placeholders = placeholders
        self.coord = coordinator
        self.sess = None
        self.meta = meta
        key_lst = meta.get('key_lst')
        assert isinstance(key_lst, list) or isinstance(key_lst, tuple)
        self.key_lst = key_lst
        self.batch_size = batch_size
        self.split_bool = False if split_nums is None else True
        self.split_nums = split_nums
        assert isinstance(is_validation, bool)
        self.is_validation = is_validation
        self._total_samples = len(key_lst)
        self._iter = 0
        self._record_index = 0
        self._loss = 0.
model.py (project: ISLES2017, author: MiguelMonteiro)
def input_fn(files, num_epochs=None, shuffle=False, shared_name=None):

    # get file names
    if file_io.is_directory(files[0]):
        file_names = file_io.get_matching_files(files[0] + '/*tfrecord')
    else:
        file_names = files

    # shuffle if required
    if shuffle:
        shuffle_fn(file_names)

    # queue with the file names that can be shared amongst workers during training
    filename_queue = tf.FIFOQueue(100, tf.string, shared_name=shared_name)
    enque_op = filename_queue.enqueue_many([tf.train.limit_epochs(file_names, num_epochs)])
    close_op = filename_queue.close(cancel_pending_enqueues=True)

    # create queue runner and add it to queue runners
    qr = tf.train.QueueRunner(filename_queue, [enque_op], close_op,
                              queue_closed_exception_types=(tf.errors.OutOfRangeError, tf.errors.CancelledError))
    tf.train.add_queue_runner(qr)

    # read example from file
    reader = tf.TFRecordReader()
    _, example = reader.read(filename_queue)

    # parse example
    image, ground_truth, example_name = parse_example(example)

    return image, ground_truth, example_name
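
A hedged sketch of how these tensors might be driven in a session follows; the path, epoch count, and coordinator handling are assumptions, not code from ISLES2017:

# Hypothetical consumer: run the reader pipeline until the epoch limit is hit.
image, ground_truth, example_name = input_fn(['/path/to/tfrecords'], num_epochs=1)

with tf.Session() as sess:
    sess.run(tf.local_variables_initializer())  # limit_epochs keeps a local counter
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    try:
        while not coord.should_stop():
            img, gt, name = sess.run([image, ground_truth, example_name])
    except tf.errors.OutOfRangeError:
        pass  # filename queue exhausted after num_epochs
    finally:
        coord.request_stop()
        coord.join(threads)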

