Usage examples of Python's BufferedReader() class

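io.BufferedReader wraps a raw binary stream and adds buffering plus convenience methods such as peek() and readline(). As a minimal, self-contained sketch (not taken from any of the projects listed below):

import io

raw = io.BytesIO(b'first line\nsecond line\n')
reader = io.BufferedReader(raw)

print(reader.peek(5)[:5])   # b'first' -- look ahead without consuming
print(reader.readline())    # b'first line\n'
print(reader.read())        # b'second line\n'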
test_form.py (project: ckanext-validation, author: frictionlessdata)
def test_resource_form_create_valid(self, mock_open):
        dataset = Dataset()

        app = self._get_test_app()
        env, response = _get_resource_new_page_as_sysadmin(app, dataset['id'])
        form = response.forms['resource-edit']

        upload = ('upload', 'valid.csv', VALID_CSV)

        valid_stream = io.BufferedReader(io.BytesIO(VALID_CSV))

        with mock.patch('io.open', return_value=valid_stream):

            submit_and_follow(app, form, env, 'save', upload_files=[upload])

        dataset = call_action('package_show', id=dataset['id'])

        assert_equals(dataset['resources'][0]['validation_status'], 'success')
        assert 'validation_timestamp' in dataset['resources'][0]
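The ckanext-validation tests in this listing all rely on the same trick: io.open is patched so the validation code reads the uploaded CSV from an in-memory BufferedReader instead of the filesystem. A standalone sketch of that pattern (assuming the mock backport package on Python 2; on Python 3 use unittest.mock):

import io
import mock  # on Python 3: from unittest import mock

CSV_BYTES = b'a,b,c\n1,2,3\n'

with mock.patch('io.open', return_value=io.BufferedReader(io.BytesIO(CSV_BYTES))):
    # Any io.open() call inside this block gets the in-memory reader back.
    with io.open('/path/that/does/not/exist.csv') as stream:
        print(stream.read())   # b'a,b,c\n1,2,3\n'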
makefile.py (project: jira_worklog_scanner, author: pgarneau)
def backport_makefile(self, mode="r", buffering=None, encoding=None,
                      errors=None, newline=None):
    """
    Backport of ``socket.makefile`` from Python 3.5.
    """
    if not set(mode) <= set(["r", "w", "b"]):
        raise ValueError(
            "invalid mode %r (only r, w, b allowed)" % (mode,)
        )
    writing = "w" in mode
    reading = "r" in mode or not writing
    assert reading or writing
    binary = "b" in mode
    rawmode = ""
    if reading:
        rawmode += "r"
    if writing:
        rawmode += "w"
    raw = SocketIO(self, rawmode)
    self._makefile_refs += 1
    if buffering is None:
        buffering = -1
    if buffering < 0:
        buffering = io.DEFAULT_BUFFER_SIZE
    if buffering == 0:
        if not binary:
            raise ValueError("unbuffered streams must be binary")
        return raw
    if reading and writing:
        buffer = io.BufferedRWPair(raw, raw, buffering)
    elif reading:
        buffer = io.BufferedReader(raw, buffering)
    else:
        assert writing
        buffer = io.BufferedWriter(raw, buffering)
    if binary:
        return buffer
    text = io.TextIOWrapper(buffer, encoding, errors, newline)
    text.mode = mode
    return text
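In CPython 3, socket.makefile() already performs the same layering as this backport: binary read mode hands back an io.BufferedReader over the raw SocketIO. A short, self-contained sketch (socket.socketpair is available on POSIX, and on Windows since Python 3.5):

import io
import socket

a, b = socket.socketpair()      # two connected sockets
b.sendall(b'hello\n')

reader = a.makefile('rb')       # buffered binary reader over the raw socket
assert isinstance(reader, io.BufferedReader)
print(reader.readline())        # b'hello\n'

reader.close()
a.close()
b.close()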
test_form.py (project: ckanext-validation, author: frictionlessdata)
def test_resource_form_create_invalid(self, mock_open):
        dataset = Dataset()

        app = self._get_test_app()
        env, response = _get_resource_new_page_as_sysadmin(app, dataset['id'])
        form = response.forms['resource-edit']

        upload = ('upload', 'invalid.csv', INVALID_CSV)

        invalid_stream = io.BufferedReader(io.BytesIO(INVALID_CSV))

        with mock.patch('io.open', return_value=invalid_stream):

            response = webtest_submit(
                form, 'save', upload_files=[upload], extra_environ=env)

        assert_in('validation', response.body)
        assert_in('missing-value', response.body)
        assert_in('Row 2 has a missing value in column 4', response.body)
test_form.py (project: ckanext-validation, author: frictionlessdata)
def test_resource_form_update_valid(self, mock_open):

        dataset = Dataset(resources=[
            {
                'url': 'https://example.com/data.csv'
            }
        ])

        app = self._get_test_app()
        env, response = _get_resource_update_page_as_sysadmin(
            app, dataset['id'], dataset['resources'][0]['id'])
        form = response.forms['resource-edit']

        upload = ('upload', 'valid.csv', VALID_CSV)

        valid_stream = io.BufferedReader(io.BytesIO(VALID_CSV))

        with mock.patch('io.open', return_value=valid_stream):

            submit_and_follow(app, form, env, 'save', upload_files=[upload])

        dataset = call_action('package_show', id=dataset['id'])

        assert_equals(dataset['resources'][0]['validation_status'], 'success')
        assert 'validation_timestamp' in dataset['resources'][0]
test_form.py (project: ckanext-validation, author: frictionlessdata)
def test_resource_form_update_invalid(self, mock_open):

        dataset = Dataset(resources=[
            {
                'url': 'https://example.com/data.csv'
            }
        ])

        app = self._get_test_app()
        env, response = _get_resource_update_page_as_sysadmin(
            app, dataset['id'], dataset['resources'][0]['id'])
        form = response.forms['resource-edit']

        upload = ('upload', 'invalid.csv', INVALID_CSV)

        invalid_stream = io.BufferedReader(io.BytesIO(INVALID_CSV))

        with mock.patch('io.open', return_value=invalid_stream):

            response = webtest_submit(
                form, 'save', upload_files=[upload], extra_environ=env)

        assert_in('validation', response.body)
        assert_in('missing-value', response.body)
        assert_in('Row 2 has a missing value in column 4', response.body)
test_logic.py (project: ckanext-validation, author: frictionlessdata)
def test_validation_fails_on_upload(self, mock_open):

        invalid_file = StringIO.StringIO()
        invalid_file.write(INVALID_CSV)

        mock_upload = MockFieldStorage(invalid_file, 'invalid.csv')

        dataset = factories.Dataset()

        invalid_stream = io.BufferedReader(io.BytesIO(INVALID_CSV))

        with mock.patch('io.open', return_value=invalid_stream):

            with assert_raises(t.ValidationError) as e:

                call_action(
                    'resource_create',
                    package_id=dataset['id'],
                    format='CSV',
                    upload=mock_upload
                )

        assert 'validation' in e.exception.error_dict
        assert 'missing-value' in str(e.exception)
        assert 'Row 2 has a missing value in column 4' in str(e.exception)
test_logic.py (project: ckanext-validation, author: frictionlessdata)
def test_validation_passes_on_upload(self, mock_open):

        valid_file = StringIO.StringIO()
        valid_file.write(VALID_CSV)

        mock_upload = MockFieldStorage(valid_file, 'valid.csv')

        dataset = factories.Dataset()

        valid_stream = io.BufferedReader(io.BytesIO(VALID_CSV))

        with mock.patch('io.open', return_value=valid_stream):

            resource = call_action(
                'resource_create',
                package_id=dataset['id'],
                format='CSV',
                upload=mock_upload
            )

        assert_equals(resource['validation_status'], 'success')
        assert 'validation_timestamp' in resource
binary_reader.py (project: Telethon, author: LonamiWebs)
def __init__(self, data=None, stream=None):
        if data:
            self.stream = BytesIO(data)
        elif stream:
            self.stream = stream
        else:
            raise InvalidParameterError(
                'Either bytes or a stream must be provided')

        self.reader = BufferedReader(self.stream)
        self._last = None  # Should come in handy to spot -404 errors

    # region Reading

    # "All numbers are written as little endian."
    # https://core.telegram.org/mtproto
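Telethon's actual read methods are not shown in this excerpt; as a hypothetical illustration of the comment above, little-endian integers can be pulled out of the BufferedReader with struct:

import struct
from io import BufferedReader, BytesIO

reader = BufferedReader(BytesIO(b'\x01\x00\x00\x00\xff\xff\xff\xff'))

value, = struct.unpack('<i', reader.read(4))   # little-endian signed 32-bit
print(value)   # 1
value, = struct.unpack('<i', reader.read(4))
print(value)   # -1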
makefile.py (project: atc_alexa, author: ckuzma): the backport_makefile code is identical to the jira_worklog_scanner example above.
structure.py (project: midi, author: MicroTransactionsMatterToo)
def __init__(self, data: Union[FileIO, BufferedReader]) -> None:
        chunk_type = data.read(4)
        if chunk_type != b'MThd':
            raise ValueError("File had invalid header chunk type")

        header_length = int.from_bytes(data.read(4), 'big')
        if header_length != 6:
            raise ValueError("File has unsupported header length")
        self.length = header_length

        format = int.from_bytes(data.read(2), 'big')
        if format not in [0, 1, 2]:
            raise ValueError("File has unsupported format")
        self.format = format

        ntrks = int.from_bytes(data.read(2), 'big')
        if ntrks > 1 and format == 0:
            raise ValueError("Multiple tracks in single track format")
        self.ntrks = ntrks

        self.tpqn = int.from_bytes(data.read(2), 'big')
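The class name is not included in the excerpt, but the parsing steps can be exercised on a hand-built MThd chunk using the same int.from_bytes calls (illustration only, not the project's own test code):

import io

header = (b'MThd'
          + (6).to_bytes(4, 'big')      # header length
          + (1).to_bytes(2, 'big')      # format 1
          + (2).to_bytes(2, 'big')      # two tracks
          + (480).to_bytes(2, 'big'))   # ticks per quarter note
data = io.BufferedReader(io.BytesIO(header))

assert data.read(4) == b'MThd'
assert int.from_bytes(data.read(4), 'big') == 6
print(int.from_bytes(data.read(2), 'big'))   # 1
print(int.from_bytes(data.read(2), 'big'))   # 2
print(int.from_bytes(data.read(2), 'big'))   # 480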
request.py (project: Intranet-Penetration, author: yuxiaokui)
def _body_file__get(self):
        """
            Input stream of the request (wsgi.input).
            Setting this property resets the content_length and seekable flag
            (unlike setting req.body_file_raw).
        """
        if not self.is_body_readable:
            return io.BytesIO()
        r = self.body_file_raw
        clen = self.content_length
        if not self.is_body_seekable and clen is not None:
            # we need to wrap input in LimitedLengthFile
            # but we have to cache the instance as well
            # otherwise this would stop working
            # (.remaining counter would reset between calls):
            #   req.body_file.read(100)
            #   req.body_file.read(100)
            env = self.environ
            wrapped, raw = env.get('webob._body_file', (0,0))
            if raw is not r:
                wrapped = LimitedLengthFile(r, clen)
                wrapped = io.BufferedReader(wrapped)
                env['webob._body_file'] = wrapped, r
            r = wrapped
        return r
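WebOb's LimitedLengthFile is not reproduced here, but the wrapping step is easy to sketch: a RawIOBase that stops after the declared content length becomes convenient to consume once wrapped in io.BufferedReader. LimitedRaw below is a hypothetical, simplified stand-in, not WebOb's implementation:

import io

class LimitedRaw(io.RawIOBase):
    # Serves at most `remaining` bytes from the underlying file object.
    def __init__(self, file, maxlen):
        self.file = file
        self.remaining = maxlen

    def readable(self):
        return True

    def readinto(self, buf):
        if self.remaining <= 0:
            return 0                      # signal EOF once the limit is hit
        chunk = self.file.read(min(len(buf), self.remaining))
        self.remaining -= len(chunk)
        buf[:len(chunk)] = chunk
        return len(chunk)

body = io.BufferedReader(LimitedRaw(io.BytesIO(b'abcdefghij'), maxlen=4))
print(body.read())   # b'abcd' -- reading stops at the declared length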
request.py (project: MKFQ, author: maojingios): the _body_file__get code is identical to the Intranet-Penetration example above.
request.py (project: xxNet, author: drzorm): the _body_file__get code is identical to the Intranet-Penetration example above.
test_dem.py (project: game-tools, author: joshuaskelly)
def test_dem(self):
        d0 = dem.Dem.open('./test_data/test.dem')
        d0.close()

        d0.save(self.buff)
        self.buff.seek(0)

        b = io.BufferedReader(self.buff)
        d1 = dem.Dem.open(b)

        self.assertEqual(d1.cd_track, '2', 'Cd track should be 2')
        self.assertEqual(len(d1.message_blocks), 168, 'The demo should have 168 message blocks')

        last_message_of_first_block = d1.message_blocks[0].messages[-1]

        self.assertTrue(isinstance(last_message_of_first_block, dem.SignOnNum), 'The last message of the first block should be a SignOnNum')
        self.assertEqual(last_message_of_first_block.sign_on, 1, 'Sign on value should be 1')
        self.assertTrue(isinstance(d1.message_blocks[-1].messages[0], dem.Disconnect), 'The last message should be a Disconnect')

        self.assertFalse(d1.fp.closed, 'File should be open')
        fp = d1.fp
        d1.close()
        self.assertTrue(fp.closed, 'File should be closed')
        self.assertIsNone(d1.fp, 'File pointer should be cleaned up')
eds.py (project: canopen, author: christiansandberg)
def import_from_node(node_id, network):
    # Create temporary SDO client
    sdo_client = SdoClient(0x600 + node_id, 0x580 + node_id, None)
    sdo_client.network = network
    # Subscribe to SDO responses
    network.subscribe(0x580 + node_id, sdo_client.on_response)
    # Create file like object for Store EDS variable
    try:
        eds_fp = ReadableStream(sdo_client, 0x1021)
        eds_fp = io.BufferedReader(eds_fp)
        eds_fp = io.TextIOWrapper(eds_fp, "ascii")
        od = import_eds(eds_fp, node_id)
    except Exception as e:
        logger.error("No object dictionary could be loaded for node %d: %s",
                     node_id, e)
        od = None
    finally:
        network.unsubscribe(0x580 + node_id)
    return od
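The layering used here is a general io idiom: any readable byte source can be promoted to a buffered stream and then to a text stream. A generic sketch, with an in-memory buffer standing in for the SDO upload stream:

import io

raw = io.BytesIO(b'[FileInfo]\r\nFileName=sample.eds\r\n')
buffered = io.BufferedReader(raw)
text = io.TextIOWrapper(buffered, 'ascii')

for line in text:
    print(line.rstrip())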
tarfile.py (project: python-, author: secondtonone1)
def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file or a
           link, an io.BufferedReader object is returned. Otherwise, None is
           returned.
        """
        self._check("r")

        if isinstance(member, str):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member

        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
            # Members with unknown types are treated as regular files.
            return self.fileobject(self, tarinfo)

        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
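A usage sketch for extractfile(): in recent CPython versions the object returned for a regular member is built on io.BufferedReader, so it can be read like any other binary file (self-contained example using an in-memory archive):

import io
import tarfile

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w') as tar:
    data = b'hello tar\n'
    info = tarfile.TarInfo(name='hello.txt')
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))

buf.seek(0)
with tarfile.open(fileobj=buf, mode='r') as tar:
    member = tar.extractfile('hello.txt')
    print(isinstance(member, io.BufferedReader))   # True in CPython 3.3+
    print(member.read())                           # b'hello tar\n'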
makefile.py (project: my-first-blog, author: AnkurBegining): the backport_makefile code is identical to the jira_worklog_scanner example above.
test_logic.py (project: ckanext-validation, author: frictionlessdata)
def test_validation_fails_on_upload(self, mock_open):

        dataset = factories.Dataset(resources=[
            {
                'url': 'https://example.com/data.csv'
            }
        ])

        invalid_file = StringIO.StringIO()
        invalid_file.write(INVALID_CSV)

        mock_upload = MockFieldStorage(invalid_file, 'invalid.csv')

        invalid_stream = io.BufferedReader(io.BytesIO(INVALID_CSV))

        with mock.patch('io.open', return_value=invalid_stream):

            with assert_raises(t.ValidationError) as e:

                call_action(
                    'resource_update',
                    id=dataset['resources'][0]['id'],
                    format='CSV',
                    upload=mock_upload
                )

        assert 'validation' in e.exception.error_dict
        assert 'missing-value' in str(e.exception)
        assert 'Row 2 has a missing value in column 4' in str(e.exception)
test_logic.py (project: ckanext-validation, author: frictionlessdata)
def test_validation_fails_no_validation_object_stored(self, mock_open):

        dataset = factories.Dataset(resources=[
            {
                'url': 'https://example.com/data.csv'
            }
        ])

        invalid_file = StringIO.StringIO()
        invalid_file.write(INVALID_CSV)

        mock_upload = MockFieldStorage(invalid_file, 'invalid.csv')

        invalid_stream = io.BufferedReader(io.BytesIO(INVALID_CSV))

        validation_count_before = model.Session.query(Validation).count()

        with mock.patch('io.open', return_value=invalid_stream):

            with assert_raises(t.ValidationError):

                call_action(
                    'resource_update',
                    id=dataset['resources'][0]['id'],
                    format='CSV',
                    upload=mock_upload
                )

        validation_count_after = model.Session.query(Validation).count()

        assert_equals(validation_count_after, validation_count_before)
test_logic.py (project: ckanext-validation, author: frictionlessdata)
def test_validation_passes_on_upload(self, mock_open):

        dataset = factories.Dataset(resources=[
            {
                'url': 'https://example.com/data.csv'
            }
        ])

        valid_file = StringIO.StringIO()
        valid_file.write(VALID_CSV)

        mock_upload = MockFieldStorage(valid_file, 'valid.csv')

        valid_stream = io.BufferedReader(io.BytesIO(VALID_CSV))

        with mock.patch('io.open', return_value=valid_stream):

            resource = call_action(
                'resource_update',
                id=dataset['resources'][0]['id'],
                format='CSV',
                upload=mock_upload
            )

        assert_equals(resource['validation_status'], 'success')
        assert 'validation_timestamp' in resource
test_jobs.py (project: ckanext-validation, author: frictionlessdata)
def test_job_local_paths_are_hidden(self, mock_open):

        invalid_csv = 'id,type\n' + '1,a,\n' * 1010
        invalid_file = StringIO.StringIO()

        invalid_file.write(invalid_csv)

        mock_upload = MockFieldStorage(invalid_file, 'invalid.csv')

        resource = factories.Resource(format='csv', upload=mock_upload)

        invalid_stream = io.BufferedReader(io.BytesIO(invalid_csv))

        with mock.patch('io.open', return_value=invalid_stream):

            run_validation_job(resource)

        validation = Session.query(Validation).filter(
            Validation.resource_id == resource['id']).one()

        source = validation.report['tables'][0]['source']
        assert source.startswith('http')
        assert source.endswith('invalid.csv')

        warning = validation.report['warnings'][0]
        assert_equals(
            warning, 'Table inspection has reached 1000 row(s) limit')
test_jobs.py (project: ckanext-validation, author: frictionlessdata)
def test_job_pass_validation_options_string(self, mock_open):

        invalid_csv = '''

a;b;c
#comment
1;2;3
'''

        validation_options = '''{
            "headers": 3,
            "skip_rows": ["#"]
        }'''

        invalid_file = StringIO.StringIO()

        invalid_file.write(invalid_csv)

        mock_upload = MockFieldStorage(invalid_file, 'invalid.csv')

        resource = factories.Resource(
            format='csv',
            upload=mock_upload,
            validation_options=validation_options)

        invalid_stream = io.BufferedReader(io.BytesIO(invalid_csv))

        with mock.patch('io.open', return_value=invalid_stream):

            run_validation_job(resource)

        validation = Session.query(Validation).filter(
            Validation.resource_id == resource['id']).one()

        assert_equals(validation.report['valid'], True)
binary_reader.py (project: BitBot, author: crack00r)
def __init__(self, data=None, stream=None):
        if data:
            self.stream = BytesIO(data)
        elif stream:
            self.stream = stream
        else:
            raise InvalidParameterError(
                'Either bytes or a stream must be provided')

        self.reader = BufferedReader(self.stream)

    # region Reading

    # "All numbers are written as little endian." |> Source: https://core.telegram.org/mtproto
makefile.py (project: googletranslate.popclipext, author: wizyoung): the backport_makefile code is identical to the jira_worklog_scanner example above.
clients.py (project: prov2bigchaindb, author: DLR-SC)
def save_document(self, document: str or BufferedReader or provmodel.ProvDocument) -> list:
        """
        Write a document into BigchainDB

        :param document: Document as JSON/XML/PROVN
        :type document: str or BufferedReader or ProvDocument
        :return: List of transaction ids
        :rtype: list
        """
        log.info("Save document...")
        document_tx_ids = []
        prov_document = utils.to_prov_document(content=document)
        elements = GraphConceptClient.calculate_account_data(prov_document)
        id_mapping = {}
        log.info("Create and Save instances")
        for prov_element, prov_relations, namespaces in elements:
            for rel in prov_relations['with_id']:
                id_mapping[rel.identifier] = ''

        for prov_element, prov_relations, namespaces in elements:
            account = accounts.GraphConceptAccount(prov_element, prov_relations, id_mapping, namespaces, self.store)
            self.accounts.append(account)
            tx_id = account.save_instance_asset(self._get_bigchain_connection())
            document_tx_ids.append(tx_id)

        log.info("Save relations with ids")
        for account in filter(lambda acc: acc.has_relations_with_id, self.accounts):
            document_tx_ids += account.save_relations_with_ids(self._get_bigchain_connection())

        log.info("Save relations without ids")
        for account in filter(lambda acc: acc.has_relations_without_id, self.accounts):
            document_tx_ids += account.save_relations_without_ids(self._get_bigchain_connection())

        log.info("Saved document in %s Tx", len(document_tx_ids))
        return document_tx_ids
clients.py (project: prov2bigchaindb, author: DLR-SC)
def save_document(self, document: str or BufferedReader or provmodel.ProvDocument) -> list:
        """
        Write a document into BigchainDB

        :param document: Document as JSON/XML/PROVN
        :type document: str or BufferedReader or ProvDocument
        :return: List of transaction ids
        :rtype: list
        """
        log.info("Save document...")
        document_tx_ids = []
        prov_document = utils.to_prov_document(content=document)
        account_data = RoleConceptClient.calculate_account_data(prov_document)

        id_mapping = {}
        log.info("Create and Save instances")
        for agent, relations, elements, namespaces in account_data:
            account = accounts.RoleConceptAccount(agent, relations, elements, id_mapping, namespaces, self.store)
            self.accounts.append(account)
            tx_id = account.save_instance_asset(self._get_bigchain_connection())
            document_tx_ids.append(tx_id)

        log.info("Save elements")
        for account in self.accounts:
            document_tx_ids += account.save_elements(self._get_bigchain_connection())

        log.info("Saved document in %s Tx", len(document_tx_ids))
        return document_tx_ids
makefile.py (project: Projects, author: it2school): the backport_makefile code is identical to the jira_worklog_scanner example above.
makefile.py (project: pip-update-requirements, author: alanhamlett): the backport_makefile code is identical to the jira_worklog_scanner example above.
binary_io.py (project: io_scene_kcl, author: Syroot)
def __enter__(self):
        self.reader = io.BufferedReader(self.raw)
        return self
_winconsole.py (project: swjtu-pyscraper, author: Desgard)
def _get_text_stdin(buffer_stream):
    text_stream = _NonClosingTextIOWrapper(
        io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)),
        'utf-16-le', 'strict', line_buffering=True)
    return ConsoleStream(text_stream, buffer_stream)

