Example source code for Python's cursor()

views.py (project: automated-reporting, author: Waikato)
def query_to_csv(cursor, cols, outname):
    """
    Turns the query into a CSV file for the GPA calculation.

    :param cursor: the database cursor
    :type cursor: DictCursor
    :param cols: the header column names, as a comma-separated string
    :type cols: str
    :param outname: the CSV output filename
    :type outname: str
    """

    logger.info("Generating CSV: {0}".format(outname))
    with open(outname, 'w') as outfile:
        writer = csv.writer(outfile, quoting=csv.QUOTE_NONNUMERIC)
        writer.writerow(cols.split(","))
        for row in cursor.fetchall():
            writer.writerow(row)
        outfile.flush()
    logger.info("Generated CSV ({0}) exists: {1}".format(outname, os.path.isfile(outname)))
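
A minimal usage sketch, assuming a Django connection (the query, column string, and output path below are placeholders): the caller executes the SQL first and then hands the cursor to query_to_csv together with a comma-separated header string.

# Hypothetical usage -- table, columns and output path are placeholders.
from django.db import connection

cursor = connection.cursor()
cursor.execute("SELECT student_id, paper, grade FROM grade_results")
query_to_csv(cursor, "student_id,paper,grade", "/tmp/gpa_input.csv")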
views.py (project: automated-reporting, author: Waikato)
def index(request):
    # get all schools
    cursor = connection.cursor()
    cursor.execute("""
        SELECT DISTINCT(owning_school_clevel)
        FROM %s
        ORDER BY owning_school_clevel ASC
        """ % GradeResults._meta.db_table)
    schools = list()
    for row in cursor.fetchall():
        schools.append(row[0])
    # configure template
    template = loader.get_template('leave/index.html')
    context = applist.template_context('leave')
    context['schools'] = schools
    context['minimum'] = MINIMUM_DAYS
    return HttpResponse(template.render(context, request))
indexer.py (project: open-ledger, author: creativecommons)
def server_cursor_query(queryset, cursor_id=0, chunk_size=DEFAULT_CHUNK_SIZE):
    # Open a plain cursor first so the underlying DB-API connection
    # (connection.connection, used below for the named server-side cursor) is established.
    connection.cursor()

    compiler = queryset.query.get_compiler(using=queryset.db)
    sql, params = compiler.as_sql()

    model = compiler.klass_info['model']
    select_fields = compiler.klass_info['select_fields']
    fields = [field[0].target.attname
              for field in compiler.select[select_fields[0]:select_fields[-1] + 1]]

    cursor_name = 'cursor-large-%d' % cursor_id
    cursor = connection.connection.cursor(name=cursor_name)
    with transaction.atomic(savepoint=False):
        cursor.execute(sql, params)

        # Build the row namedtuple type once, outside the fetch loop.
        DBObj = namedtuple('DBObj', fields)
        while True:
            rows = cursor.fetchmany(chunk_size)
            if not rows:
                break
            for row in rows:
                obj = DBObj(*row[select_fields[0]:select_fields[-1] + 1])
                yield obj
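
A hedged usage sketch (the queryset and per-row handler below are placeholders): named server-side cursors are a PostgreSQL/psycopg2 feature, and the generator yields one namedtuple per row so large result sets can be streamed without loading them all into memory.

# Hypothetical usage -- Image and index_row are placeholders.
queryset = Image.objects.filter(removed_from_source=False)
for row in server_cursor_query(queryset, cursor_id=1, chunk_size=1000):
    index_row(row)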
models.py (project: cocreate, author: ngageoint)
def unreadForUser(self, user_id):
        from django.db import connection
        cursor = connection.cursor()
        cursor.execute("SELECT n.id, n.added_at, n.owner_id, n.read, n.msg, n.related_model_type, n.related_model_id, n.progress, n.object_name, n.error FROM cocreate_notification n WHERE n.owner_id = %s and n.read = 0", (user_id,))
        result_list = []

        for row in cursor.fetchall():
            p = self.model(
                id = row[0],
                added_at = row[1],
                owner_id = row[2],
                read = row[3],
                msg = row[4],
                related_model_type = row[5],
                related_model_id = row[6],
                progress = row[7],
                object_name = row[8],
                error = row[9]
            )
            result_list.append(p)
        return result_list
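
Assuming unreadForUser is defined on a custom manager attached to the notification model (only the method body is shown above, so the attachment point is an assumption), a call site might look like this:

# Hypothetical usage -- assumes the method lives on Notification.objects.
for notification in Notification.objects.unreadForUser(request.user.id):
    print(notification.msg, notification.added_at)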
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_delete_rmcb_records(cursor, transfersession_id):
    # delete all RMCBs which are a reverse FF (store version newer than buffer version)
    delete_rmcb_records = """DELETE FROM {rmcb}
                             WHERE model_uuid IN
                             (SELECT rmcb.model_uuid FROM {store} as store, {buffer} as buffer, {rmc} as rmc, {rmcb} as rmcb
                             /*Scope to a single record*/
                             WHERE store.id = buffer.model_uuid
                             AND  store.id = rmc.store_model_id
                             AND  store.id = rmcb.model_uuid
                             /*Checks whether LSB of buffer or less is in RMC of store*/
                             AND buffer.last_saved_instance = rmc.instance_id
                             AND buffer.last_saved_counter <= rmc.counter
                             AND rmcb.transfer_session_id = '{transfer_session_id}'
                             AND buffer.transfer_session_id = '{transfer_session_id}')
                              """.format(buffer=Buffer._meta.db_table,
                                         store=Store._meta.db_table,
                                         rmc=RecordMaxCounter._meta.db_table,
                                         rmcb=RecordMaxCounterBuffer._meta.db_table,
                                         transfer_session_id=transfersession_id)

    cursor.execute(delete_rmcb_records)
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_delete_buffered_records(cursor, transfersession_id):
    # delete all buffer records which are a reverse FF (store version newer than buffer version)
    delete_buffered_records = """DELETE FROM {buffer}
                                 WHERE model_uuid in
                                 (SELECT buffer.model_uuid FROM {store} as store, {buffer} as buffer, {rmc} as rmc
                                 /*Scope to a single record*/
                                 WHERE store.id = buffer.model_uuid
                                 AND rmc.store_model_id = buffer.model_uuid
                                 /*Checks whether LSB of buffer or less is in RMC of store*/
                                 AND buffer.last_saved_instance = rmc.instance_id
                                 AND buffer.last_saved_counter <= rmc.counter
                                 AND buffer.transfer_session_id = '{transfer_session_id}')
                              """.format(buffer=Buffer._meta.db_table,
                                         store=Store._meta.db_table,
                                         rmc=RecordMaxCounter._meta.db_table,
                                         rmcb=RecordMaxCounterBuffer._meta.db_table,
                                         transfer_session_id=transfersession_id)
    cursor.execute(delete_buffered_records)
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_merge_conflict_rmcb(cursor, transfersession_id):
    # transfer record max counters for records with merge conflicts + perform max
    merge_conflict_rmc = """REPLACE INTO {rmc} (instance_id, counter, store_model_id)
                                SELECT rmcb.instance_id, rmcb.counter, rmcb.model_uuid
                                FROM {rmcb} AS rmcb, {store} AS store, {rmc} AS rmc, {buffer} AS buffer
                                /*Scope to a single record.*/
                                WHERE store.id = rmcb.model_uuid
                                AND store.id = rmc.store_model_id
                                AND store.id = buffer.model_uuid
                                /*Where buffer rmc is greater than store rmc*/
                                AND rmcb.instance_id = rmc.instance_id
                                AND rmcb.counter > rmc.counter
                                AND rmcb.transfer_session_id = '{transfer_session_id}'
                                /*Exclude fast-forwards*/
                                AND NOT EXISTS (SELECT 1 FROM {rmcb} AS rmcb2 WHERE store.id = rmcb2.model_uuid
                                                                              AND store.last_saved_instance = rmcb2.instance_id
                                                                              AND store.last_saved_counter <= rmcb2.counter
                                                                              AND rmcb2.transfer_session_id = '{transfer_session_id}')
                           """.format(buffer=Buffer._meta.db_table,
                                      store=Store._meta.db_table,
                                      rmc=RecordMaxCounter._meta.db_table,
                                      rmcb=RecordMaxCounterBuffer._meta.db_table,
                                      transfer_session_id=transfersession_id)
    cursor.execute(merge_conflict_rmc)
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_merge_conflict_buffer(cursor, current_id, transfersession_id):
    # transfer buffer serialized into conflicting store
    merge_conflict_store = """REPLACE INTO {store} (id, serialized, deleted, last_saved_instance, last_saved_counter, model_name,
                                                    profile, partition, source_id, conflicting_serialized_data, dirty_bit, _self_ref_fk)
                                        SELECT store.id, store.serialized, store.deleted OR buffer.deleted, '{current_instance_id}',
                                               {current_instance_counter}, store.model_name, store.profile, store.partition, store.source_id,
                                               buffer.serialized || '\n' || store.conflicting_serialized_data, 1, store._self_ref_fk
                                        FROM {buffer} AS buffer, {store} AS store
                                        /*Scope to a single record.*/
                                        WHERE store.id = buffer.model_uuid
                                        AND buffer.transfer_session_id = '{transfer_session_id}'
                                        /*Exclude fast-forwards*/
                                        AND NOT EXISTS (SELECT 1 FROM {rmcb} AS rmcb2 WHERE store.id = rmcb2.model_uuid
                                                                                      AND store.last_saved_instance = rmcb2.instance_id
                                                                                      AND store.last_saved_counter <= rmcb2.counter
                                                                                      AND rmcb2.transfer_session_id = '{transfer_session_id}')
                                  """.format(buffer=Buffer._meta.db_table,
                                             rmcb=RecordMaxCounterBuffer._meta.db_table,
                                             store=Store._meta.db_table,
                                             rmc=RecordMaxCounter._meta.db_table,
                                             transfer_session_id=transfersession_id,
                                             current_instance_id=current_id.id,
                                             current_instance_counter=current_id.counter)
    cursor.execute(merge_conflict_store)
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_delete_mc_buffer(cursor, transfersession_id):
    # delete records with merge conflicts from buffer
    delete_mc_buffer = """DELETE FROM {buffer}
                                WHERE EXISTS
                                (SELECT 1 FROM {store} AS store, {buffer} AS buffer
                                /*Scope to a single record.*/
                                WHERE store.id = {buffer}.model_uuid
                                AND {buffer}.transfer_session_id = '{transfer_session_id}'
                                /*Exclude fast-forwards*/
                                AND NOT EXISTS (SELECT 1 FROM {rmcb} AS rmcb2 WHERE store.id = rmcb2.model_uuid
                                                                              AND store.last_saved_instance = rmcb2.instance_id
                                                                              AND store.last_saved_counter <= rmcb2.counter
                                                                              AND rmcb2.transfer_session_id = '{transfer_session_id}'))
                           """.format(buffer=Buffer._meta.db_table,
                                      store=Store._meta.db_table,
                                      rmc=RecordMaxCounter._meta.db_table,
                                      rmcb=RecordMaxCounterBuffer._meta.db_table,
                                      transfer_session_id=transfersession_id)
    cursor.execute(delete_mc_buffer)
sync_utils.py (project: morango, author: learningequality)
def _dequeuing_delete_mc_rmcb(cursor, transfersession_id):
    # delete rmcb records with merge conflicts
    delete_mc_rmc = """DELETE FROM {rmcb}
                                WHERE EXISTS
                                (SELECT 1 FROM {store} AS store, {rmc} AS rmc
                                /*Scope to a single record.*/
                                WHERE store.id = {rmcb}.model_uuid
                                AND store.id = rmc.store_model_id
                                /*Where buffer rmc is greater than store rmc*/
                                AND {rmcb}.instance_id = rmc.instance_id
                                AND {rmcb}.transfer_session_id = '{transfer_session_id}'
                                /*Exclude fast-forwards*/
                                AND NOT EXISTS (SELECT 1 FROM {rmcb} AS rmcb2 WHERE store.id = rmcb2.model_uuid
                                                                              AND store.last_saved_instance = rmcb2.instance_id
                                                                              AND store.last_saved_counter <= rmcb2.counter
                                                                              AND rmcb2.transfer_session_id = '{transfer_session_id}'))
                           """.format(buffer=Buffer._meta.db_table,
                                      store=Store._meta.db_table,
                                      rmc=RecordMaxCounter._meta.db_table,
                                      rmcb=RecordMaxCounterBuffer._meta.db_table,
                                      transfer_session_id=transfersession_id)
    cursor.execute(delete_mc_rmc)
sync_utils.py (project: morango, author: learningequality)
def _dequeue_into_store(transfersession):
    """
    Takes data from the buffers and merges into the store and record max counters.
    """
    with connection.cursor() as cursor:
        _dequeuing_delete_rmcb_records(cursor, transfersession.id)
        _dequeuing_delete_buffered_records(cursor, transfersession.id)
        current_id = InstanceIDModel.get_current_instance_and_increment_counter()
        _dequeuing_merge_conflict_buffer(cursor, current_id, transfersession.id)
        _dequeuing_merge_conflict_rmcb(cursor, transfersession.id)
        _dequeuing_update_rmcs_last_saved_by(cursor, current_id, transfersession.id)
        _dequeuing_delete_mc_rmcb(cursor, transfersession.id)
        _dequeuing_delete_mc_buffer(cursor, transfersession.id)
        _dequeuing_insert_remaining_buffer(cursor, transfersession.id)
        _dequeuing_insert_remaining_rmcb(cursor, transfersession.id)
        _dequeuing_delete_remaining_rmcb(cursor, transfersession.id)
        _dequeuing_delete_remaining_buffer(cursor, transfersession.id)
    if getattr(settings, 'MORANGO_DESERIALIZE_AFTER_DEQUEUING', True):
        _deserialize_from_store(transfersession.sync_session.profile)
models.py (project: morango, author: learningequality)
def calculate_filter_max_counters(cls, filters):

        # create string of prefixes to place into sql statement
        condition = " UNION ".join(["SELECT '{}' AS a".format(prefix) for prefix in filters])

        filter_max_calculation = """
        SELECT PMC.instance, MIN(PMC.counter)
        FROM
            (
            SELECT dmc.instance_id as instance, MAX(dmc.counter) as counter, filter as filter_partition
            FROM {dmc_table} as dmc, (SELECT T.a as filter FROM ({filter_list}) as T)
            WHERE filter LIKE dmc.partition || '%'
            GROUP BY instance, filter_partition
            ) as PMC
        GROUP BY PMC.instance
        HAVING {count} = COUNT(PMC.filter_partition)
        """.format(dmc_table=cls._meta.db_table,
                   filter_list=condition,
                   count=len(filters))

        with connection.cursor() as cursor:
            cursor.execute(filter_max_calculation)
            return dict(cursor.fetchall())
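
A hedged call sketch: given the dmc_table alias this is presumably a classmethod on the DatabaseMaxCounter model, filters is an iterable of partition prefixes, and the result maps instance IDs to the minimum of their per-prefix maximum counters (the model name and prefixes below are assumptions).

# Hypothetical usage -- model name and partition prefixes are assumptions.
counters = DatabaseMaxCounter.calculate_filter_max_counters(["prefix_a", "prefix_b"])
# e.g. {'instance-id-1': 7, 'instance-id-2': 12}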
apps.py (project: kolibri, author: learningequality)
def activate_pragmas_per_connection(sender, connection, **kwargs):
        """
        Activate SQLite3 PRAGMAs that apply on a per-connection basis; currently
        this only shortens the WAL autocheckpoint interval (via CONNECTION_PRAGMAS).
        """

        if connection.vendor == "sqlite":
            cursor = connection.cursor()

            # Shorten the default WAL autocheckpoint from 1000 pages to 500
            cursor.execute(CONNECTION_PRAGMAS)

            # We don't turn on the following pragmas, because they have negligible
            # performance impact. For reference, here's what we've tested:

            # Don't ensure that the OS has fully flushed
            # our data to disk.
            # cursor.execute("PRAGMA synchronous=OFF;")

            # Store cross-database JOINs in memory.
            # cursor.execute("PRAGMA temp_store=MEMORY;")
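
CONNECTION_PRAGMAS is referenced but not defined in this snippet; based on the comment about shortening the WAL autocheckpoint from 1000 pages to 500, a plausible definition (an assumption) would be:

# Assumed definition of CONNECTION_PRAGMAS -- not shown in the snippet above.
CONNECTION_PRAGMAS = "PRAGMA wal_autocheckpoint=500;"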
apps.py (project: kolibri, author: learningequality)
def activate_pragmas_on_start():
        """
        Activate a set of PRAGMAs that apply to the database itself,
        and not on a per connection basis.
        """
        from django.db import connection

        if connection.vendor == "sqlite":
            cursor = connection.cursor()

            # http://www.sqlite.org/wal.html
            # WAL's main advantage allows simultaneous reads
            # and writes (vs. the default exclusive write lock)
            # at the cost of a slight penalty to all reads.
            cursor.execute(START_PRAGMAS)
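
START_PRAGMAS is likewise not shown; given the WAL comments above, a plausible definition (an assumption) is enabling write-ahead logging at the database level:

# Assumed definition of START_PRAGMAS -- not shown in the snippet above.
START_PRAGMAS = "PRAGMA journal_mode=WAL;"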
metrics.py (project: intel-manager-for-lustre, author: intel-hpdd)
def get_stats_size(self):
        stats_size = LazyStruct()
        from django.db import connection
        cursor = connection.cursor()
        if 'postgres' in connection.settings_dict['ENGINE']:
            stats_size.row_count = stats_size.data = stats_size.index = 0

            for model in Stats:
                cursor.execute("select count(id) as rows, pg_relation_size('{0}') as data_length, pg_total_relation_size('{0}') - pg_relation_size('{0}') as index_length from {0}".format(model._meta.db_table))
                rows, data, index = cursor.fetchone()
                stats_size.row_count += rows
                stats_size.data += data
                stats_size.index += index
        else:
            raise RuntimeError("Unsupported DB: %s" % connection.settings_dict['ENGINE'])
        return stats_size
system_status.py (project: intel-manager-for-lustre, author: intel-hpdd)
def _get_table(table, silence_columns=None):
    """Return the columns and rows of the given table as a dict, omitting any silenced columns."""
    if silence_columns is None:
        silence_columns = []

    cursor = connection.cursor()
    cursor.execute("SELECT * FROM %s;" % table)
    desc = cursor.description

    select_columns = []
    columns = [col[0] for col in desc]
    for i, col_name in enumerate(columns):
        if col_name not in silence_columns:
            select_columns.append(i)

    return {
        'columns': [columns[i] for i in select_columns],
        'rows': [
            [row[i] for i in select_columns] for row in cursor.fetchall()
        ]
    }
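
A hedged usage sketch (the table and silenced column names are placeholders): the returned dict pairs the surviving column headers with the corresponding values of every row.

# Hypothetical usage -- table and silenced column are placeholders.
dump = _get_table("django_migrations", silence_columns=["applied"])
print(dump['columns'])
for row in dump['rows']:
    print(row)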
0008_to_version_1.py (project: vaultier, author: Movile)
def _migrate_cards(vault_node):
    cursor = connection.cursor()
    cursor.execute("SELECT * FROM vaultier_card WHERE vault_id = %s",
                   [vault_node._vault])

    for c in _dictfetchall(cursor):
        node = Node(
            name=c['name'],
            meta=json.dumps({'description': c['description']}),
            created_by_id=c['created_by_id'],
            parent=vault_node,
            type=1
        )
        node._card = c['id']
        node.save(force_insert=True)
        _migrate_secret(node)
        _migrate_acl('card', c['id'], node)
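
_dictfetchall is used here but not shown; a typical implementation, following the dictfetchall pattern from the Django documentation (its exact form in this project is an assumption), is:

# Assumed implementation of _dictfetchall -- not shown in the snippets above.
def _dictfetchall(cursor):
    """Return all rows from a cursor as a list of dicts keyed by column name."""
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]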
0008_to_version_1.py (project: vaultier, author: Movile)
def migrate_from_workspaces(apps, schema_editor):

    if not workspace_exists():
        return

    cursor = connection.cursor()
    cursor.execute("ALTER TABLE public.vaultier_member ALTER COLUMN workspace_id DROP NOT NULL;")
    cursor.execute("SELECT * FROM vaultier_workspace")
    nodes = []
    for w in _dictfetchall(cursor):
        node = Node(
            name=w['name'],
            meta=json.dumps({'description': w['description']}),
            created_by_id=w['created_by_id'],
            type=1
        )
        node.save()
        node.acl.initialize_node()
        node._workspace = w['id']
        nodes.append(node)
        _migrate_members(node)
        _migrate_vaults(node)
        _migrate_acl('workspace', w['id'], node)
apps.py (project: django-icekit, author: ic-labs)
def update_site(sender, **kwargs):
    """
    Update `Site` object matching `SITE_ID` setting with `SITE_DOMAIN` and
    `SITE_PORT` settings.
    """
    Site = apps.get_model('sites', 'Site')
    domain = settings.SITE_DOMAIN
    if settings.SITE_PORT:
        domain += ':%s' % settings.SITE_PORT
    Site.objects.update_or_create(
        pk=settings.SITE_ID,
        defaults=dict(
            domain=domain,
            name=settings.SITE_NAME))

    # We set an explicit pk instead of relying on auto-incrementation,
    # so we need to reset the database sequence.
    sequence_sql = connection.ops.sequence_reset_sql(no_style(), [Site])
    if sequence_sql:
        cursor = connection.cursor()
        for command in sequence_sql:
            cursor.execute(command)

