Python insert() example source code
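
The snippets below are collected from open-source projects and show SQLAlchemy's insert() construct, most of them using the PostgreSQL dialect's insert() (aliased as p_insert in the statbot examples) together with on_conflict_do_update() or on_conflict_do_nothing() to perform upserts. As a minimal, self-contained sketch of that shared pattern (the table and column names here are invented for illustration):

from sqlalchemy import BigInteger, Column, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert as p_insert

metadata = MetaData()
# Invented table, for illustration only.
tb_guilds = Table(
    'guilds', metadata,
    Column('guild_id', BigInteger, primary_key=True),
    Column('name', String),
)

def upsert_guild_row(conn, guild_id, name):
    # Insert the row; if guild_id already exists, update it in place instead.
    values = {'guild_id': guild_id, 'name': name}
    stmt = p_insert(tb_guilds) \
            .values(values) \
            .on_conflict_do_update(index_elements=['guild_id'], set_=values)
    conn.execute(stmt)

Here index_elements names the unique or primary-key columns that detect the conflict, and set_ lists the columns to overwrite when the conflict fires.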

Source file: sql.py (project: statbot, author: strinking)
def upsert_guild(self, trans, guild):
        values = guild_values(guild)
        if self.guild_cache.get(guild.id) == values:
            self.logger.debug(f"Guild lookup for {guild.id} is already up-to-date")
            return

        self.logger.info(f"Updating lookup data for guild {guild.name}")
        ups = p_insert(self.tb_guilds) \
                .values(values) \
                .on_conflict_do_update(
                        index_elements=['guild_id'],
                        index_where=(self.tb_guilds.c.guild_id == guild.id),
                        set_=values,
                )
        trans.conn.execute(ups)
        self.guild_cache[guild.id] = values

    # Messages
Source file: sql.py (project: statbot, author: strinking)
def insert_message(self, trans, message):
        values = message_values(message)
        if self.message_cache.get(message.id) == values:
            self.logger.debug(f"Message lookup for {message.id} is already up-to-date")
            return

        self.logger.debug(f"Inserting message {message.id}")
        ins = p_insert(self.tb_messages) \
                .values(values) \
                .on_conflict_do_nothing(index_elements=['message_id'])
        trans.execute(ins)
        self.message_cache[message.id] = values

        self.upsert_user(trans, message.author)
        self.insert_mentions(trans, message)

    # Mentions
Source file: sql.py (project: statbot, author: strinking)
def upsert_role(self, trans, role):
        values = role_values(role)
        if self.role_cache.get(role.id) == values:
            self.logger.debug(f"Role lookup for {role.id} is already up-to-date")
            return

        self.logger.debug(f"Updating lookup data for role {role.name}")
        ups = p_insert(self.tb_roles) \
                .values(values) \
                .on_conflict_do_update(
                        index_elements=['role_id'],
                        index_where=(self.tb_roles.c.role_id == role.id),
                        set_=values,
                )
        trans.execute(ups)
        self.role_cache[role.id] = values

    # Channels
Source file: sql.py (project: statbot, author: strinking)
def upsert_voice_channel(self, trans, channel):
        values = voice_channel_values(channel)
        if self.voice_channel_cache.get(channel.id) == values:
            self.logger.debug(f"Voice channel lookup for {channel.id} is already up-to-date")
            return

        self.logger.debug(f"Updating lookup data for voice channel '{channel.name}'")
        ups = p_insert(self.tb_voice_channels) \
                .values(values) \
                .on_conflict_do_update(
                        index_elements=['voice_channel_id'],
                        index_where=(self.tb_voice_channels.c.voice_channel_id == channel.id),
                        set_=values,
                )
        trans.execute(ups)
        self.voice_channel_cache[channel.id] = values

    # Channel Categories
Source file: sql.py (project: statbot, author: strinking)
def upsert_channel_category(self, trans, category):
        values = channel_categories_values(category)
        if self.channel_category_cache.get(category.id) == values:
            self.logger.debug(f"Channel category lookup for {category.id} is already up-to-date")
            return

        self.logger.debug(f"Updating lookup data for channel category {category.name}")
        ups = p_insert(self.tb_channel_categories) \
                .values(values) \
                .on_conflict_do_update(
                        index_elements=['category_id'],
                        index_where=(self.tb_channel_categories.c.category_id == category.id),
                        set_=values,
                )
        trans.execute(ups)
        self.channel_category_cache[category.id] = values

    # Users
Source file: sql.py (project: statbot, author: strinking)
def upsert_user(self, trans, user):
        self.logger.debug(f"Upserting user {user.id}")
        values = user_values(user)
        if self.user_cache.get(user.id) == values:
            self.logger.debug(f"User lookup for {user.id} is already up-to-date")
            return

        ups = p_insert(self.tb_users) \
                .values(values) \
                .on_conflict_do_update(
                        index_elements=['user_id'],
                        index_where=(self.tb_users.c.user_id == user.id),
                        set_=values,
                )
        trans.execute(ups)
        self.user_cache[user.id] = values

    # Members
Source file: sql.py (project: statbot, author: strinking)
def upsert_emoji(self, trans, emoji):
        data = EmojiData(emoji)
        values = data.values()
        if self.emoji_cache.get(data.cache_id) == values:
            self.logger.debug(f"Emoji lookup for {data} is already up-to-date")
            return

        self.logger.debug(f"Upserting emoji {data}")
        ups = p_insert(self.tb_emojis) \
                .values(values) \
                .on_conflict_do_update(
                    index_elements=['emoji_id', 'emoji_unicode'],
                    index_where=and_(
                        self.tb_emojis.c.emoji_id == data.id,
                        self.tb_emojis.c.emoji_unicode == data.unicode,
                    ),
                    set_=values,
                )
        trans.execute(ups)
        self.emoji_cache[data.cache_id] = values

    # Audit log
Source file: db.py (project: yo, author: steemit)
def create_users(self, usernames):
        usernames = list(set(usernames))
        logger.debug('creating users', username_count=len(usernames))
        if not usernames:
            return True

        if self.backend == 'postgres':
            create_stmt = insert(user_settings_table). \
                on_conflict_do_nothing(index_elements=['username'])
        else:
            create_stmt = user_settings_table.insert(). \
                prefix_with('OR IGNORE')

        results = []
        async with self.async_engine.acquire() as conn:
            for username in usernames:
                try:
                    results.append(await conn.execute(
                        create_stmt.values(username=username)))
                except BaseException:
                    logger.exception(
                        'create_users failed', usernames=usernames, exc_info=True)
                    results.append(False)
        return results
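
create_users above branches on the backend: PostgreSQL gets an ON CONFLICT DO NOTHING clause, while SQLite falls back to prefixing the plain INSERT with OR IGNORE. A minimal sketch of the two statements side by side, assuming a user_settings_table with a unique username column:

from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects.postgresql import insert as pg_insert

metadata = MetaData()
user_settings_table = Table(
    'user_settings', metadata,
    Column('username', String, primary_key=True),
)

# PostgreSQL: INSERT ... ON CONFLICT (username) DO NOTHING
pg_stmt = pg_insert(user_settings_table) \
    .values(username='alice') \
    .on_conflict_do_nothing(index_elements=['username'])

# SQLite: INSERT OR IGNORE INTO user_settings (username) VALUES (?)
sqlite_stmt = user_settings_table.insert() \
    .prefix_with('OR IGNORE') \
    .values(username='alice')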
Source file: db.py (project: yo, author: steemit)
def register_service(self, conn, service_name):
        logger.info('registering service', service_name=service_name)
        tbl = services_table

        # add service to services table
        create_service_tx = conn.begin()
        create_service_stmt = tbl.insert().values(service_name=service_name)
        result = conn.execute(create_service_stmt)
        service_id = result.inserted_primary_key[0]
        create_service_tx.commit()

        result = Registration(
            service_name=service_name,
            service_id=service_id,
            service_status=ServiceState.DISABLED,
            service_extra={})
        logger.info('service registered', registration=result)
        return result
    # pylint: enable=no-self-use
Source file: dml.py (project: pgawedge, author: portfoliome)
def upsert_primary_key_statement(table: Table):
    """Insert data when primary key doesn't exist, else update."""

    ins = insert(table)
    constraint = table.primary_key.columns.keys()
    non_pkey_columns = set(
        c.name for c in table.columns.values() if c.primary_key is False
    )

    if non_pkey_columns:
        exclude = {
            k: v for k, v in ins.excluded.items() if k in non_pkey_columns
        }

        statement = ins.on_conflict_do_update(
            index_elements=constraint, set_=exclude
        )
    else:
        statement = ins.on_conflict_do_nothing(index_elements=constraint)

    return statement
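
The statement returned above updates every non-key column from PostgreSQL's excluded pseudo-table when the primary key already exists, and degrades to DO NOTHING for tables that consist only of key columns. A hedged usage sketch, with an invented table and connection string:

from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine

metadata = MetaData()
items = Table(
    'items', metadata,
    Column('id', Integer, primary_key=True),
    Column('label', String),
)

engine = create_engine('postgresql://localhost/example')  # assumed DSN
stmt = upsert_primary_key_statement(items)
with engine.begin() as conn:
    # executemany-style: each dict either inserts a new row or updates the
    # existing one whose primary key matches.
    conn.execute(stmt, [
        {'id': 1, 'label': 'first'},
        {'id': 2, 'label': 'second'},
    ])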
Source file: sql.py (project: statbot, author: strinking)
def insert_reaction(self, trans, reaction, users):
        self.logger.info(f"Inserting past reactions for {reaction.message.id}")
        self.upsert_emoji(trans, reaction.emoji)
        data = EmojiData(reaction.emoji)
        for user in users:
            self.upsert_user(trans, user)
            values = reaction_values(reaction, user, False)
            self.logger.debug(f"Inserting single reaction {data} from {user.id}")
            ins = p_insert(self.tb_reactions) \
                    .values(values) \
                    .on_conflict_do_nothing(index_elements=[
                        'message_id', 'emoji_id', 'emoji_unicode', 'user_id', 'created_at',
                    ])
            trans.execute(ins)
Source file: sql.py (project: statbot, author: strinking)
def upsert_member(self, trans, member):
        self.logger.debug(f"Upserting member data for {member.id}")
        values = guild_member_values(member)
        ups = p_insert(self.tb_guild_membership) \
                .values(values) \
                .on_conflict_do_update(
                        constraint='uq_guild_membership',
                        set_=values,
                )
        trans.execute(ups)

        self._delete_role_membership(trans, member)
        self._insert_role_membership(trans, member)

    # Emojis
Source file: main.py (project: kort-core, author: kort)
def add_errors_from_query(mission_type, elements):
    for element in elements:
        if element.get('type') == 'node':
            lon = element.get('lon')
            lat = element.get('lat')
        if element.get('type') == 'way' or element.get('type') == 'relation':
            center = element.get('center')
            if center:
                lon = center.get('lon')
                lat = center.get('lat')
            else:
                continue
        geom = 'SRID=4326;POINT(' + str(lon) + ' ' + str(lat) + ')'
        lon *= 10000000
        lat *= 10000000
        osmId = element.get('id')
        stmt = insert(osm_error).values(
            error_type_id=overpass_queries.mission_type_ids.get(mission_type), object_id=osmId,
            object_type=element.get('type'), error_name=mission_type,
            lat=lat, lon=lon, geom=geom, txt1=element.get('tags').get('name')
        )
        stmt = stmt.on_conflict_do_update(
            constraint=UniqueConstraint(osm_error.error_type_id, osm_error.object_type, osm_error.object_id),
            set_=dict(
                error_type_id=overpass_queries.mission_type_ids.get(mission_type), object_id=osmId,
                object_type=element.get('type'), error_name=mission_type,
                lat=lat, lon=lon, geom=geom, txt1=element.get('tags').get('name')
            )
        )
        db_session.execute(stmt)
    db_session.commit()
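
Instead of index_elements, on_conflict_do_update also accepts a constraint argument: either the constraint's name as a string (as upsert_member does above with 'uq_guild_membership') or the constraint object itself, as in add_errors_from_query. A minimal sketch with an invented table and a named unique constraint:

from sqlalchemy import Column, Integer, MetaData, String, Table, UniqueConstraint
from sqlalchemy.dialects.postgresql import insert as pg_insert

metadata = MetaData()
errors = Table(
    'errors', metadata,
    Column('error_type_id', Integer),
    Column('object_id', Integer),
    Column('status', String),
    UniqueConstraint('error_type_id', 'object_id', name='uq_errors_type_object'),
)

values = {'error_type_id': 1, 'object_id': 42, 'status': 'open'}
stmt = pg_insert(errors).values(values).on_conflict_do_update(
    constraint='uq_errors_type_object',  # or pass the UniqueConstraint object itself
    set_=values,
)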
Source file: load.py (project: data-hub-backend, author: uktrade-attic)
def to_sqla_table(table, data):
    'Load data into an SQLA table'
    if not isinstance(data, (list, map, filter)):
        raise Exception('`data` arg is not a list, map or filter object')
    results = []
    for chunk in itertools.zip_longest(*[iter(data)] * 5000):
        results.append(
            table.metadata.bind.execute(
                table.insert().values(list(filter(None, chunk)))
            )
        )
    return results
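
The chunking idiom above, itertools.zip_longest(*[iter(data)] * 5000), walks a single iterator in lockstep so that rows come out in chunks of 5000; the final chunk is padded with None, which is what the filter(None, chunk) call strips out before the multi-row insert. A standalone illustration with a chunk size of 3:

import itertools

data = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
chunks = [list(filter(None, chunk))
          for chunk in itertools.zip_longest(*[iter(data)] * 3)]
# chunks == [['a', 'b', 'c'], ['d', 'e', 'f'], ['g']]

Note that filter(None, ...) would also drop falsy rows; that is safe in to_sqla_table because every real row is a non-empty mapping.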
Source file: load.py (project: data-hub-backend, author: uktrade-attic)
def to_sqla_table_idempotent(table, data):
    '''
    Idempotently load data into an SQLA table, temporarily write out details on
    integrity errors to a file
    '''
    if not isinstance(data, (list, map, filter)):
        raise Exception('`data` arg is not a list, map or filter object')
    primary_key = etl.utils.primary_key(table)
    results = []
    missing = collections.defaultdict(set)
    for row in data:
        upsert = insert(table)\
            .values(**row)\
            .on_conflict_do_update(index_elements=[primary_key], set_=row)
        try:
            results.append(table.metadata.bind.execute(upsert))
        except sqla_exc.IntegrityError as exc:
            parsed = re.search(INTEGRITY_DETAILS, str(exc))
            if parsed:
                missing[table.name].add(row[primary_key])
                missing[parsed.group('table')].add(parsed.group('pkey'))
                continue
            LOGGER.error(
                '%s %s (%s) failed on :',
                datetime.datetime.now(), table.name, row[primary_key]
            )
            LOGGER.error(str(exc).split('\n')[0])
    return results, missing
Source file: load.py (project: data-hub-backend, author: uktrade-attic)
def from_ch(data):
    metadata = services.db.get_django_metadata()
    table = metadata.tables['company_companieshousecompany']
    return metadata.bind.connect().execute(table.insert(), data)
Source file: main.py (project: data-hub-backend, author: uktrade-attic)
def insert_or_report(execute_fn, table, rows):
    try:
        execute_fn(insert(table).values(rows))
    except Exception as exc:
        print(rows)
        print(exc)
        return
    LOGGER.info(
        '%s Inserted %s rows',
        datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        len(rows)
    )
Source file: db.py (project: yo, author: steemit)
def __create_generic_notification(self, table=None, **notification):
        with self.acquire_conn() as conn:
            tx = conn.begin()
            try:
                result = conn.execute(table.insert(), **notification)

                tx.commit()
                logger.debug(
                    'notification_stored',
                    nid=result.inserted_primary_key,
                    notify_type=notification.get('notify_type'))
                return True
            except (IntegrityError, SQLiteIntegrityError) as e:
                if is_duplicate_entry_error(e):
                    logger.debug(
                        '__create_generic_notification ignoring duplicate entry error')
                    return True
                else:
                    logger.exception('__create_generic_notification failed',
                                     **notification)
                    tx.rollback()
                    return False
            except BaseException:
                tx.rollback()
                logger.exception('__create_generic_notification failed', **notification)

            return False
Source file: db.py (project: yo, author: steemit)
def create_notifications(self, notifications):
        results = []

        # create non-existent users before creating notifications
        usernames = []
        for notification in notifications:
            usernames.append(notification['to_username'])
            usernames.append(notification.get('from_username'))
        usernames = set(u for u in usernames if u)

        results.append(await self.create_users(usernames))

        # group notifications by keys to allow multi-row inserts
        # grouped_notifications = toolz.groupby(lambda x: tuple(x.keys()),
        #                                      notifications)
        # logger.debug('create_notifications',
        #             notification_count=len(notifications),
        #             group_count=len(grouped_notifications.keys()))
        #futures = []

        wwwpoll_columns = set(c.name for c in wwwpoll_table.c._all_columns)
        async with self.async_engine.acquire() as conn:
            for n in notifications:
                results.append(await
                               conn.execute(notifications_table.insert().values(**n)))
                n2 = toolz.keyfilter(lambda k: k in wwwpoll_columns, n)
                results.append(await conn.execute(wwwpoll_table.insert().values(**n2)))
        return all(results)

    # notification retrieval methods

    # pylint: disable=too-many-arguments,too-many-locals
Source file: db.py (project: yo, author: steemit)
def create_user(self, username, transports=None):
        logger.info('creating user', username=username, transports=transports)

        user_settings_data = {'username': username}
        if transports:
            if isinstance(transports, dict):
                transports = ujson.dumps(transports)
            user_settings_data.update({'transports': transports})

        with self.acquire_conn() as conn:
            try:
                stmt = user_settings_table.insert(values=user_settings_data)
                result = conn.execute(stmt)
                if result.inserted_primary_key:
                    logger.info('user created', username=username)
                    return True
            except (IntegrityError, SQLiteIntegrityError) as e:
                if is_duplicate_entry_error(e):
                    logger.debug('create_user ignoring duplicate entry error')
                    return True
                else:
                    logger.exception(
                        'create_user failed',
                        username=username,
                        transports=transports,
                        exc_info=True)
                    return False
            except BaseException:
                logger.exception(
                    'create_user failed',
                    username=username,
                    transports=transports,
                    exc_info=True)
        return False
Source file: db.py (project: yo, author: steemit)
def create_action(self, notification, transport, status=None):
        with self.acquire_conn() as conn:
            create_action_stmt = actions_table.insert().values(
                nid=notification['nid'], transport=transport, status=status)
            result = conn.execute(create_action_stmt)
            return result.inserted_primary_key
Source file: stores.py (project: microcosm-eventsource, author: globality-corp)
def upsert_on_index_elements(self, instance):
        """
        Upsert an event by index elements.

        Uses ON CONFLICT ... DO NOTHING to handle uniqueness constraint violations without
        invalidating the current transaction completely.

        Depends on a unique constraint on the index elements to find the resulting entry.

        """
        with self.flushing():
            insert_statement = insert(self.model_class).values(
                instance._members(),
            )
            upsert_statement = insert_statement.on_conflict_do_nothing(
                index_elements=self.upsert_index_elements(),
            )
            self.session.execute(upsert_statement)

        most_recent = self._retrieve_most_recent(
            *[
                getattr(self.model_class, elem) == getattr(instance, elem)
                for elem in self.upsert_index_elements()
            ]
        )

        if not most_recent.is_similar_to(instance):
            raise ConcurrentStateConflictError()

        return most_recent
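
The method above pairs ON CONFLICT DO NOTHING with a follow-up read: whether or not the insert won the race, the row identified by the index elements is fetched back, and a mismatch with the instance being written is surfaced as a concurrent-state conflict. flushing(), _retrieve_most_recent() and ConcurrentStateConflictError belong to the surrounding microcosm-eventsource store; a rough, library-free sketch of the same insert-then-read-back idea might look like this (the helper name and arguments are invented):

from sqlalchemy import and_
from sqlalchemy.dialects.postgresql import insert as pg_insert

def insert_or_fetch(session, table, values, index_elements):
    # Try the insert; a conflicting existing row is silently left untouched.
    stmt = pg_insert(table).values(values).on_conflict_do_nothing(
        index_elements=index_elements,
    )
    session.execute(stmt)
    # Read back whichever row now owns those index-element values.
    filters = [table.c[name] == values[name] for name in index_elements]
    return session.execute(table.select().where(and_(*filters))).fetchone()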
Source file: repos.py (project: provenance, author: bmabey)
def _upsert_run(self, session, info):
        sql = pg.insert(db.Run).values(
            id=info['id'], info=info,
            hostname=info['host']['nodename'],
            created_at=info['created_at']
        ).on_conflict_do_nothing(index_elements=['id'])

        session.execute(sql)

        return db.Run(info)
Source file: dml.py (project: pgawedge, author: portfoliome)
def compile_insert(table: Table, column_names=None):
    """Sqlalchemy insert statement generator with Pyformat."""

    return insert(table).compile(dialect=PG_DIALECT, column_keys=column_names)
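
compile_insert above pre-renders the INSERT against the PostgreSQL dialect (assumed to be what the module-level PG_DIALECT constant holds), limited to column_names, so the resulting SQL carries pyformat placeholders that can be reused for bulk loading. Roughly what that rendering looks like for an invented two-column table:

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import insert

metadata = MetaData()
items = Table(
    'items', metadata,
    Column('id', Integer, primary_key=True),
    Column('label', String),
)

compiled = insert(items).compile(dialect=postgresql.dialect(),
                                 column_keys=['id', 'label'])
print(str(compiled))
# INSERT INTO items (id, label) VALUES (%(id)s, %(label)s)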
Source file: views.py (project: scrobbler, author: hatarist)
def scrobble():
    session_id, scrobbles = parse_scrobble_request(request.form)
    if not session_id:
        return api_response('BADREQUEST'), 400

    session = db.session.query(Session).filter(Session.session_id == session_id).first()

    for data in scrobbles:
        artist = db.session.query(Artist).filter(Artist.name == data['artist']).first()
        artist_id = None
        album_id = None

        if artist:
            artist_id = artist.id
            artist.local_playcount += 1

            album = db.session.query(Album).filter(
                Album.artist_id == artist_id,
                Album.name == data['album']
            ).first()

            if album:
                album_id = album.id
                album.local_playcount += 1

        # PG 9.5+: DO NOTHING if duplicate
        query = insert(Scrobble).values(
            user_id=session.user_id,
            token_id=session.token_id,
            played_at=data.pop('timestamp'),
            artist_id=artist_id,
            album_id=album_id,
            **data
        ).on_conflict_do_nothing(
            index_elements=['user_id', 'played_at', 'artist', 'track']
        )
        db.session.execute(query)
        # PG <9.5
        # scrobble = Scrobble(
        #     user_id=session.user_id,
        #     played_at=data.pop('timestamp'),
        #     artist_id=artist_id,
        #     **data
        # )
        # db.session.add(scrobble)

    db.session.commit()

    return api_response('OK')
Source file: sql.py (project: statbot, author: strinking)
def insert_mentions(self, trans, message):
        self.logger.debug(f"Inserting all mentions in message {message.id}")

        for id in message.raw_mentions:
            if id > MAX_ID:
                self.logger.error(f"User mention was too long: {id}")
                continue

            self.logger.debug(f"User mention: {id}")
            ins = p_insert(self.tb_mentions) \
                    .values({
                        'mentioned_id': id,
                        'type': MentionType.USER,
                        'message_id': message.id,
                        'channel_id': message.channel.id,
                        'guild_id': message.guild.id,
                    }) \
                    .on_conflict_do_nothing(index_elements=['mentioned_id', 'type', 'message_id'])
            trans.execute(ins)

        for id in message.raw_role_mentions:
            if id > MAX_ID:
                self.logger.error(f"Role mention was too long: {id}")
                continue

            self.logger.debug(f"Role mention: {id}")
            ins = p_insert(self.tb_mentions) \
                    .values({
                        'mentioned_id': id,
                        'type': MentionType.ROLE,
                        'message_id': message.id,
                        'channel_id': message.channel.id,
                        'guild_id': message.guild.id,
                    }) \
                    .on_conflict_do_nothing(index_elements=['mentioned_id', 'type', 'message_id'])
            trans.execute(ins)

        for id in message.raw_channel_mentions:
            if id > MAX_ID:
                self.logger.error(f"Channel mention was too long: {id}")
                continue

            self.logger.debug(f"Channel mention: {id}")
            ins = p_insert(self.tb_mentions) \
                    .values({
                        'mentioned_id': id,
                        'type': MentionType.CHANNEL,
                        'message_id': message.id,
                        'channel_id': message.channel.id,
                        'guild_id': message.guild.id,
                    }) \
                    .on_conflict_do_nothing(index_elements=['mentioned_id', 'type', 'message_id'])
            trans.execute(ins)

    # Typing

