def upgrade():
    """Flatten hierarchy level values and make level deletion cascade."""
    prefix = context.config.get_main_option('table_prefix')
    values_table = prefix + 'environment_hierarchy_level_value'
    fk_name = values_table + '_level_id_fkey'
    with op.batch_alter_table(values_table) as batch_op:
        # Values no longer form a tree, so the self-referencing link goes.
        batch_op.drop_column('parent_id')
        # Recreate the level FK so deleting a level removes its values.
        batch_op.drop_constraint(fk_name, type_='foreignkey')
        batch_op.create_foreign_key(
            fk_name,
            prefix + 'environment_hierarchy_level',
            ['level_id'], ['id'], ondelete='CASCADE')
        # Without a parent, (level_id, value) must now be unique on its own.
        batch_op.create_unique_constraint(
            values_table + '_level_id_value_unique',
            ['level_id', 'value'])
Python batch_alter_table() usage examples
a86472389a70_remove_hierarchy_for_level_values.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 28
收藏 0
点赞 0
评论 0
def upgrade():
    """Rename the schema/values tables to resource definitions/values."""
    prefix = context.config.get_main_option('table_prefix')
    op.drop_table(prefix + 'template')
    values_name = prefix + 'environment_schema_values'
    with op.batch_alter_table(values_name) as batch_op:
        # The FK must go before the column it covers is renamed.
        batch_op.drop_constraint(values_name + '_schema_id_fkey', 'foreignkey')
        batch_op.alter_column(
            'schema_id',
            new_column_name='resource_definition_id',
            existing_type=sa.Integer())
    op.rename_table(values_name, prefix + 'resource_values')
    op.rename_table(prefix + 'schema', prefix + 'resource_definition')
    with op.batch_alter_table(prefix + 'resource_definition') as batch_op:
        batch_op.drop_column('namespace_id')
    op.drop_table(prefix + 'namespace')
    values_name = prefix + 'resource_values'
    with op.batch_alter_table(values_name) as batch_op:
        # Re-link values to their definitions under the new names.
        batch_op.create_foreign_key(
            values_name + '_resource_definition_id_fkey',
            prefix + 'resource_definition',
            ['resource_definition_id'],
            ['id'])
def upgrade():
    """Move schedule run bookkeeping out of schedule_task."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'schedule_info',
        sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
        sa.Column('last_changed_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'))
    op.create_table(
        'schedule_meta',
        sa.Column('parent_id', sa.Integer(), nullable=False),
        sa.Column('last_run_at', sa.DateTime(), nullable=True),
        sa.Column('total_run_count', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['schedule_task.id'], ),
        sa.PrimaryKeyConstraint('parent_id'))
    # Run statistics now live in schedule_meta; drop the old columns.
    with op.batch_alter_table('schedule_task') as batch:
        batch.drop_column('total_run_count')
        batch.drop_column('last_run_at')
    # ### end Alembic commands ###
def create_foreign_key(
        self, name, referent, local_cols, remote_cols, **kw):
    """Issue a "create foreign key" instruction using the
    current batch migration context.

    The batch form of this call omits the ``source`` and
    ``source_schema`` arguments from the call, e.g.::

        with batch_alter_table("address") as batch_op:
            batch_op.create_foreign_key(
                "fk_user_address",
                "user", ["user_id"], ["id"])

    .. seealso::

        :meth:`.Operations.create_foreign_key`

    """
    # The batch impl supplies the source table and schema implicitly.
    parent = super(BatchOperations, self)
    return parent.create_foreign_key(
        name, self.impl.table_name, referent, local_cols,
        remote_cols, source_schema=self.impl.schema, **kw)
3a37e844b277_alter_constraints.py 文件源码
项目:cci-demo-flask
作者: circleci
项目源码
文件源码
阅读 21
收藏 0
点赞 0
评论 0
def upgrade():
    """Drop user name columns and cascade note/notebook author FKs."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'last_name')
    op.drop_column('users', 'first_name')
    ### end Alembic commands ###
    # Drop the old FKs in batch mode (required on SQLite), then recreate
    # them with ON DELETE CASCADE so deleting a user removes their content.
    with op.batch_alter_table("notes") as batch:
        batch.drop_constraint("notes_author_id_fkey", type_="foreignkey")
    with op.batch_alter_table("notebooks") as batch:
        batch.drop_constraint("notebooks_author_id_fkey", type_="foreignkey")
    op.create_foreign_key(
        "notes_author_id_fkey", "notes", "users",
        ["author_id"], ["id"], ondelete="CASCADE")
    op.create_foreign_key(
        "notebooks_author_id_fkey", "notebooks", "users",
        ["author_id"], ["id"], ondelete="CASCADE")
d054eefc4c5b_add_unique_constraint_on_component_name.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 23
收藏 0
点赞 0
评论 0
def upgrade():
    """Require component names to be unique."""
    prefix = context.config.get_main_option('table_prefix')
    component_table = prefix + 'component'
    with op.batch_alter_table(component_table) as batch_op:
        batch_op.create_unique_constraint(
            component_table + '_component_name_unique',
            ['name'])
d054eefc4c5b_add_unique_constraint_on_component_name.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 21
收藏 0
点赞 0
评论 0
def downgrade():
    """Allow duplicate component names again."""
    prefix = context.config.get_main_option('table_prefix')
    component_table = prefix + 'component'
    with op.batch_alter_table(component_table) as batch_op:
        batch_op.drop_constraint(
            component_table + '_component_name_unique',
            type_='unique')
967a44dd16d5_add_server_default_to_resource_values_values.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 20
收藏 0
点赞 0
评论 0
def upgrade():
    """Give resource values a server-side default of an empty object."""
    prefix = context.config.get_main_option('table_prefix')
    values_table = prefix + 'resource_values'
    with op.batch_alter_table(values_table) as batch_op:
        batch_op.alter_column(
            'values',
            server_default='{}',
            existing_type=tuning_box.db.Json())
967a44dd16d5_add_server_default_to_resource_values_values.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 20
收藏 0
点赞 0
评论 0
def downgrade():
    """Remove the server-side default from resource values."""
    prefix = context.config.get_main_option('table_prefix')
    values_table = prefix + 'resource_values'
    with op.batch_alter_table(values_table) as batch_op:
        batch_op.alter_column(
            'values',
            server_default=None,
            existing_type=tuning_box.db.Json())
adf671eddeb4_level_cascade_deletion.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 18
收藏 0
点赞 0
评论 0
def upgrade():
    """Make environment deletion cascade to its hierarchy levels."""
    prefix = context.config.get_main_option('table_prefix')
    level_table = prefix + 'environment_hierarchy_level'
    fk_name = prefix + 'environment_hierarchy_level_environment_id_fkey'
    with op.batch_alter_table(level_table) as batch_op:
        # Replace the plain FK with a cascading one.
        batch_op.drop_constraint(fk_name, type_='foreignkey')
        batch_op.create_foreign_key(
            fk_name,
            prefix + 'environment',
            ['environment_id'], ['id'], ondelete='CASCADE')
adf671eddeb4_level_cascade_deletion.py 文件源码
项目:tuning-box
作者: openstack
项目源码
文件源码
阅读 22
收藏 0
点赞 0
评论 0
def downgrade():
    """Restore the non-cascading environment FK on hierarchy levels."""
    prefix = context.config.get_main_option('table_prefix')
    level_table = prefix + 'environment_hierarchy_level'
    fk_name = prefix + 'environment_hierarchy_level_environment_id_fkey'
    with op.batch_alter_table(level_table) as batch_op:
        # Swap the cascading FK back to a plain one (no ondelete).
        batch_op.drop_constraint(fk_name, type_='foreignkey')
        batch_op.create_foreign_key(
            fk_name,
            prefix + 'environment',
            ['environment_id'], ['id'])
def upgrade():
    """Drop the is_html and use_layout_header flags from pages."""
    with op.batch_alter_table('pages') as pages:
        pages.drop_column('is_html')
        pages.drop_column('use_layout_header')
def downgrade():
    """Re-add the is_html and use_layout_header flags to pages."""
    with op.batch_alter_table('pages') as pages:
        # Nullable because existing rows have no value to backfill.
        pages.add_column(sa.Column('is_html', sa.Boolean(), nullable=True))
        pages.add_column(
            sa.Column('use_layout_header', sa.Boolean(), nullable=True))
def upgrade():
    """Upgrade database."""
    # SQLite cannot ALTER-add a NOT NULL column with a function default,
    # so force a full table rebuild on that dialect only.
    recreate = 'auto'
    sqlite_dialect = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect = (
            sa.dialects.sqlite.pysqlite.SQLiteDialect_pysqlite)
    if context.get_context().dialect.__class__ == sqlite_dialect:
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch:
        batch.add_column(
            sa.Column('played_at', sa.DateTime(), nullable=False,
                      server_default=sa.func.now()))
def downgrade():
    """Downgrade database."""
    with op.batch_alter_table('games') as batch:
        batch.drop_column('played_at')
def upgrade():
    """Upgrade database."""
    # SQLite cannot ALTER-add a NOT NULL column with a function default,
    # so force a full table rebuild on that dialect only.
    recreate = 'auto'
    sqlite_dialect = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect = (
            sa.dialects.sqlite.pysqlite.SQLiteDialect_pysqlite)
    if context.get_context().dialect.__class__ == sqlite_dialect:
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch:
        batch.add_column(
            sa.Column('last_modified_at', sa.DateTime(), nullable=False,
                      server_default=sa.func.now()))
def downgrade():
    """Downgrade database."""
    with op.batch_alter_table('games') as batch:
        batch.drop_column('last_modified_at')
def upgrade():
    """Upgrade database."""
    # SQLite cannot ALTER-add a NOT NULL column with a function default,
    # so force a full table rebuild on that dialect only.
    recreate = 'auto'
    sqlite_dialect = None
    if getattr(sa.dialects, 'sqlite', False):
        sqlite_dialect = (
            sa.dialects.sqlite.pysqlite.SQLiteDialect_pysqlite)
    if context.get_context().dialect.__class__ == sqlite_dialect:
        recreate = 'always'
    with op.batch_alter_table('games', recreate=recreate) as batch:
        batch.add_column(
            sa.Column('created_at', sa.DateTime(), nullable=False,
                      server_default=sa.func.now()))
def upgrade():
    """Replace the tty/stdin_open pair with one interactive flag."""
    with op.batch_alter_table('container', schema=None) as batch:
        batch.add_column(
            sa.Column('interactive', sa.Boolean(), nullable=True))
        batch.drop_column('tty')
        batch.drop_column('stdin_open')
def upgrade():
    """Forbid duplicate menu entries per date/mensa/category/description."""
    # batch_alter_table() is needed because SQLite does not support the
    # ALTER statement that would normally be emitted: it copies all data
    # into a rebuilt table and swaps it in only if everything succeeds.
    with op.batch_alter_table('menu_entries') as batch:
        batch.create_unique_constraint(
            constraint_name="unique_menu_entry_date_mensa_category_description",
            columns=['date_valid', 'mensa', 'category', 'description'])
def downgrade():
    """Drop the menu-entry uniqueness constraint."""
    with op.batch_alter_table('menu_entries') as batch:
        batch.drop_constraint(
            constraint_name="unique_menu_entry_date_mensa_category_description",
            type_='unique')
bdaa763e6c56_make_xcom_value_column_a_large_binary.py 文件源码
项目:incubator-airflow-old
作者: apache
项目源码
文件源码
阅读 29
收藏 0
点赞 0
评论 0
def upgrade():
    """Change the xcom value column to LargeBinary."""
    # LargeBinary can be smaller than the pickle type on some backends,
    # so existing data may be truncated here.
    # Batch mode keeps the SQLite workaround in place.
    with op.batch_alter_table("xcom") as batch:
        batch.alter_column('value', type_=sa.LargeBinary())
bdaa763e6c56_make_xcom_value_column_a_large_binary.py 文件源码
项目:incubator-airflow-old
作者: apache
项目源码
文件源码
阅读 27
收藏 0
点赞 0
评论 0
def downgrade():
    """Revert the xcom value column to a dill-pickled type."""
    # Batch mode keeps the SQLite workaround in place.
    with op.batch_alter_table("xcom") as batch:
        batch.alter_column('value', type_=sa.PickleType(pickler=dill))
2e541a1dcfed_task_duration.py 文件源码
项目:incubator-airflow-old
作者: apache
项目源码
文件源码
阅读 22
收藏 0
点赞 0
评论 0
def upgrade():
    """Store task_instance.duration as a float instead of an integer."""
    # Batch mode keeps the SQLite workaround in place.
    with op.batch_alter_table("task_instance") as batch:
        batch.alter_column(
            'duration',
            existing_type=mysql.INTEGER(display_width=11),
            type_=sa.Float(),
            existing_nullable=True)
def upgrade():
    """Track whether a user authenticates via LDAP."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('users', schema=None) as batch:
        # Server default 'false' lets the column be NOT NULL on old rows.
        batch.add_column(
            sa.Column('ldap_auth', sa.Boolean(),
                      server_default=sa.text('false'), nullable=False))
    ### end Alembic commands ###
def downgrade():
    """Remove the LDAP authentication flag from users."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('users', schema=None) as batch:
        batch.drop_column('ldap_auth')
    ### end Alembic commands ###
def upgrade():
    """Drop the location column from menus."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('menus', schema=None) as batch:
        batch.drop_column('location')
    ### end Alembic commands ###
def downgrade():
    """Re-add the location column to menus."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('menus', schema=None) as batch:
        batch.add_column(
            sa.Column('location', sa.VARCHAR(length=256), nullable=False))
    ### end Alembic commands ###
5927940a466e_create_shared_ips_columns.py 文件源码
项目:quark
作者: openstack
项目源码
文件源码
阅读 23
收藏 0
点赞 0
评论 0
def upgrade():
    """Add the enabled and do_not_use shared-IP flag columns."""
    with op.batch_alter_table(t1_name) as batch:
        # Defaults backfill existing rows so NOT NULL is satisfied.
        batch.add_column(
            sa.Column('enabled', sa.Boolean(), nullable=False,
                      server_default='1'))
    with op.batch_alter_table(t2_name) as batch:
        batch.add_column(
            sa.Column('do_not_use', sa.Boolean(), nullable=False,
                      server_default='0'))
5927940a466e_create_shared_ips_columns.py 文件源码
项目:quark
作者: openstack
项目源码
文件源码
阅读 21
收藏 0
点赞 0
评论 0
def downgrade():
    """Drop the shared-IP flag columns (alexm: believed never called)."""
    with op.batch_alter_table(t2_name) as batch:
        batch.drop_column('do_not_use')
    with op.batch_alter_table(t1_name) as batch:
        batch.drop_column('enabled')