Python code examples for get_bind()

Source file: 4fbcae87c090_move_user_timezone_to_a_column.py
Project: kuberdock-platform (author: cloudlinux)

def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('users', sa.Column('timezone', sa.String(length=64), server_default='UTC', nullable=False))
    ### end Alembic commands ###
    # Move the timezone value out of the JSON settings blob into the new column.
    key = 'timezone'
    for user in session.query(User):
        settings = json.loads(user.settings) if user.settings else {}
        if key in settings:
            user.timezone = settings[key]
            del settings[key]
            user.settings = json.dumps(settings)
        else:
            user.timezone = DEFAULT_TIMEZONE
    session.commit()
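The snippet above relies on module-level names that are not shown in the excerpt: the imports, the Session class, and the User model with DEFAULT_TIMEZONE. A minimal sketch of what such a header might look like, with the model reduced to only the columns the migration touches (names and column types here are assumptions, not the project's actual code):

from alembic import op
import json
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session

Base = declarative_base()
DEFAULT_TIMEZONE = 'UTC'  # assumed fallback value

class User(Base):
    # Lightweight stand-in for the application model; only the columns the
    # data migration reads or writes are declared here.
    __tablename__ = 'users'
    id = sa.Column(sa.Integer, primary_key=True)
    settings = sa.Column(sa.Text)
    timezone = sa.Column(sa.String(64))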
Source file: 7927d63d556_n_answers_migration.py
Project: FRG-Crowdsourcing (author: 97amarnathk)
def upgrade():
    task = table('task',
                 column('id'),
                 column('info')
                 )
    conn = op.get_bind()
    query = select([task.c.id, task.c.info])
    tasks = conn.execute(query)
    update_values = []
    for row in tasks:
        info_data = row.info
        info_dict = json.loads(info_data)
        # Drop the obsolete n_answers key from the JSON info blob.
        if info_dict.get('n_answers'):
            del info_dict['n_answers']
        update_values.append({'task_id': row.id, 'new_info': json.dumps(info_dict)})
    task_update = task.update().\
        where(task.c.id == bindparam('task_id')).\
        values(info=bindparam('new_info'))
    if len(update_values) > 0:
        conn.execute(task_update, update_values)
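The bound-parameter names ('task_id', 'new_info') are deliberately different from the column names: update().values(info=...) already reserves an implicit parameter named 'info', so reusing the column names as keys in the per-row dictionaries would collide with the statement's own parameters. Passing a list of dictionaries to execute() makes SQLAlchemy run the statement in executemany mode, roughly like this illustrative sketch:

stmt = task.update().\
    where(task.c.id == bindparam('task_id')).\
    values(info=bindparam('new_info'))
conn.execute(stmt, [
    {'task_id': 1, 'new_info': '{}'},
    {'task_id': 2, 'new_info': '{"foo": "bar"}'},
])  # one prepared UPDATE, executed once per parameter dictionary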
Source file: 7927d63d556_n_answers_migration.py
Project: FRG-Crowdsourcing (author: 97amarnathk)
def downgrade():
    task = table('task',
                 column('id'),
                 column('info'),
                 column('n_answers')
                 )
    conn = op.get_bind()
    query = select([task.c.id, task.c.info, task.c.n_answers])
    tasks = conn.execute(query)
    update_values = []
    for row in tasks:
        info_data = row.info
        info_dict = json.loads(info_data)
        info_dict['n_answers'] = row.n_answers
        update_values.append({'task_id': row.id, 'new_info': json.dumps(info_dict)})
    task_update = task.update().\
        where(task.c.id == bindparam('task_id')).\
        values(info=bindparam('new_info'))
    if len(update_values) > 0:
        conn.execute(task_update, update_values)
Source file: 18d04a76914f_billing_fixes.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    op.alter_column('kubes', 'name', existing_type=sa.VARCHAR(length=64),
                    nullable=False)
    op.create_index('one_default', 'kubes', ['is_default'], unique=True,
                    postgresql_where=sa.text(u'kubes.is_default IS true'))
    op.drop_constraint(u'kubes_is_default_key', 'kubes', type_='unique')
    op.alter_column('packages', 'name', existing_type=sa.VARCHAR(length=64),
                    nullable=False)
    op.alter_column('packages', 'prefix', existing_type=sa.VARCHAR(),
                    nullable=False)
    op.alter_column('packages', 'suffix', existing_type=sa.VARCHAR(),
                    nullable=False)
    # Remove orphaned package_kube rows before making the FK columns NOT NULL.
    session = Session(bind=op.get_bind())
    session.query(PackageKube).filter(sa.or_(
        PackageKube.package_id.is_(None), PackageKube.kube_id.is_(None),
    )).delete()
    session.commit()
    op.alter_column('package_kube', 'kube_id', existing_type=sa.INTEGER(),
                    nullable=False)
    op.alter_column('package_kube', 'package_id', existing_type=sa.INTEGER(),
                    nullable=False)
Source file: 1507a7289a2f_create_is_encrypted.py
Project: incubator-airflow-old (author: apache)
def upgrade():
    # first check if the user already has this done. This should only be
    # true for users who are upgrading from a previous version of Airflow
    # that predates Alembic integration
    conn = op.get_bind()
    inspector = Inspector.from_engine(conn)
    # this will only be true if 'connection' already exists in the db,
    # but not if alembic created it in a previous migration
    if 'connection' in inspector.get_table_names():
        col_names = [c['name'] for c in inspector.get_columns('connection')]
        if 'is_encrypted' in col_names:
            return
    op.add_column(
        'connection',
        sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))
    conn = op.get_bind()
    conn.execute(
        connectionhelper.update().values(is_encrypted=False)
    )
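The snippet above references a connectionhelper table object defined elsewhere in the same file, which the excerpt does not include. A plausible definition, reconstructed from how it is used (an assumption, not the project's verbatim code):

from sqlalchemy import Boolean, Column, Integer, MetaData, Table

# Minimal table object used only to emit the UPDATE against 'connection'.
connectionhelper = Table(
    'connection', MetaData(),
    Column('id', Integer, primary_key=True),
    Column('is_encrypted', Boolean))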
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('law', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_law_search_vector', 'law', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('page', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_page_search_vector', 'page', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('post', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_post_search_vector', 'post', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('proposal', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_proposal_search_vector', 'proposal', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('topic', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_topic_search_vector', 'topic', ['search_vector'], unique=False, postgresql_using='gin')
    op.add_column('user', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_user_search_vector', 'user', ['search_vector'], unique=False, postgresql_using='gin')
    # ### manually inserted searchable sync ###
    conn = op.get_bind()
    sync_trigger(conn, 'law', 'search_vector', ['content'])
    sync_trigger(conn, 'page', 'search_vector', ['title', 'content'])
    sync_trigger(conn, 'post', 'search_vector', ['content'])
    sync_trigger(conn, 'proposal', 'search_vector', ['description'])
    sync_trigger(conn, 'topic', 'search_vector', ['name', 'description'])
    sync_trigger(conn, 'user', 'search_vector', ['username'])
    # ### end Alembic commands ###
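sync_trigger here (and in several later examples) is most likely the helper from sqlalchemy-searchable, which creates or refreshes the PostgreSQL trigger that keeps each tsvector column in step with its source columns. The assumed module-level imports for this snippet would be:

import sqlalchemy_utils
from sqlalchemy_searchable import sync_trigger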
Source file: 1664300cb03a_populate_first_last_ip_to_ippc.py
Project: quark (author: openstack)
def upgrade():
    ip_policy_cidrs = table('quark_ip_policy_cidrs',
                            column('id', sa.String(length=36)),
                            column('first_ip', INET()),
                            column('last_ip', INET()),
                            column('cidr', sa.String(length=64)))
    connection = op.get_bind()
    # 1. Retrieve all ip_policy_cidr rows.
    results = connection.execute(
        select([ip_policy_cidrs.c.id, ip_policy_cidrs.c.cidr])
    ).fetchall()
    # 2. Populate first_ip, last_ip for each IP Policy CIDR.
    for ippc in results:
        net = netaddr.IPNetwork(ippc["cidr"]).ipv6()
        connection.execute(ip_policy_cidrs.update().values(
            first_ip=net.first, last_ip=net.last).where(
                ip_policy_cidrs.c.id == ippc["id"]))
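netaddr does the heavy lifting in that loop: .ipv6() maps the CIDR to its IPv4-mapped IPv6 form, and .first/.last are plain integers, which is what the custom INET column stores. A quick standalone illustration, independent of the migration:

import netaddr

net = netaddr.IPNetwork("192.168.0.0/30").ipv6()
print(net)        # ::ffff:192.168.0.0/126 (IPv4-mapped IPv6 network)
print(net.first)  # integer value of the first address in the network
print(net.last)   # integer value of the last address in the network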
Source file: a54c57ada3f5_removes_useless_indexes.py
Project: gnocchi (author: gnocchixyz)
def upgrade():
    bind = op.get_bind()
    # NOTE(sileht): mysql can't delete an index on a foreign key,
    # even if it is not the index used by the foreign key itself.
    # In our case we have two indexes, fk_resource_history_id_resource_id
    # and ix_resource_history_id; we want to delete only the second, but mysql
    # can't do that with a simple DROP INDEX ix_resource_history_id,
    # so we have to remove the constraint and put it back.
    if bind.engine.name == "mysql":
        op.drop_constraint("fk_resource_history_id_resource_id",
                           type_="foreignkey", table_name="resource_history")
    for table, colname in resource_tables + history_tables + other_tables:
        op.drop_index("ix_%s_%s" % (table, colname), table_name=table)
    if bind.engine.name == "mysql":
        op.create_foreign_key("fk_resource_history_id_resource_id",
                              "resource_history", "resource", ["id"], ["id"],
                              ondelete="CASCADE")
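resource_tables, history_tables, and other_tables are module-level lists defined outside the excerpt; from the way they are unpacked they must be sequences of (table_name, column_name) pairs. A hedged sketch of the expected shape (the entries below are invented for illustration, not gnocchi's actual list):

# Each entry names a table and the column whose ix_<table>_<column> index
# should be dropped; the real migration enumerates the project's tables.
resource_tables = [('resource', 'project_id'), ('resource', 'user_id')]
history_tables = [('resource_history', 'id')]
other_tables = [('metric', 'resource_id')]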
Source file: f8bb0efac483_nuget_ecosystem.py
Project: fabric8-analytics-worker (author: fabric8-analytics)
def upgrade():
    """Upgrade the database to a newer revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    # See https://bitbucket.org/zzzeek/alembic/issues/123/a-way-to-run-non-transactional-ddl
    connection = None
    if not op.get_context().as_sql:
        connection = op.get_bind()
        connection.execution_options(isolation_level='AUTOCOMMIT')
    op.execute("ALTER TYPE ecosystem_backend_enum ADD VALUE 'nuget'")
    op.execute("INSERT INTO ecosystems VALUES "
               "('{id}', '{name}', '{backend}', '{url}', '{fetch_url}')".
               format(id=8, name='nuget', backend='nuget',
                      url='https://nuget.org/', fetch_url='https://api.nuget.org/packages/'))
    if connection is not None:
        connection.execution_options(isolation_level='READ_COMMITTED')
    # ### end Alembic commands ###
Source file: d9530a529b3f_add_timezone_awareness_for_datetime.py
Project: hotface (author: linhanqiuinc24)
def downgrade():
    connection = op.get_bind()
    if connection.engine.dialect.name != "sqlite":
        # user/models.py
        op.alter_column('users', 'date_joined', type_=sa.DateTime(), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('users', 'lastseen', type_=sa.DateTime(), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('users', 'birthday', type_=sa.DateTime(), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('users', 'last_failed_login', type_=sa.DateTime(), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        # message/models.py
        op.alter_column('conversations', 'date_created', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('messages', 'date_created', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        # forum/models.py
        op.alter_column('topicsread', 'last_read', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('forumsread', 'last_read', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('forumsread', 'cleared', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('reports', 'reported', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('reports', 'zapped', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('posts', 'date_created', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('posts', 'date_modified', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('topics', 'date_created', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('topics', 'last_updated', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
        op.alter_column('forums', 'last_post_created', type_=sa.DateTime(timezone=False), existing_type=flaskbb.utils.database.UTCDateTime(timezone=True), existing_nullable=True)
Source file: 1507a7289a2f_create_is_encrypted.py
Project: airflow (author: apache-airflow)
def upgrade():
    # first check if the user already has this done. This should only be
    # true for users who are upgrading from a previous version of Airflow
    # that predates Alembic integration
    inspector = Inspector.from_engine(settings.engine)
    # this will only be true if 'connection' already exists in the db,
    # but not if alembic created it in a previous migration
    if 'connection' in inspector.get_table_names():
        col_names = [c['name'] for c in inspector.get_columns('connection')]
        if 'is_encrypted' in col_names:
            return
    op.add_column(
        'connection',
        sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))
    conn = op.get_bind()
    conn.execute(
        connectionhelper.update().values(is_encrypted=False)
    )
def upgrade():
    conn = op.get_bind()
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('projects', sa.Column('task_creation_mode', sa.Integer(), nullable=True))
    op.create_index('idx_geometry', 'projects', ['geometry'], unique=False, postgresql_using='gist')
    op.add_column('tasks', sa.Column('extra_properties', sa.Unicode(), nullable=True))
    for project in conn.execute(projects.select()):
        zooms = conn.execute(
            sa.sql.expression.select([tasks.c.zoom]).distinct(tasks.c.zoom)
            .where(tasks.c.project_id == project.id))
        zooms = zooms.fetchall()
        if len(zooms) == 1 and zooms[0] == (None,):
            op.execute(
                projects.update().where(projects.c.id == project.id)
                .values(task_creation_mode=1))
    # ### end Alembic commands ###
def upgrade():
    op.create_table(
        'pokemon',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('flavor_text', sa.Text, nullable=False),
        sa.Column('habitat', sa.Text, nullable=True, default=None),
        sa.Column('color', sa.Text, nullable=False),
        sa.Column('shape', sa.Text, nullable=False),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime,
                  default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime,
                  default=sa.func.current_timestamp(),
                  onupdate=sa.func.current_timestamp(), nullable=False)
    )
    conn = op.get_bind()
    sync_trigger(conn, 'pokemon', 'search_vector', searchable)
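This migration and the two table-creation migrations that follow (pokedexes, moves) all lean on two module-level pieces that the excerpt omits: a searchable list naming the columns to index, and the TSVectorType / sync_trigger imports. A hedged sketch of that preamble (the column list is a guess based on the table definition, not the project's verbatim code):

from sqlalchemy_searchable import sync_trigger
from sqlalchemy_utils import TSVectorType

# Columns fed both into the tsvector column definition and into the trigger.
searchable = ['name', 'flavor_text']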
Source file: 6914bc522f93_create_pokedexes_table.py
Project: cs373-idb (author: cedricgc)
def upgrade():
    op.create_table(
        'pokedexes',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('official_name', sa.Text, unique=True, nullable=False),
        sa.Column('region', sa.Text, nullable=True, default=None),
        sa.Column('description', sa.Text, nullable=True, default=None),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime,
                  default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime,
                  default=sa.func.current_timestamp(),
                  onupdate=sa.func.current_timestamp(), nullable=False)
    )
    conn = op.get_bind()
    sync_trigger(conn, 'pokedexes', 'search_vector', searchable)
def upgrade():
    op.create_table(
        'moves',
        sa.Column('id', sa.Integer, primary_key=True),
        sa.Column('name', sa.Text, unique=True, nullable=False),
        sa.Column('flavor_text', sa.Text, nullable=True, default=None),
        sa.Column('short_effect', sa.Text, nullable=False),
        sa.Column('effect', sa.Text, nullable=False),
        sa.Column('damage_class', sa.Text, nullable=True, default=None),
        sa.Column('power_points', sa.Integer, nullable=True, default=None),
        sa.Column('power', sa.Integer, nullable=True, default=None),
        sa.Column('accuracy', sa.Integer, nullable=True, default=None),
        sa.Column('search_vector', TSVectorType(searchable)),
        sa.Column('inserted_at', sa.DateTime,
                  default=sa.func.current_timestamp(), nullable=False),
        sa.Column('updated_at', sa.DateTime,
                  default=sa.func.current_timestamp(),
                  onupdate=sa.func.current_timestamp(), nullable=False)
    )
    conn = op.get_bind()
    sync_trigger(conn, 'moves', 'search_vector', searchable)
Source file: 20170123151655_add_trigger_for_meta_updated.py
Project: collectors (author: opentrials)
def upgrade():
    conn = op.get_bind()
    func = sa.DDL("""CREATE FUNCTION set_meta_updated()
        RETURNS TRIGGER
        LANGUAGE plpgsql
        AS $$
        BEGIN
            NEW.meta_updated := now();
            RETURN NEW;
        END;
        $$;""")
    conn.execute(func)
    for table in updatable_tables:
        trigger_params = {'trigger': ('%s_set_meta_updated' % table), 'table': table}
        trigger = ("""CREATE TRIGGER %(trigger)s
            BEFORE UPDATE ON %(table)s
            FOR EACH ROW EXECUTE PROCEDURE set_meta_updated();""" % trigger_params)
        conn.execute(trigger)
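updatable_tables is a module-level list of table names that the excerpt does not show. The matching downgrade would simply undo this DDL; a hedged sketch of what that could look like (not the project's actual code):

def downgrade():
    conn = op.get_bind()
    # Drop the per-table triggers first, then the shared trigger function.
    for table in updatable_tables:
        conn.execute('DROP TRIGGER IF EXISTS %s_set_meta_updated ON %s;' % (table, table))
    conn.execute('DROP FUNCTION IF EXISTS set_meta_updated();')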
Source file: 21bb97866ed8_added_chapter_and_section_to_exercises.py
Project: research-eGrader (author: openstax)
def upgrade():
    bind = op.get_bind()
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('exercises', sa.Column('chapter_id', sa.Integer(), nullable=True))
    op.add_column('exercises', sa.Column('section_id', sa.Integer(), nullable=True))
    op.add_column('exercises', sa.Column('book_row_id', sa.Integer(), nullable=True))
    op.add_column('subjects', sa.Column('book_url', sa.String(), nullable=True))
    ### end Alembic commands ###
    data = [
        {'id': 1, 'book_url': 'https://staging-tutor.cnx.org/contents/d52e93f4-8653-4273-86da-3850001c0786'},
        {'id': 2, 'book_url': 'https://staging-tutor.cnx.org/contents/334f8b61-30eb-4475-8e05-5260a4866b4b'}
    ]
    for item in data:
        update = sa.update(subject_table)\
            .where(subject_table.c.id == item['id'])\
            .values(dict(book_url=item['book_url']))
        bind.execute(update)
Source file: 2bceb2cb4d7c_add_comment_count_to_torrent.py
Project: nyaa (author: nyaadevs)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('nyaa_torrents', sa.Column('comment_count', sa.Integer(), nullable=False))
    op.create_index(op.f('ix_nyaa_torrents_comment_count'), 'nyaa_torrents', ['comment_count'], unique=False)
    op.add_column('sukebei_torrents', sa.Column('comment_count', sa.Integer(), nullable=False))
    op.create_index(op.f('ix_sukebei_torrents_comment_count'), 'sukebei_torrents', ['comment_count'], unique=False)
    # ### end Alembic commands ###
    connection = op.get_bind()
    print('Updating comment counts on nyaa_torrents...')
    connection.execute(sa.sql.text(COMMENT_UPDATE_SQL.format('nyaa')))
    print('Done.')
    print('Updating comment counts on sukebei_torrents...')
    connection.execute(sa.sql.text(COMMENT_UPDATE_SQL.format('sukebei')))
    print('Done.')
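COMMENT_UPDATE_SQL is a module-level SQL template that the excerpt does not include; it is formatted with the site prefix ('nyaa' or 'sukebei') and back-fills the new column from existing comment rows. A hedged guess at its shape, purely for illustration (the table and column names below are assumptions):

COMMENT_UPDATE_SQL = """
    UPDATE {0}_torrents t
    SET comment_count = (
        SELECT COUNT(*) FROM {0}_comments c WHERE c.torrent_id = t.id
    )
"""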
Source file: 9ae15c85fa92_remove_fake_root_hierarchy_level_values.py
Project: tuning-box (author: openstack)
def _get_ehlv_class():
    table_prefix = context.config.get_main_option('table_prefix')
    bind = op.get_bind()
    AutoBase = _get_autobase(table_prefix, bind)
    return AutoBase.classes.EnvironmentHierarchyLevelValue
Source file: 9ae15c85fa92_remove_fake_root_hierarchy_level_values.py
Project: tuning-box (author: openstack)
def _get_session():
    return sa.orm.Session(bind=op.get_bind(), autocommit=True)
def upgrade():
    context = op.get_context()
    connection = op.get_bind()
    op.create_table('message_blacklist',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('login_id', sa.BigInteger(), nullable=False),
                    sa.Column('blacklist', postgresql.ARRAY(sa.Integer)),
                    sa.ForeignKeyConstraint(['login_id'], ['login.id'], ondelete='CASCADE', name="ref_message_blacklist_login_id_to_login"),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('idx_message_blacklist_login_id'), 'message_blacklist', ['login_id'], unique=True)
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    conn = op.get_bind()
    op.add_column('episode', sa.Column('search_vector', sqlalchemy_utils.types.ts_vector.TSVectorType(), nullable=True))
    op.create_index('ix_episode_search_vector', 'episode', ['search_vector'], unique=False, postgresql_using='gin')
    sync_trigger(conn, 'episode', 'search_vector', ['title', 'description'])
    ### end Alembic commands ###
def get_bind(self):
    """Return the current 'bind'.

    Under normal circumstances, this is the
    :class:`~sqlalchemy.engine.Connection` currently being used
    to emit SQL to the database.

    In a SQL script context, this value is ``None``. [TODO: verify this]

    """
    return self.migration_context.impl.bind
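This appears to be Alembic's own Operations.get_bind() method, the one all of the migrations above call through op.get_bind(). The returned Connection can be used directly for SQL, or wrapped in an ORM Session for model-style data migrations; a minimal sketch of both patterns (table and column names are placeholders):

from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import Session

def upgrade():
    bind = op.get_bind()

    # Core style: execute SQL or SQLAlchemy expressions on the connection.
    bind.execute(sa.text("UPDATE users SET timezone = 'UTC' WHERE timezone IS NULL"))

    # ORM style: wrap the same connection in a Session for model queries.
    session = Session(bind=bind)
    # ... session.query(...) / session.add(...) ...
    session.commit()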
Source file: 28b23145af40_add_is_default_flag_to_kubes.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('kubes', sa.Column('is_default', sa.Boolean(), nullable=True))
    op.create_unique_constraint(None, 'kubes', ['is_default'])
    ### end Alembic commands ###
    kube = session.query(Kube).filter(Kube.id >= 0).order_by(Kube.id).first()
    if kube is not None:
        kube.is_default = True
        session.commit()
Source file: 50e4a32fa6c3_add_kube_id_field_to_PodState.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    op.add_column('pod_states', sa.Column('kube_id', sa.Integer(),
                                          nullable=True))
    session = sa.orm.sessionmaker()(bind=op.get_bind())
    for (pod_id, kube_id) in session.query(Pod.id, Pod.kube_id):
        session.query(PodState).filter_by(pod_id=pod_id).update(
            {'kube_id': kube_id})
    session.commit()
    op.alter_column('pod_states', 'kube_id', nullable=False)
Source file: 2c64986d76b9_change_package_payment_type.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    conn = op.get_bind()
    q = conn.execute("SELECT count_type FROM packages WHERE name='Standard package'")
    r = q.fetchall()
    if len(r) and len(r[0]) and r[0][0] is None:
        conn.execute("UPDATE packages SET count_type='fixed' WHERE name='Standard package'")
    op.alter_column('packages', 'count_type', nullable=False, server_default='fixed')
Source file: 56ab56a9ac5_change_settings_schema_and_add_initial_.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    session = Session(bind=op.get_bind())
    op.drop_column('system_settings', 'created')
    op.drop_column('system_settings', 'deleted')
    op.add_column('system_settings', sa.Column('label', sa.Text, nullable=True))
    op.add_column('system_settings', sa.Column('description', sa.Text, nullable=True))
    op.add_column('system_settings', sa.Column('placeholder', sa.String, nullable=True))
    billing_link = session.query(SystemSettings).filter_by(name='billing_apps_link').order_by(SystemSettings.id.desc()).first()
    if billing_link is not None:
        last = billing_link.id
        session.query(SystemSettings).filter(SystemSettings.id != last).delete()
        billing_link.label = 'Link to billing system script'
        billing_link.description = 'Link to predefined application request processing script'
        billing_link.placeholder = 'http://whmcs.com/script.php'
    else:
        bl = SystemSettings(name='billing_apps_link',
                            label='Link to billing system script',
                            description='Link to predefined application request processing script',
                            placeholder='http://whmcs.com/script.php')
        session.add(bl)
    pd = SystemSettings(name='persitent_disk_max_size',
                        value='10',
                        label='Persistent disk maximum size',
                        description='maximum capacity of a user container persistent disk in GB',
                        placeholder='Enter value to limit PD size')
    session.add(pd)
    ms = SystemSettings(name='default_smtp_server',
                        label='Default SMTP server',
                        description='Default SMTP server',
                        placeholder='Default SMTP server')
    session.add(ms)
    session.commit()
    op.create_unique_constraint('uq_system_settings_name', 'system_settings', ['name'])
Source file: 2df8c40ab250_add_default_package_flag.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    bind = op.get_bind()
    op.add_column('packages', sa.Column('is_default', sa.Boolean(), nullable=True))
    op.create_unique_constraint(None, 'packages', ['is_default'])
    bind.execute("UPDATE packages SET is_default=true WHERE id in (SELECT MIN(id) FROM packages)")
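Several of these kuberdock snippets pass a raw SQL string straight to Connection.execute(), which older SQLAlchemy accepts. Under SQLAlchemy 2.0 the string must be wrapped in sa.text(); a hedged equivalent of the call above:

bind.execute(sa.text(
    "UPDATE packages SET is_default=true "
    "WHERE id in (SELECT MIN(id) FROM packages)"))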
Source file: 2e00e70316c0_link_container_and_pod_states.py
Project: kuberdock-platform (author: cloudlinux)
def downgrade_data():
    session = Session(bind=op.get_bind())
    for cs in session.query(ContainerState).all():
        cs.pod_id = session.query(PodState).get(cs.pod_state_id).pod_id
    session.commit()
Source file: 46b5b819ba35_change_settings_visibility_and_pods_url_.py
Project: kuberdock-platform (author: cloudlinux)
def upgrade():
    session = Session(bind=op.get_bind())
    pods = session.query(MenuItem).filter(MenuItem.name == 'Pods').one()
    pods.path = '/pods/'
    admin = session.query(Role).filter(Role.rolename == 'Admin').one()
    for i in session.query(MenuItemRole).filter(MenuItemRole.role != admin):
        if i.menuitem.name == 'Settings':
            session.delete(i)
    session.commit()