def define_tables(cls, metadata):
    Table('test_table', metadata,
          Column('id', Integer, primary_key=True),
          Column('data', String(50))
          )
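For context, the fixture above can be exercised standalone; a minimal sketch, assuming an in-memory SQLite engine (the engine URL and surrounding scaffolding are illustrative, not part of the original fixture):

from sqlalchemy import create_engine, MetaData, Table, Column, Integer, String

engine = create_engine('sqlite://')  # illustrative in-memory database
metadata = MetaData()
test_table = Table('test_table', metadata,
                   Column('id', Integer, primary_key=True),
                   Column('data', String(50)))
metadata.create_all(engine)  # emits CREATE TABLE test_table (...)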
def test_nullable_reflection(self):
    t = Table('t', self.metadata,
              Column('a', Integer, nullable=True),
              Column('b', Integer, nullable=False))
    t.create()
    eq_(
        dict(
            (col['name'], col['nullable'])
            for col in inspect(self.metadata.bind).get_columns('t')
        ),
        {"a": True, "b": False}
    )
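The same reflection round-trip can be sketched outside the test harness; assuming a plain SQLite engine (note that `self.metadata.bind` is legacy SQLAlchemy, so an explicit engine is passed to `inspect()` here instead):

from sqlalchemy import create_engine, inspect, MetaData, Table, Column, Integer

engine = create_engine('sqlite://')
metadata = MetaData()
t = Table('t', metadata,
          Column('a', Integer, nullable=True),
          Column('b', Integer, nullable=False))
metadata.create_all(engine)

# get_columns() reports the reflected NULL/NOT NULL state as 'nullable'
nullable = {col['name']: col['nullable']
            for col in inspect(engine).get_columns('t')}
assert nullable == {'a': True, 'b': False}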
def define_tables(cls, metadata):
    Table(
        quoted_name('t1', quote=True), metadata,
        Column('id', Integer, primary_key=True),
    )
    Table(
        quoted_name('t2', quote=True), metadata,
        Column('id', Integer, primary_key=True),
        Column('t1id', ForeignKey('t1.id'))
    )
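A quick sketch of what `quoted_name(..., quote=True)` buys: it forces the identifier to be quoted in emitted DDL, even where the dialect would normally leave a lower-case name unquoted (the SQLite engine here is illustrative):

from sqlalchemy import create_engine, MetaData, Table, Column, Integer
from sqlalchemy.sql import quoted_name

engine = create_engine('sqlite://')
metadata = MetaData()
Table(quoted_name('t1', quote=True), metadata,
      Column('id', Integer, primary_key=True))
metadata.create_all(engine)  # renders CREATE TABLE "t1" (...), quoting forced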
def _adapt_expression(self, op, other_comparator):
    """evaluate the return type of <self> <op> <othertype>,
    and apply any adaptations to the given operator.

    This method determines the type of a resulting binary expression
    given two source types and an operator.  For example, two
    :class:`.Column` objects, both of the type :class:`.Integer`, will
    produce a :class:`.BinaryExpression` that also has the type
    :class:`.Integer` when combined via the addition (``+``) operator.
    However, using the addition operator with an :class:`.Integer`
    and a :class:`.Date` object will produce a :class:`.Date`, assuming
    "days delta" behavior by the database (in reality, most databases
    other than PostgreSQL don't accept this particular operation).

    The method returns a tuple of the form <operator>, <type>.
    The resulting operator and type will be those applied to the
    resulting :class:`.BinaryExpression` as the final operator and the
    right-hand side of the expression.

    Note that only a subset of operators make usage of
    :meth:`._adapt_expression`, including math operators and
    user-defined operators, but not boolean comparison or special
    SQL keywords like MATCH or BETWEEN.
    """
    return op, self.type
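The Integer-plus-Integer behavior the docstring describes can be checked directly; a minimal sketch:

from sqlalchemy import Integer, column

# both operands are Integer, so the BinaryExpression's type is Integer too
expr = column('x', Integer) + column('y', Integer)
assert isinstance(expr.type, Integer)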
def compare_server_default(self, inspector_column,
                           metadata_column,
                           rendered_metadata_default,
                           rendered_inspector_default):
    # partially a workaround for SQLAlchemy issue #3023; if the
    # column were created without "NOT NULL", MySQL may have added
    # an implicit default of '0' which we need to skip
    if metadata_column.type._type_affinity is sqltypes.Integer and \
            inspector_column.primary_key and \
            not inspector_column.autoincrement and \
            not rendered_metadata_default and \
            rendered_inspector_default == "'0'":
        return False
    else:
        return rendered_inspector_default != rendered_metadata_default
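The guard keys off `_type_affinity`, which collapses a concrete type to its generic base class; a small sketch of just that check (the default comparison itself requires a live Alembic autogenerate context, so only the type test is shown):

from sqlalchemy import Integer, SmallInteger

# SmallInteger and Integer both report Integer affinity, so the
# workaround applies to any integer-flavored primary key column
assert Integer()._type_affinity is Integer
assert SmallInteger()._type_affinity is Integer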
def check_constraint(self, name, source, condition, schema=None, **kw):
    t = sa_schema.Table(source, self.metadata(),
                        sa_schema.Column('x', Integer), schema=schema)
    ck = sa_schema.CheckConstraint(condition, name=name, **kw)
    t.append_constraint(ck)
    return ck
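A standalone equivalent of what this helper constructs, together with the DDL it yields (table and constraint names are illustrative):

from sqlalchemy import CheckConstraint, Column, Integer, MetaData, Table
from sqlalchemy.schema import CreateTable

t = Table('t', MetaData(), Column('x', Integer))
ck = CheckConstraint('x > 5', name='ck_t_x')
t.append_constraint(ck)
print(CreateTable(t))  # includes CONSTRAINT ck_t_x CHECK (x > 5)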
def define_request_data_counters_table():
    global request_data_counters_table

    request_data_counters_table = Table('ckanext_requestdata_counters',
                                        metadata,
                                        Column('id', types.UnicodeText,
                                               primary_key=True,
                                               default=make_uuid),
                                        Column('package_id',
                                               types.UnicodeText),
                                        Column('org_id', types.UnicodeText),
                                        Column('requests', types.Integer,
                                               default=0),
                                        Column('replied', types.Integer,
                                               default=0),
                                        Column('declined', types.Integer,
                                               default=0),
                                        Column('shared', types.Integer,
                                               default=0),
                                        Index('ckanext_requestdata_counters_'
                                              'id_idx', 'id'))

    mapper(
        ckanextRequestDataCounters,
        request_data_counters_table
    )
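For reference, a minimal self-contained sketch of the same classical-mapping pattern (names are illustrative; on SQLAlchemy 1.4+ the standalone `mapper()` call is spelled `registry().map_imperatively()`):

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.orm import registry

metadata = MetaData()
counters = Table('counters', metadata,
                 Column('id', Integer, primary_key=True),
                 Column('requests', Integer, default=0))

class Counters(object):
    pass

# imperative (classical) mapping of the plain class onto the table
registry().map_imperatively(Counters, counters)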
def get_column_default_string(self, column):
    if (isinstance(column.server_default, schema.DefaultClause) and
            isinstance(column.server_default.arg, basestring)):
        if isinstance(column.type, (sqltypes.Integer, sqltypes.Numeric)):
            return self.sql_compiler.process(text(column.server_default.arg))

    return super(InfoDDLCompiler, self).get_column_default_string(column)
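The case being special-cased is a string-valued server default on a numeric column; a sketch of the input side (re-processing the string as SQL text lets it render unquoted, i.e. DEFAULT 0 rather than DEFAULT '0'):

from sqlalchemy import Column, Integer, MetaData, Table

t = Table('t', MetaData(),
          Column('x', Integer, server_default='0'))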
### Informix wants the constraint name at the end, hence this is copied from sql/compiler.py
def visit_primary_key_constraint(self, constraint):
    # for columns with sqlite_autoincrement=True,
    # the PRIMARY KEY constraint can only be inline
    # with the column itself.
    if len(constraint.columns) == 1:
        c = list(constraint)[0]
        if c.primary_key and \
                c.table.kwargs.get('sqlite_autoincrement', False) and \
                issubclass(c.type._type_affinity, sqltypes.Integer) and \
                not c.foreign_keys:
            return None

    return super(SQLiteDDLCompiler, self).\
        visit_primary_key_constraint(constraint)
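A sketch of the table form that triggers the early return, assuming SQLite (with `sqlite_autoincrement=True`, the PRIMARY KEY must render inline on the column so AUTOINCREMENT can follow it):

from sqlalchemy import Column, Integer, MetaData, Table, create_engine

engine = create_engine('sqlite://')
t = Table('t', MetaData(),
          Column('id', Integer, primary_key=True),
          sqlite_autoincrement=True)
t.create(engine)  # id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT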
def visit_typeclause(self, typeclause):
    type_ = typeclause.type.dialect_impl(self.dialect)
    if isinstance(type_, sqltypes.Integer):
        return 'INTEGER'
    else:
        return super(DrizzleCompiler, self).visit_typeclause(typeclause)
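The type clause here is the target of a CAST; a minimal sketch of the expression side that this hook normalizes to `CAST(x AS INTEGER)` for the Drizzle/MySQL family:

from sqlalchemy import Integer, cast, column

expr = cast(column('x'), Integer)  # compiles to CAST(x AS INTEGER)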
def _expression_adaptations(self):
    return {
        operators.mul: {
            Interval: Interval,
            Numeric: self.__class__,
            Integer: self.__class__,
        },
        # Py2K
        operators.div: {
            Numeric: self.__class__,
            Integer: self.__class__,
        },
        # end Py2K
        operators.truediv: {
            Numeric: self.__class__,
            Integer: self.__class__,
        },
        operators.add: {
            Numeric: self.__class__,
            Integer: self.__class__,
        },
        operators.sub: {
            Numeric: self.__class__,
            Integer: self.__class__,
        }
    }
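The effect of this adaptation table is that arithmetic against Integer preserves the numeric type of the left-hand side; a small sketch:

from sqlalchemy import Integer, Numeric, column

# per the lookup above, Numeric * Integer keeps Numeric as the
# type of the resulting expression
expr = column('price', Numeric(10, 2)) * column('qty', Integer)
assert isinstance(expr.type, Numeric)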
def _check_constraint(self, name, source, condition, schema=None, **kw):
    t = sa_schema.Table(source, sa_schema.MetaData(),
                        sa_schema.Column('x', Integer), schema=schema)
    ck = sa_schema.CheckConstraint(condition, name=name, **kw)
    t.append_constraint(ck)
    return ck
def bulk_insert(self, table, rows):
    """Issue a "bulk insert" operation using the current
    migration context.

    This provides a means of representing an INSERT of multiple rows
    which works equally well in the context of executing on a live
    connection as well as that of generating a SQL script.  In the
    case of a SQL script, the values are rendered inline into the
    statement.

    e.g.::

        from alembic import op
        from datetime import date
        from sqlalchemy.sql import table, column
        from sqlalchemy import String, Integer, Date

        # Create an ad-hoc table to use for the insert statement.
        accounts_table = table('account',
            column('id', Integer),
            column('name', String),
            column('create_date', Date)
        )

        op.bulk_insert(accounts_table,
            [
                {'id': 1, 'name': 'John Smith',
                 'create_date': date(2010, 10, 5)},
                {'id': 2, 'name': 'Ed Williams',
                 'create_date': date(2007, 5, 27)},
                {'id': 3, 'name': 'Wendy Jones',
                 'create_date': date(2008, 8, 15)},
            ]
        )
    """
    self.impl.bulk_insert(table, rows)
def coerce_compared_value(self, op, value):
    # NOTE(mdietz): If left unimplemented, the column is coerced into a
    # string every time, causing the next_auto_assign_increment to be a
    # string concatenation rather than an addition. 'value' in the
    # signature is the "other" value being compared for the purposes of
    # casting.
    if isinstance(value, int):
        return types.Integer()
    return self
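To see the coercion in action, here is a hypothetical TypeDecorator wiring in the same hook (the class name and the String impl are assumptions for illustration; the real type in the source wraps its own implementation):

from sqlalchemy import column, types

class AutoAssignValue(types.TypeDecorator):
    # hypothetical string-backed type whose integer comparisons
    # should coerce to Integer rather than String
    impl = types.String
    cache_ok = True

    def coerce_compared_value(self, op, value):
        if isinstance(value, int):
            return types.Integer()
        return self

# the literal 1 is bound as Integer, so "+" stays addition instead of
# being adapted into string concatenation ("||")
expr = column('next_increment', AutoAssignValue()) + 1
assert isinstance(expr.right.type, types.Integer)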