We extracted the following 8 code examples from open source Python projects to illustrate how to use sqlalchemy.LargeBinary().
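Before the extracted examples, here is a minimal sketch of declaring a LargeBinary column in a declarative model. The Attachment model and its column names are illustrative only (they do not come from any of the projects below), and the sketch assumes SQLAlchemy 1.4+ for sqlalchemy.orm.declarative_base:

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Attachment(Base):
    # Hypothetical model used only to illustrate a LargeBinary column.
    __tablename__ = 'attachment'

    id = sa.Column(sa.Integer, primary_key=True)
    # LargeBinary compiles to the backend's generic binary type
    # (BYTEA on PostgreSQL, BLOB on MySQL and SQLite).
    payload = sa.Column(sa.LargeBinary, nullable=False)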
from alembic import op
import sqlalchemy as sa


def upgrade():
    # There can be data truncation here as LargeBinary can be smaller than the pickle
    # type.

    # use batch_alter_table to support SQLite workaround
    with op.batch_alter_table("xcom") as batch_op:
        batch_op.alter_column('value', type_=sa.LargeBinary())
def init(self, auto_create=True):
    # TODO handle if user does not pass in table sqlite://path.db
    uri_splt = self.uri.split(":")
    engine_uri = u":".join(uri_splt[:-1])
    table_name = uri_splt[-1]

    metadata = MetaData()
    postref_table = Table(table_name, metadata,
                          Column('id', Integer, primary_key=True),
                          Column('created_at', DateTime, default=func.now()),
                          Column('updated_at', DateTime, default=func.now(),
                                 onupdate=func.current_timestamp()),
                          Column('uuid', String(512)),
                          Column('path', String(512)),
                          Column('revision', Integer, default=0),
                          Column('status', Integer, default=self.PostStatus.DRAFT.value),
                          Column('ref', String(512)),
                          Column('data', LargeBinary))

    self.engine = create_engine(engine_uri, pool_recycle=3600)
    self.session = scoped_session(sessionmaker(bind=self.engine))

    if auto_create:
        postref_table.create(self.engine, checkfirst=True)

    class PostRef(object):
        pass

    mapper(PostRef, postref_table)
    self.PostRef = PostRef

# ------------- Repository actions / state ------------------------------------
def variants_genotype_columns(self):
    return [sql.Column(name, sql.LargeBinary()) for name in self.gt_cols]
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# MYSQL_ENGINE and MYSQL_CHARSET are module-level constants defined elsewhere
# in the original migration.


def upgrade():
    op.create_table(
        'glare_blob_data',
        sa.Column('id', sa.String(255), primary_key=True, nullable=False),
        # Because of strange behavior of mysql LargeBinary is converted to
        # BLOB instead of LONGBLOB. So we have to fix it explicitly with
        # 'with_variant' call.
        sa.Column(
            'data',
            sa.LargeBinary().with_variant(mysql.LONGBLOB(), 'mysql'),
            nullable=False),
        sa.PrimaryKeyConstraint('id'),
        mysql_engine=MYSQL_ENGINE,
        mysql_charset=MYSQL_CHARSET
    )
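The same with_variant() technique works outside Alembic as well. A minimal sketch against a plain Core table; the blob_data table here is hypothetical and only illustrates attaching the MySQL LONGBLOB variant to a LargeBinary column:

import sqlalchemy as sa
from sqlalchemy.dialects import mysql

metadata = sa.MetaData()

# Hypothetical table used only to illustrate with_variant() on LargeBinary.
blob_data = sa.Table(
    'blob_data', metadata,
    sa.Column('id', sa.String(255), primary_key=True),
    # Generic LargeBinary on every backend, but LONGBLOB when created on MySQL.
    sa.Column('data',
              sa.LargeBinary().with_variant(mysql.LONGBLOB(), 'mysql'),
              nullable=False),
)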
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql


def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('server_session',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('session_id', sa.TEXT(), nullable=True),
        sa.Column('data', sa.LargeBinary(), nullable=True),
        sa.Column('expiry', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('session_id')
    )
    ### end Alembic commands ###
from alembic import op
import sqlalchemy as sa


def upgrade():
    op.create_table('accounts',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('policy_enabled', sa.Boolean(), server_default='FALSE', nullable=False),
        sa.Column('policy_keep_latest', sa.Integer(), server_default='0', nullable=False),
        sa.Column('policy_keep_favourites', sa.Boolean(), server_default='TRUE', nullable=False),
        sa.Column('policy_delete_every', sa.Interval(), server_default='0', nullable=False),
        sa.Column('policy_keep_younger', sa.Interval(), server_default='0', nullable=False),
        sa.Column('display_name', sa.String(), nullable=True),
        sa.Column('screen_name', sa.String(), nullable=True),
        sa.Column('avatar_url', sa.String(), nullable=True),
        sa.Column('last_fetch', sa.DateTime(), server_default='epoch', nullable=True),
        sa.Column('last_delete', sa.DateTime(), server_default='epoch', nullable=True),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_accounts'))
    )
    op.create_table('oauth_tokens',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('token', sa.String(), nullable=False),
        sa.Column('token_secret', sa.String(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=True),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], name=op.f('fk_oauth_tokens_account_id_accounts'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('token', name=op.f('pk_oauth_tokens'))
    )
    op.create_table('posts',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('body', sa.String(), nullable=True),
        sa.Column('author_id', sa.String(), nullable=False),
        sa.Column('favourite', sa.Boolean(), server_default='FALSE', nullable=False),
        sa.ForeignKeyConstraint(['author_id'], ['accounts.id'], name=op.f('fk_posts_author_id_accounts'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_posts'))
    )
    op.create_table('sessions',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.String(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=False),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], name=op.f('fk_sessions_account_id_accounts'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_sessions'))
    )
    op.create_table('twitter_archives',
        sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('account_id', sa.String(), nullable=False),
        sa.Column('body', sa.LargeBinary(), nullable=False),
        sa.Column('chunks', sa.Integer(), nullable=True),
        sa.Column('chunks_successful', sa.Integer(), server_default='0', nullable=False),
        sa.Column('chunks_failed', sa.Integer(), server_default='0', nullable=False),
        sa.ForeignKeyConstraint(['account_id'], ['accounts.id'], name=op.f('fk_twitter_archives_account_id_accounts'), onupdate='CASCADE', ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_twitter_archives'))
    )
from alembic import op
import sqlalchemy as sa


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('build',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('build', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('classification',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('classification', sa.String(), nullable=True),
        sa.Column('domain', sa.String(), nullable=True),
        sa.Column('probe', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('guilty',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('function', sa.String(), nullable=True),
        sa.Column('module', sa.String(), nullable=True),
        sa.Column('comment', sa.String(), nullable=True),
        sa.Column('hide', sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('guilty_blacklisted',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('function', sa.String(), nullable=True),
        sa.Column('module', sa.String(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('records',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('severity', sa.Integer(), nullable=True),
        sa.Column('machine', sa.String(), nullable=True),
        sa.Column('machine_id', sa.String(), nullable=True),
        sa.Column('architecture', sa.String(), nullable=True),
        sa.Column('kernel_version', sa.String(), nullable=True),
        sa.Column('os_name', sa.String(), nullable=True),
        sa.Column('record_format_version', sa.Integer(), nullable=True),
        sa.Column('payload_format_version', sa.Integer(), nullable=True),
        sa.Column('payload', sa.LargeBinary(), nullable=True),
        sa.Column('tsp', sa.Integer(), nullable=True),
        sa.Column('tsp_server', sa.Integer(), nullable=True),
        sa.Column('buildstamp', sa.String(), nullable=True),
        sa.Column('backtrace', sa.String(), nullable=True),
        sa.Column('dupe_of', sa.Integer(), nullable=True),
        sa.Column('dupecount', sa.Integer(), nullable=True),
        sa.Column('dupemaster', sa.Boolean(), nullable=True),
        sa.Column('security', sa.Boolean(), nullable=True),
        sa.Column('hide', sa.Boolean(), nullable=True),
        sa.Column('processed', sa.Boolean(), nullable=True),
        sa.Column('icon', sa.String(), nullable=True),
        sa.Column('classification_id', sa.Integer(), nullable=True),
        sa.Column('build_id', sa.Integer(), nullable=True),
        sa.Column('guilty_id', sa.Integer(), nullable=True),
        sa.Column('external', sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(['build_id'], ['build.id'], ),
        sa.ForeignKeyConstraint(['classification_id'], ['classification.id'], ),
        sa.ForeignKeyConstraint(['guilty_id'], ['guilty.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def map_column(self, mode: EditMode, request: Request, node: colander.SchemaNode, model: type, name: str, column: Column, column_type: TypeEngine) -> t.Tuple[colander.SchemaType, dict]:
    """Map non-relationship SQLAlchemy column to Colander SchemaNode.

    :return: Tuple(constructed colander.SchemaType, dict of additional colander.SchemaNode construction arguments)
    """
    logger.debug("Mapping field %s, mode %s, node %s, column %s, column type %s", name, mode, node, column, column_type)

    # Check for autogenerated columns (updated_at)
    if column.onupdate:
        if mode in (EditMode.edit, EditMode.add):
            return TypeOverridesHandling.drop, {}

    # Don't fill default values when added, as they are automatically populated
    if column.default:
        if mode == EditMode.add:
            return TypeOverridesHandling.drop, {}

    # Never add primary keys
    # NOTE: TODO: We need to preserve ids because of nesting mechanism and groupedit widget wants its id
    if column.primary_key:
        # TODO: Looks like column.autoincrement is set True by default, so we cannot use it here
        if mode in (EditMode.edit, EditMode.add):
            return TypeOverridesHandling.drop, {}

    if column.foreign_keys:
        # Handled by relationship mapper
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, (PostgreSQLUUID, columns.UUID)):
        # UUID's cannot be edited
        if mode in (EditMode.add, EditMode.edit):
            return TypeOverridesHandling.drop, {}
        # But let's show them
        return fields.UUID(), dict(missing=colander.drop, widget=FriendlyUUIDWidget(readonly=True))
    elif isinstance(column_type, Text):
        return colander.String(), dict(widget=deform.widget.TextAreaWidget())
    elif isinstance(column_type, JSONB):
        return JSONValue(), dict(widget=JSONWidget())
    elif isinstance(column_type, (JSONB, columns.JSONB)):
        # Can't edit JSON
        if mode in (EditMode.add, EditMode.edit):
            return TypeOverridesHandling.drop, {}
        return colander.String(), {}
    elif isinstance(column_type, LargeBinary):
        # Can't edit binary
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, Geometry):
        # Can't edit geometry
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, (INET, columns.INET)):
        return colander.String(), {}
    else:
        # Default mapping / unknown, let the parent handle
        return TypeOverridesHandling.unknown, {}
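At the Python level, a LargeBinary column just stores and returns bytes. As a closing usage sketch, here is how the hypothetical Attachment model from the first sketch above could be written and read back with an in-memory SQLite engine (assumes SQLAlchemy 1.4+ for the Session context manager):

import sqlalchemy as sa
from sqlalchemy.orm import Session

# In-memory SQLite engine, used only for this demonstration.
engine = sa.create_engine('sqlite://')
Base.metadata.create_all(engine)

with Session(engine) as session:
    # Binary payloads are passed in and returned as plain bytes objects.
    session.add(Attachment(payload=b'\x89PNG\r\n\x1a\n'))
    session.commit()

    stored = session.query(Attachment).first()
    print(len(stored.payload))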