The following 9 code examples, collected from open-source Python projects, illustrate how to use sqlalchemy.BIGINT. All of them are Alembic migration scripts, so they assume the usual migration imports: from alembic import op and import sqlalchemy as sa.
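Before the migration examples, here is a minimal, self-contained sketch of sa.BIGINT in an ordinary table definition. The table and column names are hypothetical and not taken from the examples below.

import sqlalchemy as sa

metadata = sa.MetaData()

# A 64-bit integer primary key plus a BIGINT counter column.
event_log = sa.Table(
    'event_log', metadata,
    sa.Column('id', sa.BIGINT(), primary_key=True),
    sa.Column('byte_count', sa.BIGINT(), nullable=True),
)

# Create the table against an in-memory SQLite engine so the example runs anywhere;
# on PostgreSQL or MySQL the emitted DDL uses the native BIGINT type.
engine = sa.create_engine('sqlite://')
metadata.create_all(engine)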
def downgrade():
    # Drop trigger
    op.execute(
        'DROP TRIGGER increment_num_rows_in_db_array ON callset_to_db_array_association CASCADE')
    op.drop_column(u'db_array', 'num_rows')
    op.create_table(
        'db_row',
        sa.Column('id', sa.BIGINT(), nullable=False),
        sa.Column('db_array_id', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('tile_row_id', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.ForeignKeyConstraint(['db_array_id'], [u'db_array.id'],
                                name=u'db_row_db_array_id_fkey'),
        sa.PrimaryKeyConstraint('id', name=u'db_row_pkey'))
    op.add_column(
        u'callset',
        sa.Column('individual_id', sa.BIGINT(), autoincrement=False, nullable=False))
    op.add_column(u'callset',
                  sa.Column('dbrow_id', sa.BIGINT(), autoincrement=False, nullable=False))
    op.drop_constraint('callset_source_sample_id_fkey', 'callset', type_='foreignkey')
    op.drop_constraint('callset_target_sample_id_fkey', 'callset', type_='foreignkey')
    op.create_foreign_key(u'callset_individual_id_fkey', 'callset', 'individual',
                          ['individual_id'], ['id'])
    op.create_foreign_key(u'callset_dbrow_id_fkey', 'callset', 'db_row',
                          ['dbrow_id'], ['id'])
    op.drop_column(u'callset', 'target_sample_id')
    op.drop_column(u'callset', 'source_sample_id')
    op.drop_index('db_array_id_tile_row_id_idx',
                  table_name='callset_to_db_array_association')
    op.drop_table('callset_to_db_array_association')
    op.drop_table('sample')
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('run',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('run_id', sa.String(length=30), nullable=True),
        sa.Column('library_reads_sequenced', sa.BIGINT(), nullable=True),
        sa.Column('total_num_bases', sa.BIGINT(), nullable=True),
        sa.Column('download_size', sa.BIGINT(), nullable=True),
        sa.Column('avg_read_length', sa.Float(), nullable=True),
        sa.Column('baseA_count', sa.BIGINT(), nullable=True),
        sa.Column('baseC_count', sa.BIGINT(), nullable=True),
        sa.Column('baseG_count', sa.BIGINT(), nullable=True),
        sa.Column('baseT_count', sa.BIGINT(), nullable=True),
        sa.Column('baseN_count', sa.BIGINT(), nullable=True),
        sa.Column('gc_percent', sa.Float(), nullable=True),
        sa.Column('run_quality_counts', sa.Text(), nullable=True),
        sa.Column('dataset_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['dataset_id'], ['dataset.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_unique_constraint(None, 'dataset', ['db_source_uid'])
    # ### end Alembic commands ###
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'reference_set',
        sa.Column('next_tiledb_column_offset', sa.BigInteger(), nullable=False, default=0))
    op.add_column(
        'reference',
        sa.Column('tiledb_column_offset', sa.BigInteger(), nullable=True))
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(), nullable=False)
    op.alter_column('reference', 'name', existing_type=sa.TEXT(), nullable=False)
    op.create_unique_constraint('unique_name_per_reference_set_constraint',
                                'reference', ['reference_set_id', 'name'])
    op.create_index('unique_reference_set_id_offset_idx', 'reference',
                    ['reference_set_id', 'tiledb_column_offset'], unique=True)
    op.drop_column('reference', 'offset')
    # Trigger on reference insertion
    op.execute('''\
        CREATE OR REPLACE FUNCTION increment_next_column_in_reference_set_pgsql()
            RETURNS trigger AS $increment_next_column_in_reference_set_pgsql$
        BEGIN
            UPDATE reference
                SET tiledb_column_offset=(select next_tiledb_column_offset
                    from reference_set where id=NEW.reference_set_id)
                where NEW.tiledb_column_offset IS NULL and id=NEW.id;
            UPDATE reference_set
                SET next_tiledb_column_offset=next_tiledb_column_offset+NEW.length
                WHERE id = NEW.reference_set_id;
            RETURN NEW;
        END;
        $increment_next_column_in_reference_set_pgsql$ LANGUAGE plpgsql;

        CREATE TRIGGER increment_next_column_in_reference_set
            AFTER INSERT ON reference
            FOR EACH ROW EXECUTE PROCEDURE increment_next_column_in_reference_set_pgsql();
        ''')
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    # Drop trigger
    op.execute(
        'DROP TRIGGER increment_next_column_in_reference_set ON reference CASCADE')
    op.add_column('reference',
                  sa.Column('offset', sa.BIGINT(), autoincrement=False, nullable=True))
    op.drop_index('unique_reference_set_id_offset_idx', table_name='reference')
    op.drop_constraint('unique_name_per_reference_set_constraint', 'reference', type_='unique')
    op.alter_column('reference', 'name', existing_type=sa.TEXT(), nullable=True)
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(), nullable=True)
    op.drop_column('reference', 'tiledb_column_offset')
    op.drop_column('reference_set', 'next_tiledb_column_offset')
    ### end Alembic commands ###
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('file',
        sa.Column('file_id', sa.BIGINT(), nullable=False),
        sa.Column('sha512_hash', sa.TEXT(), nullable=True),
        sa.Column('sha256_hash', sa.TEXT(), nullable=True),
        sa.Column('sha1_hash', sa.TEXT(), nullable=True),
        sa.Column('md5_hash', sa.TEXT(), nullable=True),
        sa.Column('size', sa.FLOAT(), nullable=True),
        sa.Column('mime_type', sa.VARCHAR(length=120), nullable=True),
        sa.Column('submitted_by', sa.VARCHAR(length=120), nullable=False),
        sa.Column('status', sa.VARCHAR(length=20), nullable=False),
        sa.Column('last_updated', sa.DATETIME(), nullable=False),
        sa.Column('first_seen', sa.DATETIME(), nullable=False),
        sa.PrimaryKeyConstraint('file_id')
    )
    op.create_table('lookup_request',
        sa.Column('request_id', sa.BIGINT(), nullable=False),
        sa.Column('requested_at', sa.DATETIME(), nullable=False),
        sa.Column('requestor', sa.VARCHAR(length=120), nullable=False),
        sa.Column('file_id', sa.BIGINT(), nullable=True),
        sa.Column('lookup_hash', sa.TEXT(), nullable=False),
        sa.Column('result', sa.VARCHAR(length=20), nullable=False),
        sa.ForeignKeyConstraint(['file_id'], ['file.file_id'], ),
        sa.PrimaryKeyConstraint('request_id')
    )
    # ### end Alembic commands ###
def upgrade():
    # Relax the constraint so rows may be stored without an obsid.
    op.alter_column('file', 'obsid', existing_type=sa.BIGINT(), nullable=True)
def downgrade():
    # This probably won't work in practice since the rows with null obsids will need
    # to be deleted.
    op.alter_column('file', 'obsid', existing_type=sa.BIGINT(), nullable=False)
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # This revision also needs: from sqlalchemy.dialects import postgresql
    op.create_table('queue',
        sa.Column('id', sa.BIGINT(), nullable=False),
        sa.Column('enqueued_at', postgresql.TIMESTAMP(timezone=True),
                  server_default=sa.text('now()'), autoincrement=False, nullable=False),
        sa.Column('dequeued_at', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('expected_at', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('schedule_at', postgresql.TIMESTAMP(timezone=True),
                  autoincrement=False, nullable=True),
        sa.Column('q_name', sa.TEXT(), autoincrement=False, nullable=False),
        sa.Column('data', postgresql.JSON(astext_type=sa.Text()),
                  autoincrement=False, nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index('priority_idx', 'queue', ['schedule_at', 'expected_at'], unique=False)
    # ### end Alembic commands ###
def upgrade_rdr():
    # ### commands auto generated by Alembic - please adjust! ###
    # model.utils.UTCDateTime is a project-specific column type imported by this revision.
    op.create_table('measurement',
        sa.Column('measurement_id', sa.BIGINT(), autoincrement=False, nullable=False),
        sa.Column('physical_measurements_id', sa.Integer(), nullable=False),
        sa.Column('code_system', sa.String(length=255), nullable=False),
        sa.Column('code_value', sa.String(length=255), nullable=False),
        sa.Column('measurement_time', model.utils.UTCDateTime(), nullable=False),
        sa.Column('body_site_code_system', sa.String(length=255), nullable=True),
        sa.Column('body_site_code_value', sa.String(length=255), nullable=True),
        sa.Column('value_string', sa.String(length=1024), nullable=True),
        sa.Column('value_decimal', sa.Float(), nullable=True),
        sa.Column('value_unit', sa.String(length=255), nullable=True),
        sa.Column('value_code_system', sa.String(length=255), nullable=True),
        sa.Column('value_code_value', sa.String(length=255), nullable=True),
        sa.Column('value_datetime', model.utils.UTCDateTime(), nullable=True),
        sa.Column('parent_id', sa.BIGINT(), nullable=True),
        sa.Column('qualifier_id', sa.BIGINT(), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['measurement.measurement_id'], ),
        sa.ForeignKeyConstraint(['physical_measurements_id'],
                                ['physical_measurements.physical_measurements_id'], ),
        sa.ForeignKeyConstraint(['qualifier_id'], ['measurement.measurement_id'], ),
        sa.PrimaryKeyConstraint('measurement_id')
    )
    op.create_table('measurement_to_qualifier',
        sa.Column('measurement_id', sa.BIGINT(), nullable=False),
        sa.Column('qualifier_id', sa.BIGINT(), nullable=False),
        sa.ForeignKeyConstraint(['measurement_id'], ['measurement.measurement_id'], ),
        sa.ForeignKeyConstraint(['qualifier_id'], ['measurement.measurement_id'], ),
        sa.PrimaryKeyConstraint('measurement_id', 'qualifier_id')
    )
    # ### end Alembic commands ###