The following 50 code examples, extracted from open-source Python projects, illustrate how to use sqlalchemy.BigInteger().
def delay_table(base):
    """Build and return a ``Delay`` ORM model bound to *base*.

    The model records scheduled delays per run, uniquely keyed by
    (run_id, delay_id, sequence, recovery).
    """

    class Delay(base):
        __tablename__ = 'Delay'

        id_ = Column(Integer, Sequence('delay_id_seq'), primary_key=True)
        run_id = Column(String, default='')
        delay_id = Column(String)
        sequence = Column(Integer)
        recovery = Column(Integer)
        # Delay duration; NULL until a concrete value is assigned.
        seconds = Column(BigInteger, nullable=True)
        active = Column(Boolean, default=False)
        activated = Column(DateTime(), nullable=True)
        updated = Column(DateTime(), default=datetime.utcnow)

        __table_args__ = (
            UniqueConstraint('run_id', 'delay_id', 'sequence', 'recovery'),
        )

        def __repr__(self):
            return "<Delay(id='%s', run_id='%s', delay_id='%s', delay='%s', active='%s', activated='%s')>" % (
                self.id_, self.run_id, self.delay_id, self.seconds,
                self.active, self.activated)

    return Delay
def upgrade():
    """Create the pd_states and ip_states tracking tables."""
    op.create_table(
        'pd_states',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('pd_name', sa.String(), nullable=False),
        sa.Column('size', sa.Integer(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=False),
        sa.Column('end_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['user_id'], ['users.id']),
        sa.PrimaryKeyConstraint('start_time'))
    op.create_table(
        'ip_states',
        sa.Column('pod_id', postgresql.UUID(), nullable=False),
        # IPv4 address stored as a packed integer.
        sa.Column('ip_address', sa.BigInteger(), nullable=False),
        sa.Column('start_time', sa.DateTime(), nullable=False),
        sa.Column('end_time', sa.DateTime(), nullable=True),
        sa.ForeignKeyConstraint(['pod_id'], ['pods.id']),
        sa.PrimaryKeyConstraint('pod_id', 'start_time'))
def upgrade():
    """Create the coverages table with a dialect-appropriate id type."""
    migration_context = context.get_context()
    # SQLite cannot autoincrement a BigInteger primary key.
    if migration_context.dialect.name == 'sqlite':
        id_type = sa.Integer
    else:
        id_type = sa.BigInteger

    op.create_table(
        'coverages',
        sa.Column('id', id_type, autoincrement=True, primary_key=True),
        sa.Column('project_name', sa.String(256), nullable=False),
        sa.Column('coverage_rate', sa.Float()),
        sa.Column('report_time', sa.DateTime()),
        sa.Column('report_time_microsecond', sa.Integer(), default=0),
        mysql_engine='InnoDB')
    op.create_index('ix_project_name', 'coverages', ['project_name'])
def upgrade():
    """Create the files table with a dialect-appropriate id type."""
    migration_context = context.get_context()
    # SQLite cannot autoincrement a BigInteger primary key.
    if migration_context.dialect.name == 'sqlite':
        id_type = sa.Integer
    else:
        id_type = sa.BigInteger

    op.create_table(
        'files',
        sa.Column('id', id_type, autoincrement=True, primary_key=True),
        sa.Column('coverage_id', id_type, nullable=False),
        sa.Column('filename', sa.String(256), nullable=False),
        sa.Column('line_rate', sa.Float()),
        mysql_engine='InnoDB')
    op.create_index('ix_class_coverage_id', 'files', ['coverage_id'])
    op.create_index('ix_filename', 'files', ['filename'])
def upgrade():
    """Create the ladder scoreboard and extend match/user tables."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'scoreboard',
        sa.Column('user_id', sa.BigInteger(), nullable=False),
        sa.Column('ladder_name', sa.String(), nullable=False),
        sa.Column('mmr', sa.Integer(), nullable=False),
        sa.Column('matches', sa.Integer(), server_default='0', nullable=False),
        sa.Column('win', sa.Integer(), server_default='0', nullable=False),
        sa.Column('loss', sa.Integer(), server_default='0', nullable=False),
        sa.Column('dodge', sa.Integer(), server_default='0', nullable=False),
        sa.Column('leave', sa.Integer(), server_default='0', nullable=False),
        sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'ladder_name')
    )
    op.create_index(op.f('ix_scoreboard_mmr'), 'scoreboard', ['mmr'], unique=False)
    op.add_column('match', sa.Column('radiant_win', sa.Boolean(), nullable=True))
    op.add_column('match', sa.Column('section', sa.String(), server_default='high', nullable=False))
    op.add_column('player_in_match', sa.Column('is_dodge', sa.Boolean(), server_default='false', nullable=False))
    op.add_column('user', sa.Column('section', sa.String(), nullable=True))
    ### end Alembic commands ###
def test_column_adaptation(self):
    """Dialect types (BIGINT, DOUBLE_PRECISION) adapt to generic types."""
    Table(
        'simple_items', self.metadata,
        Column('id', BIGINT),
        Column('length', DOUBLE_PRECISION)
    )

    # NOTE(review): expected string reconstructed from collapsed source —
    # blank-line layout follows sqlacodegen's standard output; verify.
    assert self.generate_code() == """\
# coding: utf-8
from sqlalchemy import BigInteger, Column, Float, MetaData, Table

metadata = MetaData()


t_simple_items = Table(
    'simple_items', metadata,
    Column('id', BigInteger),
    Column('length', Float)
)
"""
def upgrade():
    """Create the reference table."""
    op.create_table(
        'reference',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('length', sa.BigInteger),
        sa.Column('reference_set_id', sa.BigInteger,
                  sa.ForeignKey('reference_set.id'), nullable=False),
        sa.Column('md5_checksum', sa.String(32)),
        sa.Column('name', sa.Text),
        sa.Column('source_uri', sa.Text),
        sa.Column('is_derived', sa.Boolean),
        sa.Column('source_divergence', sa.Float),
        sa.Column('ncbi_taxon_id', sa.Integer),
        sa.Column('offset', sa.BigInteger)
    )
def upgrade():
    """Create one ticker table per currency in currency_list."""
    for currency in currency_list:
        op.create_table(
            '{}_ticker'.format(currency),
            sa.Column('timestamp', sa.BigInteger, primary_key=True),
            sa.Column('bid_price', sa.Float),
            sa.Column('bid_size', sa.Float),
            sa.Column('ask_price', sa.Float),
            sa.Column('ask_size', sa.Float),
            sa.Column('daily_change', sa.Float),
            sa.Column('daily_change_perc', sa.Float),
            sa.Column('last_price', sa.Float),
            sa.Column('daily_volume', sa.Float),
            sa.Column('daily_high', sa.Float),
            sa.Column('daily_low', sa.Float)
        )
def upgrade():
    """Create the rse_transfer_limits table and its constraints."""
    create_table('rse_transfer_limits',
                 sa.Column('rse_id', GUID()),
                 sa.Column('activity', sa.String(50)),
                 sa.Column('rse_expression', sa.String(3000)),
                 sa.Column('max_transfers', sa.BigInteger),
                 sa.Column('transfers', sa.BigInteger),
                 sa.Column('waitings', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    # SQLite cannot ALTER TABLE to add constraints after creation.
    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('RSE_TRANSFER_LIMITS_PK', 'rse_transfer_limits', ['rse_id', 'activity'])
        create_check_constraint('RSE_TRANSFER_LIMITS_CREATED_NN', 'rse_transfer_limits', 'created_at is not null')
        create_check_constraint('RSE_TRANSFER_LIMITS_UPDATED_NN', 'rse_transfer_limits', 'updated_at is not null')
        create_foreign_key('RSE_TRANSFER_LIMITS_RSE_ID_FK', 'rse_transfer_limits', 'rses', ['rse_id'], ['id'])
def upgrade():
    """Create sources_history and add estimated_at to request tables."""
    if context.get_context().dialect.name not in ['sqlite']:
        create_table('sources_history',
                     sa.Column('request_id', GUID()),
                     sa.Column('scope', sa.String(25)),
                     sa.Column('name', sa.String(255)),
                     sa.Column('rse_id', GUID()),
                     sa.Column('dest_rse_id', GUID()),
                     sa.Column('url', sa.String(2048)),
                     sa.Column('bytes', sa.BigInteger),
                     sa.Column('ranking', sa.Integer()),
                     sa.Column('is_using', sa.Boolean(), default=False))

        add_column('requests', sa.Column('estimated_at', sa.DateTime))
        add_column('requests_history', sa.Column('estimated_at', sa.DateTime))
def upgrade():
    """Create the heartbeats table, its primary key and constraints."""
    create_table('heartbeats',
                 sa.Column('executable', sa.String(512)),
                 sa.Column('hostname', sa.String(128)),
                 sa.Column('pid', sa.Integer(), autoincrement=False),
                 sa.Column('thread_id', sa.BigInteger(), autoincrement=False),
                 sa.Column('thread_name', sa.String(64)),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('heartbeats_pk', 'heartbeats', ['executable', 'hostname', 'pid', 'thread_id'])
        create_index('heartbeats_updated_at', 'heartbeats', ['updated_at'])
        if context.get_context().dialect.name != 'mysql':
            create_check_constraint('heartbeats_created_nn', 'heartbeats', 'created_at is not null')
            create_check_constraint('heartbeats_updated_nn', 'heartbeats', 'updated_at is not null')
def upgrade():
    """Create the collection_replicas table with keys and constraints."""
    create_table('collection_replicas',
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('did_type', DIDType.db_type(name='COLLECTION_REPLICAS_TYPE_CHK')),
                 sa.Column('rse_id', GUID()),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('length', sa.BigInteger),
                 sa.Column('state', ReplicaState.db_type(name='COLLECTION_REPLICAS_STATE_CHK'),
                           default=ReplicaState.UNAVAILABLE),
                 sa.Column('accessed_at', sa.DateTime),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('COLLECTION_REPLICAS_PK', 'collection_replicas', ['scope', 'name', 'rse_id'])
        create_foreign_key('COLLECTION_REPLICAS_LFN_FK', 'collection_replicas', 'dids',
                           ['scope', 'name'], ['scope', 'name'])
        create_foreign_key('COLLECTION_REPLICAS_RSE_ID_FK', 'collection_replicas', 'rses',
                           ['rse_id'], ['id'])
        create_check_constraint('COLLECTION_REPLICAS_SIZE_NN', 'collection_replicas', 'bytes IS NOT NULL')
        create_check_constraint('COLLECTION_REPLICAS_STATE_NN', 'collection_replicas', 'state IS NOT NULL')
        create_index('COLLECTION_REPLICAS_RSE_ID_IDX', 'collection_replicas', ['rse_id'])
def upgrade():
    """Rebuild account_limits keyed by (account, rse_id)."""
    # PostgreSQL requires dropping dependent constraints before the table.
    if context.get_context().dialect.name == 'postgresql':
        drop_constraint('ACCOUNT_LIMITS_PK', 'account_limits', type_='primary')
        drop_constraint('ACCOUNT_LIMITS_ACCOUNT_FK', 'account_limits')
        drop_constraint('ACCOUNT_LIMITS_CREATED_NN', 'account_limits')
        drop_constraint('ACCOUNT_LIMITS_UPDATED_NN', 'account_limits')

    drop_table('account_limits')

    create_table('account_limits',
                 sa.Column('account', sa.String(25)),
                 sa.Column('rse_id', GUID()),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('ACCOUNT_LIMITS_PK', 'account_limits', ['account', 'rse_id'])
        create_check_constraint('ACCOUNT_LIMITS_CREATED_NN', 'account_limits', 'created_at is not null')
        create_check_constraint('ACCOUNT_LIMITS_UPDATED_NN', 'account_limits', 'updated_at is not null')
        create_foreign_key('ACCOUNT_LIMITS_ACCOUNT_FK', 'account_limits', 'accounts', ['account'], ['account'])
        create_foreign_key('ACCOUNT_LIMITS_RSE_ID_FK', 'account_limits', 'rses', ['rse_id'], ['id'])
def downgrade():
    """Restore account_limits keyed by (account, rse_expression, name)."""
    # PostgreSQL requires dropping dependent constraints before the table.
    if context.get_context().dialect.name == 'postgresql':
        drop_constraint('ACCOUNT_LIMITS_PK', 'account_limits', type_='primary')
        drop_constraint('ACCOUNT_LIMITS_CREATED_NN', 'account_limits')
        drop_constraint('ACCOUNT_LIMITS_UPDATED_NN', 'account_limits')
        drop_constraint('ACCOUNT_LIMITS_ACCOUNT_FK', 'account_limits')
        drop_constraint('ACCOUNT_LIMITS_RSE_ID_FK', 'account_limits')

    drop_table('account_limits')

    create_table('account_limits',
                 sa.Column('account', sa.String(25)),
                 sa.Column('rse_expression', sa.String(255)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('value', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('ACCOUNT_LIMITS_PK', 'account_limits', ['account', 'rse_expression', 'name'])
        create_check_constraint('ACCOUNT_LIMITS_CREATED_NN', 'account_limits', 'created_at is not null')
        create_check_constraint('ACCOUNT_LIMITS_UPDATED_NN', 'account_limits', 'updated_at is not null')
        create_foreign_key('ACCOUNT_LIMITS_ACCOUNT_FK', 'account_limits', 'accounts', ['account'], ['account'])
def upgrade():
    """Extend collection_replicas and create updated_col_rep."""
    if context.get_context().dialect.name != 'sqlite':
        add_column('collection_replicas', sa.Column('available_replicas_cnt', sa.BigInteger()))
        add_column('collection_replicas', sa.Column('available_bytes', sa.BigInteger()))

    create_table('updated_col_rep',
                 sa.Column('id', GUID()),
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('did_type', DIDType.db_type(name='UPDATED_COL_REP_TYPE_CHK')),
                 sa.Column('rse_id', GUID()),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('UPDATED_COL_REP_PK', 'updated_col_rep', ['id'])
        create_check_constraint('UPDATED_COL_REP_SCOPE_NN', 'updated_col_rep', 'scope IS NOT NULL')
        create_check_constraint('UPDATED_COL_REP_NAME_NN', 'updated_col_rep', 'name IS NOT NULL')
        create_index('UPDATED_COL_REP_SNR_IDX', 'updated_col_rep', ['scope', 'name', 'rse_id'])
def upgrade():
    """Create the sources table with keys, constraints and indexes."""
    create_table('sources',
                 sa.Column('request_id', GUID()),
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('rse_id', GUID()),
                 sa.Column('dest_rse_id', GUID()),
                 sa.Column('url', sa.String(2048)),
                 sa.Column('ranking', sa.Integer),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('SOURCES_PK', 'sources', ['request_id', 'rse_id', 'scope', 'name'])
        create_foreign_key('SOURCES_REQ_ID_FK', 'sources', 'requests', ['request_id'], ['id'])
        create_foreign_key('SOURCES_REPLICAS_FK', 'sources', 'replicas',
                           ['scope', 'name', 'rse_id'], ['scope', 'name', 'rse_id'])
        create_foreign_key('SOURCES_RSES_FK', 'sources', 'rses', ['rse_id'], ['id'])
        create_foreign_key('SOURCES_DST_RSES_FK', 'sources', 'rses', ['dest_rse_id'], ['id'])
        create_check_constraint('SOURCES_CREATED_NN', 'sources', 'created_at is not null')
        create_check_constraint('SOURCES_UPDATED_NN', 'sources', 'updated_at is not null')
        create_index('SOURCES_SRC_DST_IDX', 'sources', ['rse_id', 'dest_rse_id'])
def upgrade():
    """Add license tables, project license FK, and index adjustments."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('licenses',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('name', sa.String(), nullable=True),
                    sa.Column('description', sa.String(), nullable=True),
                    sa.Column('plain_text', sa.String(), nullable=True),
                    sa.PrimaryKeyConstraint('id'),
                    sa.UniqueConstraint('name')
                    )
    op.create_table('users_licenses',
                    sa.Column('user', sa.BigInteger(), nullable=True),
                    sa.Column('license', sa.Integer(), nullable=True),
                    sa.ForeignKeyConstraint(['license'], ['licenses.id'], ),
                    sa.ForeignKeyConstraint(['user'], ['users.id'], )
                    )
    op.drop_index('idx_areas_of_interest_centroid', table_name='areas_of_interest')
    op.drop_index('idx_areas_of_interest_geometry', table_name='areas_of_interest')
    op.add_column('projects', sa.Column('license_id', sa.Integer(), nullable=True))
    op.create_foreign_key('fk_licenses', 'projects', 'licenses', ['license_id'], ['id'])
    op.drop_index('idx_tasks_geometry', table_name='tasks')
    # ### end Alembic commands ###

    # Custom index, not created with reflection
    op.create_index('idx_username_lower', 'users', [text('lower(username)')])
def upgrade():
    """Create the user-to-user messages table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('messages',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('message', sa.String(), nullable=True),
                    sa.Column('subject', sa.String(), nullable=True),
                    sa.Column('from_user_id', sa.BigInteger(), nullable=True),
                    sa.Column('to_user_id', sa.BigInteger(), nullable=True),
                    sa.Column('date', sa.DateTime(), nullable=True),
                    sa.Column('read', sa.Boolean(), nullable=True),
                    sa.ForeignKeyConstraint(['from_user_id'], ['users.id'], ),
                    sa.ForeignKeyConstraint(['to_user_id'], ['users.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('ix_messages_to_user_id'), 'messages', ['to_user_id'], unique=False)
    # ### end Alembic commands ###
def upgrade():
    """Create the moderatorlog table and its lookup indexes."""
    op.create_table('moderatorlog',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('date', sa.BigInteger(), nullable=False),
                    sa.Column('moderator_id', sa.Integer(), nullable=True),
                    sa.Column('board_id', sa.Integer(), nullable=True),
                    sa.Column('type', sa.Integer(), nullable=False),
                    sa.Column('text', sa.String(), nullable=False),
                    sa.ForeignKeyConstraint(['board_id'], ['board.id'], ),
                    sa.ForeignKeyConstraint(['moderator_id'], ['moderator.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('ix_moderatorlog_board_id'), 'moderatorlog', ['board_id'], unique=False)
    op.create_index(op.f('ix_moderatorlog_date'), 'moderatorlog', ['date'], unique=False)
    op.create_index(op.f('ix_moderatorlog_moderator_id'), 'moderatorlog', ['moderator_id'], unique=False)
    op.create_index(op.f('ix_moderatorlog_type'), 'moderatorlog', ['type'], unique=False)
def upgrade():
    """Widen social-network user id columns to BigInteger."""
    op.alter_column('user', 'facebook_user_id', type_=sa.BigInteger)
    op.alter_column('user', 'twitter_user_id', type_=sa.BigInteger)
def upgrade():
    """Create the per-login message_blacklist table."""
    context = op.get_context()
    connection = op.get_bind()
    op.create_table('message_blacklist',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('login_id', sa.BigInteger(), nullable=False),
                    sa.Column('blacklist', postgresql.ARRAY(sa.Integer)),
                    sa.ForeignKeyConstraint(['login_id'], ['login.id'],
                                            ondelete='CASCADE',
                                            name="ref_message_blacklist_login_id_to_login"),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('idx_message_blacklist_login_id'), 'message_blacklist',
                    ['login_id'], unique=True)
def upgrade():
    """Add an expiry timestamp (in milliseconds) to the message table."""
    op.add_column('message', sa.Column('expiry_millis', sa.BigInteger))
def upgrade():
    """Widen trait_int.value to BigInteger."""
    op.alter_column('trait_int', "value", type_=sa.BigInteger)
def upgrade():
    """Create results.individual_importances and results.list_predictions."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'individual_importances',
        sa.Column('model_id', sa.Integer(), nullable=False),
        sa.Column('entity_id', sa.BigInteger(), nullable=False),
        sa.Column('as_of_date', sa.DateTime(), nullable=False),
        sa.Column('feature', sa.String(), nullable=False),
        sa.Column('method', sa.String(), nullable=False),
        sa.Column('importance_score', sa.Text(), nullable=True),
        sa.ForeignKeyConstraint(['model_id'], ['results.models.model_id'], ),
        sa.PrimaryKeyConstraint(
            'model_id',
            'entity_id',
            'as_of_date',
            'feature',
            'method',
        ),
        schema='results'
    )
    op.create_table(
        'list_predictions',
        sa.Column('model_id', sa.Integer(), nullable=False),
        sa.Column('entity_id', sa.BigInteger(), nullable=False),
        sa.Column('as_of_date', sa.DateTime(), nullable=False),
        sa.Column('score', sa.Numeric(), nullable=True),
        sa.Column('rank_abs', sa.Integer(), nullable=True),
        sa.Column('rank_pct', sa.Float(), nullable=True),
        sa.Column('matrix_uuid', sa.Text(), nullable=True),
        sa.Column('test_label_window', sa.Interval(), nullable=True),
        sa.ForeignKeyConstraint(['model_id'], ['results.models.model_id'], ),
        sa.PrimaryKeyConstraint('model_id', 'entity_id', 'as_of_date'),
        schema='results'
    )
    # ### end Alembic commands ###
def upgrade():
    """Create the yitu-feedback table for user feedback entries."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('yitu-feedback',
                    sa.Column('id_', sa.BigInteger(), nullable=False),
                    sa.Column('user_id', sa.Integer(), nullable=True),
                    sa.Column('contact', sa.Text(), nullable=True),
                    sa.Column('date', sa.DateTime(), nullable=True),
                    sa.Column('content', sa.Text(), nullable=True),
                    sa.PrimaryKeyConstraint('id_')
                    )
    # ### end Alembic commands ###
def upgrade():
    """Create the localstorage_devices table keyed by (node_id, device)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('localstorage_devices',
                    sa.Column('node_id', sa.Integer(), nullable=True),
                    sa.Column('device', sa.String(length=64), nullable=False),
                    sa.Column('size', sa.BigInteger(), nullable=False),
                    sa.Column('volume_name', sa.String(length=255), nullable=False),
                    sa.ForeignKeyConstraint(['node_id'], [u'nodes.id'], ),
                    sa.PrimaryKeyConstraint('node_id', 'device')
                    )
    ### end Alembic commands ###
def upgrade():
    """Create category, item and market order tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('categories',
                    sa.Column('id', sa.String(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('sub_categories',
                    sa.Column('id', sa.String(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('items',
                    sa.Column('id', sa.String(), nullable=False),
                    sa.Column('name', sa.String(), nullable=False),
                    sa.Column('tier', sa.Integer(), nullable=False),
                    sa.Column('category_id', sa.String(), nullable=False),
                    sa.Column('sub_category_id', sa.String(), nullable=False),
                    sa.ForeignKeyConstraint(['category_id'], ['categories.id'], ),
                    sa.ForeignKeyConstraint(['sub_category_id'], ['sub_categories.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_table('market_orders',
                    sa.Column('id', sa.Integer(), nullable=False),
                    sa.Column('item_id', sa.String(), nullable=False),
                    sa.Column('location_id', sa.Integer(), nullable=False),
                    sa.Column('quality_level', sa.Integer(), nullable=False),
                    sa.Column('enchantment_level', sa.Integer(), nullable=False),
                    sa.Column('price', sa.BigInteger(), nullable=False),
                    sa.Column('amount', sa.Integer(), nullable=False),
                    sa.Column('is_buy_order', sa.Boolean(), nullable=True),
                    sa.Column('expire_time', sa.DateTime(), nullable=False),
                    sa.Column('ingest_time', sa.DateTime(), nullable=False),
                    sa.Column('last_updated', sa.DateTime(), nullable=False),
                    sa.ForeignKeyConstraint(['item_id'], ['items.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # ### end Alembic commands ###
def upgrade():
    """Create the workspace table."""
    op.create_table(
        'workspace',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('name', sa.Text, nullable=False)
    )
def upgrade():
    """Create the individual table."""
    # making ontology terms strings for now
    # leaving out externalId, diseases, pheno, etc. mappings for now
    op.create_table(
        'individual',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('name', sa.Text),
        sa.Column('info', sa.PickleType),
        sa.Column('record_create_time', sa.Text),
        sa.Column('record_update_time', sa.Text),
    )
def upgrade():
    """Create the db_row table and the sequence that feeds tile_row_id."""
    op.execute(
        CreateSequence(
            Sequence('db_row_tile_row_id_seq', minvalue=0, start=0, increment=1)
        )
    )
    op.create_table(
        'db_row',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('db_array_id', sa.BigInteger,
                  sa.ForeignKey('db_array.id'), nullable=False),
        sa.Column('tile_row_id', sa.BigInteger,
                  Sequence('db_row_tile_row_id_seq'), nullable=False)
    )
def upgrade():
    """Create the db_array table."""
    op.create_table(
        'db_array',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('reference_set_id', sa.BigInteger,
                  sa.ForeignKey('reference_set.id'), nullable=False),
        sa.Column('workspace_id', sa.BigInteger,
                  sa.ForeignKey('workspace.id'), nullable=False),
        sa.Column('name', sa.Text, nullable=False)
    )
def upgrade():
    """Create the reference <-> source_accession join table."""
    op.create_table(
        'reference_source_accession',
        sa.Column('reference_id', sa.BigInteger,
                  sa.ForeignKey('reference.id'), primary_key=True),
        sa.Column('source_accession_id', sa.BigInteger,
                  sa.ForeignKey('source_accession.id'), primary_key=True)
    )
def upgrade():
    """Create the source_accession table."""
    op.create_table(
        'source_accession',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('accession_id', sa.Text, nullable=False)
    )
def upgrade():
    """Create the variant_set table."""
    op.create_table(
        'variant_set',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('name', sa.Text),
        sa.Column('reference_set_id', sa.BigInteger,
                  sa.ForeignKey('reference_set.id'), nullable=False),
        # should this be the workspace
        sa.Column('dataset_id', sa.Text),
        # variant set metadata should be it's own schema
        sa.Column('variant_set_metadata', sa.Text)
    )
def upgrade():
    """Create the reference_set <-> source_accession join table."""
    op.create_table(
        'reference_set_source_accession',
        sa.Column('reference_set_id', sa.BigInteger,
                  sa.ForeignKey('reference_set.id'), primary_key=True),
        sa.Column('source_accession_id', sa.BigInteger,
                  sa.ForeignKey('source_accession.id'), primary_key=True)
    )
def upgrade():
    """Create the reference_set table."""
    op.create_table(
        'reference_set',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('guid', sa.String(36), nullable=False, unique=True),
        sa.Column('md5_checksum', sa.String(32)),
        sa.Column('description', sa.Text),
        sa.Column('source_uri', sa.Text),
        sa.Column('is_derived', sa.Boolean),
        sa.Column('ncbi_taxon_id', sa.Integer),
        sa.Column('assembly_id', sa.String(100)),
        sa.Column('offset_factor', sa.Float)
    )
def upgrade():
    """Create the field table."""
    op.create_table(
        'field',
        sa.Column('id', sa.BigInteger, primary_key=True),
        sa.Column('field', sa.Text, nullable=False)
    )
def upgrade():
    """Replace reference.offset with per-reference-set tiledb column offsets,
    maintained automatically by a PostgreSQL insert trigger.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'reference_set',
        sa.Column('next_tiledb_column_offset', sa.BigInteger(), nullable=False, default=0))
    op.add_column(
        'reference',
        sa.Column('tiledb_column_offset', sa.BigInteger(), nullable=True))
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(), nullable=False)
    op.alter_column('reference', 'name', existing_type=sa.TEXT(), nullable=False)
    op.create_unique_constraint('unique_name_per_reference_set_constraint',
                                'reference', ['reference_set_id', 'name'])
    op.create_index('unique_reference_set_id_offset_idx', 'reference',
                    ['reference_set_id', 'tiledb_column_offset'], unique=True)
    op.drop_column('reference', 'offset')
    # Trigger on reference insertion
    # NOTE(review): SQL line breaks reconstructed from a collapsed source line —
    # semantics unchanged, but verify formatting against the original migration.
    op.execute('''\
CREATE OR REPLACE FUNCTION increment_next_column_in_reference_set_pgsql() RETURNS trigger AS $increment_next_column_in_reference_set_pgsql$
BEGIN
UPDATE reference SET tiledb_column_offset=(select next_tiledb_column_offset from reference_set where id=NEW.reference_set_id) where NEW.tiledb_column_offset IS NULL and id=NEW.id;
UPDATE reference_set SET next_tiledb_column_offset=next_tiledb_column_offset+NEW.length WHERE id = NEW.reference_set_id;
RETURN NEW;
END;
$increment_next_column_in_reference_set_pgsql$ LANGUAGE plpgsql;
CREATE TRIGGER increment_next_column_in_reference_set AFTER INSERT ON reference FOR EACH ROW EXECUTE PROCEDURE increment_next_column_in_reference_set_pgsql();
''')
    ### end Alembic commands ###
def upgrade():
    """Widen BGP AS-number columns to BigInteger (4-byte ASNs)."""
    op.alter_column('bgp_speakers', 'local_as', nullable=False,
                    type_=sa.BigInteger())
    op.alter_column('bgp_peers', 'remote_as', nullable=False,
                    type_=sa.BigInteger())
def _is_sqlalchemy_connectable(con):
    """Return True if *con* is a SQLAlchemy Connectable (engine/connection).

    The availability of SQLAlchemy is probed once and cached in the
    module-level ``_SQLALCHEMY_INSTALLED`` flag; on first success a
    sqlite-specific BigInteger workaround is registered as a side effect.
    """
    global _SQLALCHEMY_INSTALLED
    if _SQLALCHEMY_INSTALLED is None:
        try:
            import sqlalchemy
            _SQLALCHEMY_INSTALLED = True

            from distutils.version import LooseVersion
            ver = LooseVersion(sqlalchemy.__version__)
            # For sqlalchemy versions < 0.8.2, the BIGINT type is recognized
            # for a sqlite engine, which results in a warning when trying to
            # read/write a DataFrame with int64 values. (GH7433)
            if ver < '0.8.2':
                from sqlalchemy import BigInteger
                from sqlalchemy.ext.compiler import compiles

                @compiles(BigInteger, 'sqlite')
                def compile_big_int_sqlite(type_, compiler, **kw):
                    # Render BIGINT as plain INTEGER on sqlite.
                    return 'INTEGER'
        except ImportError:
            _SQLALCHEMY_INSTALLED = False

    if _SQLALCHEMY_INSTALLED:
        import sqlalchemy
        return isinstance(con, sqlalchemy.engine.Connectable)
    else:
        return False
def upgrade():
    """Create the future_trade table and its timestamp index."""
    op.create_table(
        'future_trade',
        sa.Column('trade_id', sa.BigInteger, primary_key=True),
        sa.Column('price', sa.Float, nullable=False),
        sa.Column('amount', sa.Integer, nullable=False),
        sa.Column('timestamp', sa.Integer, nullable=False),
        sa.Column('trade_type', sa.SmallInteger, nullable=False),
        sa.Column('contract_type', sa.SmallInteger, nullable=False),
    )
    op.create_index('future_trade_timestamp_index', 'future_trade', ['timestamp'])
def upgrade():
    """Create the future_tick table keyed by timestamp."""
    op.create_table(
        'future_tick',
        sa.Column('timestamp', sa.BigInteger, primary_key=True),
        sa.Column('low', sa.Float, nullable=False),
        sa.Column('buy', sa.Float, nullable=False),
        sa.Column('last', sa.Float, nullable=False),
        sa.Column('sell', sa.Float, nullable=False),
        sa.Column('high', sa.Float, nullable=False),
        sa.Column('volume', sa.Integer, nullable=False)
    )
def upgrade():
    """Create the future_index table keyed by timestamp."""
    op.create_table(
        'future_index',
        sa.Column('timestamp', sa.BigInteger, primary_key=True),
        sa.Column('value', sa.Float, nullable=False)
    )
def upgrade():
    """Add transfer metadata columns to requests and requests_history."""
    for table in ('requests', 'requests_history'):
        add_column(table, sa.Column('bytes', BigInteger))
        add_column(table, sa.Column('md5', String(32)))
        add_column(table, sa.Column('adler32', String(8)))
        add_column(table, sa.Column('dest_url', String(2048)))
def upgrade():
    """Extend dataset_locks and dids with size/access bookkeeping columns."""
    # SQLite cannot ALTER TABLE to add these columns.
    if context.get_context().dialect.name != 'sqlite':
        add_column('dataset_locks', sa.Column('length', sa.BigInteger()))
        add_column('dataset_locks', sa.Column('bytes', sa.BigInteger()))
        add_column('dataset_locks', sa.Column('accessed_at', sa.DateTime()))
        add_column('dids', sa.Column('accessed_at', sa.DateTime()))
def upgrade():
    """Create quarantined_replicas and quarantined_replicas_history.

    Constraints and keys are skipped on SQLite, which does not support
    adding them after table creation.
    """
    create_table('quarantined_replicas',
                 sa.Column('rse_id', GUID()),
                 sa.Column('path', sa.String(1024)),
                 sa.Column('md5', sa.String(32)),
                 sa.Column('adler32', sa.String(8)),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime))

    create_table('quarantined_replicas_history',
                 sa.Column('rse_id', GUID()),
                 sa.Column('path', sa.String(1024)),
                 sa.Column('md5', sa.String(32)),
                 sa.Column('adler32', sa.String(8)),
                 sa.Column('bytes', sa.BigInteger),
                 sa.Column('updated_at', sa.DateTime),
                 sa.Column('created_at', sa.DateTime),
                 sa.Column('deleted_at', sa.DateTime))

    # BUG FIX: the original tested `not in ('sqlite')` — `('sqlite')` is a
    # plain string, so this performed substring matching on the dialect name
    # instead of tuple membership. Use a real tuple.
    if context.get_context().dialect.name not in ('sqlite',):
        create_primary_key('QURD_REPLICAS_STATE_PK', 'quarantined_replicas', ['rse_id', 'path'])
        create_check_constraint('QURD_REPLICAS_CREATED_NN', 'quarantined_replicas', 'created_at is not null')
        create_check_constraint('QURD_REPLICAS_UPDATED_NN', 'quarantined_replicas', 'updated_at is not null')
        create_foreign_key('QURD_REPLICAS_RSE_ID_FK', 'quarantined_replicas', 'rses', ['rse_id'], ['id'])
def upgrade():
    """Create the replicas_history table and its constraints."""
    create_table('replicas_history',
                 sa.Column('rse_id', GUID()),
                 sa.Column('scope', sa.String(25)),
                 sa.Column('name', sa.String(255)),
                 sa.Column('bytes', sa.BigInteger))

    if context.get_context().dialect.name != 'sqlite':
        create_primary_key('REPLICAS_HIST_PK', 'replicas_history', ['rse_id', 'scope', 'name'])
        # create_foreign_key('REPLICAS_HIST_LFN_FK', 'replicas_history', 'dids', ['scope', 'name'], ['scope', 'name'])
        create_foreign_key('REPLICAS_HIST_RSE_ID_FK', 'replicas_history', 'rses', ['rse_id'], ['id'])
        create_check_constraint('REPLICAS_HIST_SIZE_NN', 'replicas_history', 'bytes IS NOT NULL')
def upgrade():
    """Create the invitation table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('invitation',
                    sa.Column('id', sa.BigInteger(), nullable=False),
                    sa.Column('package_id', sa.BigInteger(), nullable=True),
                    sa.Column('email', sa.String(length=254), nullable=False),
                    sa.Column('invited_at', sa.DateTime(), nullable=False),
                    sa.ForeignKeyConstraint(['package_id'], ['package.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    # ### end Alembic commands ###
def upgrade():
    """Create the project_allowed_users association table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('project_allowed_users',
                    sa.Column('project_id', sa.Integer(), nullable=True),
                    sa.Column('user_id', sa.BigInteger(), nullable=True),
                    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
                    sa.ForeignKeyConstraint(['user_id'], ['users.id'], )
                    )
    # ### end Alembic commands ###
def upgrade():
    """Create the project_chat table and its project_id index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('project_chat',
                    sa.Column('id', sa.BigInteger(), nullable=False),
                    sa.Column('project_id', sa.Integer(), nullable=False),
                    sa.Column('user_id', sa.Integer(), nullable=False),
                    sa.Column('time_stamp', sa.DateTime(), nullable=False),
                    sa.Column('message', sa.String(length=250), nullable=False),
                    sa.ForeignKeyConstraint(['project_id'], ['projects.id'], ),
                    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('ix_project_chat_project_id'), 'project_chat', ['project_id'], unique=False)
    # ### end Alembic commands ###