我们从Python开源项目中,提取了以下47个代码示例,用于说明如何使用sqlalchemy.TEXT。
def downgrade():
    """Recreate the legacy ``privatemessages`` table and drop the newer
    ``messages``/``conversations`` tables that replaced it.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('user_id', sa.INTEGER(), nullable=False),
    sa.Column('from_user_id', sa.INTEGER(), nullable=True),
    sa.Column('to_user_id', sa.INTEGER(), nullable=True),
    sa.Column('subject', sa.VARCHAR(length=255), nullable=True),
    sa.Column('message', sa.TEXT(), nullable=True),
    sa.Column('date_created', sa.DATETIME(), nullable=True),
    # Per-message state flags.
    sa.Column('trash', sa.BOOLEAN(), nullable=False),
    sa.Column('draft', sa.BOOLEAN(), nullable=False),
    sa.Column('unread', sa.BOOLEAN(), nullable=False),
    sa.ForeignKeyConstraint(['from_user_id'], [u'users.id'], ),
    sa.ForeignKeyConstraint(['to_user_id'], [u'users.id'], ),
    sa.ForeignKeyConstraint(['user_id'], [u'users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``housing_evals`` table removed by the corresponding upgrade."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('housing_evals',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('uid', sa.VARCHAR(length=32), nullable=False),
    # Free-text evaluation sections.
    sa.Column('social_attended', sa.TEXT(), nullable=False),
    sa.Column('social_hosted', sa.TEXT(), nullable=False),
    sa.Column('technical_attended', sa.TEXT(), nullable=False),
    sa.Column('technical_hosted', sa.TEXT(), nullable=False),
    sa.Column('projects', sa.TEXT(), nullable=False),
    sa.Column('comments', sa.TEXT(), nullable=False),
    sa.Column('points', sa.INTEGER(), nullable=False),
    sa.Column('active', sa.BOOLEAN(), nullable=False),
    sa.Column('date_created', sa.DATE(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def _transaction_test(self):
    """Verify run_transaction() rolls back on error and commits on success.

    Creates a throwaway ``test_trans`` table, then checks that:

    * an exception raised inside the transaction context leaves no rows;
    * a transaction that exits cleanly persists its inserted row.
    """
    self.pandasSQL.execute("CREATE TABLE test_trans (A INT, B TEXT)")
    ins_sql = "INSERT INTO test_trans (A,B) VALUES (1, 'blah')"

    # Make sure when transaction is rolled back, no rows get inserted
    try:
        with self.pandasSQL.run_transaction() as trans:
            trans.execute(ins_sql)
            raise Exception('error')
    except Exception:
        # The exception is raised deliberately above.  Narrowed from a bare
        # ``except:`` so KeyboardInterrupt/SystemExit are not swallowed.
        pass
    res = self.pandasSQL.read_query('SELECT * FROM test_trans')
    self.assertEqual(len(res), 0)

    # Make sure when transaction is committed, rows do get inserted
    with self.pandasSQL.run_transaction() as trans:
        trans.execute(ins_sql)
    res2 = self.pandasSQL.read_query('SELECT * FROM test_trans')
    self.assertEqual(len(res2), 1)

# -----------------------------------------------------------------------------
# -- Testing the public API
def test_dtype(self):
    """to_sql ``dtype`` mapping: accepts SQLAlchemy types (class or instance),
    rejects plain Python types, and honours type parameters such as length.
    """
    cols = ['A', 'B']
    data = [(0.8, True), (0.9, None)]
    df = DataFrame(data, columns=cols)
    df.to_sql('dtype_test', self.conn)
    # Override column B's inferred type with an uninstantiated SQLAlchemy type.
    df.to_sql('dtype_test2', self.conn, dtype={'B': sqlalchemy.TEXT})
    meta = sqlalchemy.schema.MetaData(bind=self.conn)
    meta.reflect()
    sqltype = meta.tables['dtype_test2'].columns['B'].type
    self.assertTrue(isinstance(sqltype, sqlalchemy.TEXT))
    # A plain Python type is not a valid dtype mapping value.
    self.assertRaises(ValueError, df.to_sql, 'error', self.conn, dtype={'B': str})
    # GH9083: instantiated types with parameters must round-trip too.
    df.to_sql('dtype_test3', self.conn, dtype={'B': sqlalchemy.String(10)})
    meta.reflect()
    sqltype = meta.tables['dtype_test3'].columns['B'].type
    self.assertTrue(isinstance(sqltype, sqlalchemy.String))
    self.assertEqual(sqltype.length, 10)
def test_execute_fail(self):
    """A statement violating the composite primary key must raise."""
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a, b)
    );
    """
    cur = self.conn.cursor()
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
    sql.execute('INSERT INTO test VALUES("foo", "baz", 2.567)', self.conn)
    try:
        # Silence the error output the failing execute prints to stdout.
        sys.stdout = StringIO()
        self.assertRaises(Exception, sql.execute,
                          'INSERT INTO test VALUES("foo", "bar", 7)',
                          self.conn)
    finally:
        sys.stdout = sys.__stdout__
def test_execute_closed_connection(self):
    """Queries against a closed sqlite connection must raise."""
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a, b)
    );
    """
    cur = self.conn.cursor()
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
    self.conn.close()
    try:
        # Silence the error output printed on failure.
        sys.stdout = StringIO()
        self.assertRaises(Exception, sql.tquery, "select * from test",
                          con=self.conn)
    finally:
        sys.stdout = sys.__stdout__
    # Initialize connection again (needed for tearDown)
    self.setUp()
def _create(self, dicts, cols):
    """Widen VARCHAR column lengths so they fit the data in *dicts*.

    For each column, scan every record; when a string value exceeds the
    column's declared length, grow the length (~20% headroom).  A column
    that grows past 48 characters is promoted to TEXT and excluded from
    further scanning.

    NOTE(review): the isinstance check uses the Python 2 ``unicode``
    builtin — under Python 3 this raises NameError; confirm the target
    runtime.  Also assumes ``col.type.length`` is an int (not None) —
    TODO confirm for length-less VARCHAR columns.
    """
    exclude_cols = set()
    for name, col in cols.items():
        if name in exclude_cols:
            continue
        for d in dicts:
            try:
                value = d.get(name, '')
                # Only string values participate in length sizing.
                if value is None or not isinstance(value, (str, unicode)):
                    continue
                if col.type.length < len(value):
                    # col.type.length = int(1.618 * len(d[name]) + 0.5)
                    col.type.length = int(1.2 * len(value) + 0.5)
            except:
                # Print debugging context before re-raising.
                print(name, col.type, value, file=sys.stderr)
                raise
            if col.type.length > 48:
                # Too wide for a sized VARCHAR — promote to TEXT and stop
                # scanning this column.
                col.type = sql.TEXT()
                exclude_cols.add(name)
                break
def variants_gene_columns(self):
    """Column definitions for the per-gene annotation fields.

    All of these columns are also stored in the variant_impacts table.
    """
    flag_names = ("is_exonic", "is_coding", "is_lof", "is_splicing")
    columns = [
        sql.Column("gene", sql.String(20)),
        sql.Column("transcript", sql.String(20)),
    ]
    columns.extend(sql.Column(flag, sql.Boolean()) for flag in flag_names)
    columns.extend([
        sql.Column("exon", sql.String(8)),
        sql.Column("codon_change", sql.TEXT()),
        sql.Column("aa_change", sql.TEXT()),
        sql.Column("aa_length", sql.String(8)),
        sql.Column("biotype", sql.String(50)),
        sql.Column("impact", sql.String(20)),
        sql.Column("impact_so", sql.String(20)),
        sql.Column("impact_severity", sql.String(4)),
        sql.Column("polyphen_pred", sql.String(20)),
        sql.Column("polyphen_score", sql.Float()),
        sql.Column("sift_pred", sql.String(20)),
        sql.Column("sift_score", sql.Float()),
    ])
    return columns
def downgrade():
    """Recreate the legacy ``privatemessages`` table and drop the newer
    ``messages``/``conversations`` tables.

    Near-duplicate of another revision's downgrade; this one uses the
    generic ``sa.DateTime`` for ``date_created`` instead of ``sa.DATETIME``.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('privatemessages',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('user_id', sa.INTEGER(), nullable=False),
    sa.Column('from_user_id', sa.INTEGER(), nullable=True),
    sa.Column('to_user_id', sa.INTEGER(), nullable=True),
    sa.Column('subject', sa.VARCHAR(length=255), nullable=True),
    sa.Column('message', sa.TEXT(), nullable=True),
    sa.Column('date_created', sa.DateTime(), nullable=True),
    sa.Column('trash', sa.BOOLEAN(), nullable=False),
    sa.Column('draft', sa.BOOLEAN(), nullable=False),
    sa.Column('unread', sa.BOOLEAN(), nullable=False),
    sa.ForeignKeyConstraint(['from_user_id'], [u'users.id'], ),
    sa.ForeignKeyConstraint(['to_user_id'], [u'users.id'], ),
    sa.ForeignKeyConstraint(['user_id'], [u'users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.drop_table('messages')
    op.drop_table('conversations')
    ### end Alembic commands ###
def _build_sql_query(self, query, query_filters):
    """Build a SQLAlchemy query over packages, optionally filtered.

    Args:
        query: free-text search term; ``'*'`` means "match everything".
        query_filters: strings of the form ``"<class>:<term>"``; only the
            ``publisher`` filter class is supported.

    Raises:
        InvalidUsage: for any unsupported filter class.
    """
    sql_query = Package.query.join(Package.publisher)
    sa_filters = []
    for f in query_filters:
        filter_class, filter_term = f.split(":")
        if filter_class == 'publisher':
            sa_filters.append(Publisher.name == filter_term)
        else:
            raise InvalidUsage("not supported any other filter right now")
    if len(sa_filters) > 0:
        # Multiple publisher filters are OR-ed together.
        sql_query = sql_query.filter(or_(*sa_filters))
    # NOTE(review): with ``or`` here, a blank/whitespace query still applies
    # an ilike('%%') title filter (restricting to active 'latest' packages);
    # confirm ``or`` vs ``and`` is intended.
    if query != '*' or not query.strip():
        sql_query = sql_query.join(Package.tags)\
            .filter(PackageTag.descriptor.op('->>')('title')
                    .cast(sqlalchemy.TEXT)
                    .ilike("%{q}%".format(q=query)),
                    PackageTag.tag == 'latest',
                    Package.status == PackageStateEnum.active)
    return sql_query
def upgrade():
    """Backfill NULL text columns of ``translated_string`` with ``''`` and
    make them NOT NULL.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Identical backfill-then-constrain pattern for every text column,
    # collapsed from five copy-pasted pairs into one loop (same statements,
    # same order).
    for column in ('base_string', 'comment', 'context', 'translation',
                   'translator_comment'):
        op.execute("UPDATE translated_string SET {0}='' WHERE {0} is NULL"
                   .format(column))
        op.alter_column('translated_string', column,
                        existing_type=sa.TEXT(),
                        nullable=False)
    # ### end Alembic commands ###
def downgrade():
    """Relax the NOT NULL constraints on ``translated_string`` text columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    # One alter_column per text column, collapsed from five copy-pasted
    # calls into a loop; order matches the original (reverse of upgrade).
    for column in ('translator_comment', 'translation', 'context',
                   'comment', 'base_string'):
        op.alter_column('translated_string', column,
                        existing_type=sa.TEXT(),
                        nullable=True)
    # ### end Alembic commands ###
def upgrade():
    """Create the ``web_hook`` and ``web_hook_token`` tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('web_hook',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('name', sa.TEXT(), nullable=False, unique=True),
    sa.Column('description', sa.TEXT(), nullable=True),
    sa.Column('url', sa.TEXT(), nullable=False),
    sa.Column('shared_secret', sa.TEXT(), nullable=False),
    sa.Column('status', sa.Integer(), nullable=False),
    sa.Column('consecutive_failure_count', sa.Integer(), nullable=False),
    sa.Column('register_time', sa.TIMESTAMP(), nullable=False),
    sa.Column('created_by_uid', postgresql.UUID(as_uuid=True), nullable=True),
    # Permissions stored as text; server default is the string '"[]"'.
    sa.Column('permissions', sa.TEXT(), nullable=False, server_default='"[]"'),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('web_hook_token',
    sa.Column('token_id', sa.TEXT(), nullable=False),
    sa.Column('user_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('web_hook_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
    sa.ForeignKeyConstraint(['web_hook_id'], ['web_hook.id'], ),
    sa.PrimaryKeyConstraint('user_id', 'web_hook_id')
    )
    # ### end Alembic commands ###
def upgrade():
    """Add the nullable ``direct_access`` TEXT column to the ``pods`` table."""
    direct_access = sa.Column(
        'direct_access', sa.TEXT(), autoincrement=False, nullable=True)
    op.add_column('pods', direct_access)
def downgrade():
    """Restore the single-template schema for predefined apps."""
    bind = op.get_bind()
    Base.metadata.bind = bind
    ### commands auto generated by Alembic - please adjust! ###
    # Re-add ``template`` as nullable first so downgrade_data() can populate
    # it before the NOT NULL constraint is reinstated below.
    op.add_column(u'predefined_apps', sa.Column('template', sa.TEXT(), nullable=True))
    op.drop_column(u'predefined_apps', 'is_deleted')
    downgrade_data(bind)
    op.alter_column(u'predefined_apps', u'template', nullable=False)
    op.drop_column(u'pods', 'template_version_id')
    op.drop_table('predefined_app_templates')
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``pages`` table and restore menu columns/foreign keys.

    Rebuilds ``pages``, re-adds the columns previously dropped from
    ``menus_items`` and ``menus``, restores their foreign keys, and drops
    the ``menuitem_roles`` association table.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('pages',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('ts', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('modified', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('modified_by_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('slug', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
    sa.Column('title', sa.VARCHAR(length=255), autoincrement=False, nullable=False),
    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint(['created_by_id'], [u'users.id'], name=u'pages_created_by_id_fkey'),
    sa.ForeignKeyConstraint(['modified_by_id'], [u'users.id'], name=u'pages_modified_by_id_fkey'),
    sa.PrimaryKeyConstraint('id', name=u'pages_pkey')
    )
    op.add_column('menus_items', sa.Column('is_public', sa.BOOLEAN(), autoincrement=False, nullable=True))
    op.add_column('menus_items', sa.Column('roles', sa.VARCHAR(length=255), autoincrement=False, nullable=True))
    op.add_column('menus_items', sa.Column('ts', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    op.add_column('menus_items', sa.Column('page_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('menus_items', sa.Column('is_group_label', sa.BOOLEAN(), autoincrement=False, nullable=True))
    op.add_column('menus_items', sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True))
    # Foreign keys can only be restored after the referenced columns exist.
    op.create_foreign_key(u'menus_items_page_id_fkey', 'menus_items', 'pages', ['page_id'], ['id'])
    op.create_foreign_key(u'menus_items_created_by_id_fkey', 'menus_items', 'users', ['created_by_id'], ['id'])
    op.add_column('menus', sa.Column('ts', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
    op.add_column('menus', sa.Column('created_by_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(u'menus_created_by_id_fkey', 'menus', 'users', ['created_by_id'], ['id'])
    op.drop_table('menuitem_roles')
    ### end Alembic commands ###
def downgrade():
    """Re-add ``node.active_ignition_config`` and drop the ``provision`` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): adding a NOT NULL column with no server_default fails if
    # ``node`` already contains rows — confirm before running on live data.
    op.add_column('node', sa.Column('active_ignition_config', sa.TEXT(), autoincrement=False, nullable=False))
    op.drop_table('provision')
    # ### end Alembic commands ###
def upgrade():
    """Switch references to per-reference-set TileDB column offsets.

    Adds offset bookkeeping columns, tightens NULL/uniqueness constraints,
    drops the old ``offset`` column, and installs a PostgreSQL trigger that
    assigns each newly inserted reference its TileDB column offset and
    advances the owning set's counter.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column(
        'reference_set',
        sa.Column('next_tiledb_column_offset', sa.BigInteger(), nullable=False,
                  default=0))
    op.add_column(
        'reference',
        sa.Column('tiledb_column_offset', sa.BigInteger(), nullable=True))
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(),
                    nullable=False)
    op.alter_column('reference', 'name', existing_type=sa.TEXT(),
                    nullable=False)
    # Reference names must be unique within a reference set.
    op.create_unique_constraint('unique_name_per_reference_set_constraint',
                                'reference', ['reference_set_id', 'name'])
    op.create_index('unique_reference_set_id_offset_idx', 'reference',
                    ['reference_set_id', 'tiledb_column_offset'], unique=True)
    op.drop_column('reference', 'offset')
    # Trigger on reference insertion
    op.execute('''\
CREATE OR REPLACE FUNCTION increment_next_column_in_reference_set_pgsql()
RETURNS trigger AS $increment_next_column_in_reference_set_pgsql$
BEGIN
UPDATE reference SET tiledb_column_offset=(select next_tiledb_column_offset from reference_set where id=NEW.reference_set_id) where NEW.tiledb_column_offset IS NULL and id=NEW.id;
UPDATE reference_set SET next_tiledb_column_offset=next_tiledb_column_offset+NEW.length WHERE id = NEW.reference_set_id;
RETURN NEW;
END;
$increment_next_column_in_reference_set_pgsql$ LANGUAGE plpgsql;
CREATE TRIGGER increment_next_column_in_reference_set AFTER INSERT ON reference
FOR EACH ROW EXECUTE PROCEDURE increment_next_column_in_reference_set_pgsql();
''')
    ### end Alembic commands ###
def downgrade():
    """Reverse the TileDB offset migration.

    Drops the insertion trigger, restores the old ``offset`` column, and
    removes the offset bookkeeping columns and constraints.
    """
    ### commands auto generated by Alembic - please adjust! ###
    # Drop trigger
    op.execute(
        'DROP TRIGGER increment_next_column_in_reference_set ON reference CASCADE')
    op.add_column('reference', sa.Column('offset', sa.BIGINT(), autoincrement=False, nullable=True))
    op.drop_index('unique_reference_set_id_offset_idx', table_name='reference')
    op.drop_constraint('unique_name_per_reference_set_constraint', 'reference', type_='unique')
    op.alter_column('reference', 'name', existing_type=sa.TEXT(), nullable=True)
    op.alter_column('reference', 'length', existing_type=sa.BIGINT(), nullable=True)
    op.drop_column('reference', 'tiledb_column_offset')
    op.drop_column('reference_set', 'next_tiledb_column_offset')
    ### end Alembic commands ###
def upgrade():
    """Create the ``file`` and ``lookup_request`` tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('file',
    sa.Column('file_id', sa.BIGINT(), nullable=False),
    # One column per supported digest algorithm.
    sa.Column('sha512_hash', sa.TEXT(), nullable=True),
    sa.Column('sha256_hash', sa.TEXT(), nullable=True),
    sa.Column('sha1_hash', sa.TEXT(), nullable=True),
    sa.Column('md5_hash', sa.TEXT(), nullable=True),
    sa.Column('size', sa.FLOAT(), nullable=True),
    sa.Column('mime_type', sa.VARCHAR(length=120), nullable=True),
    sa.Column('submitted_by', sa.VARCHAR(length=120), nullable=False),
    sa.Column('status', sa.VARCHAR(length=20), nullable=False),
    sa.Column('last_updated', sa.DATETIME(), nullable=False),
    sa.Column('first_seen', sa.DATETIME(), nullable=False),
    sa.PrimaryKeyConstraint('file_id')
    )
    op.create_table('lookup_request',
    sa.Column('request_id', sa.BIGINT(), nullable=False),
    sa.Column('requested_at', sa.DATETIME(), nullable=False),
    sa.Column('requestor', sa.VARCHAR(length=120), nullable=False),
    sa.Column('file_id', sa.BIGINT(), nullable=True),
    sa.Column('lookup_hash', sa.TEXT(), nullable=False),
    sa.Column('result', sa.VARCHAR(length=20), nullable=False),
    sa.ForeignKeyConstraint(['file_id'], ['file.file_id'], ),
    sa.PrimaryKeyConstraint('request_id')
    )
    # ### end Alembic commands ###
def _make_iris_table_metadata(self):
    """Build the SQLAlchemy Table describing the iris test data set."""
    sa = sqlalchemy
    metadata = sa.MetaData()
    # Four REAL measurement columns plus the TEXT species name.
    measurement_names = ('SepalLength', 'SepalWidth',
                         'PetalLength', 'PetalWidth')
    columns = [sa.Column(name, sa.REAL) for name in measurement_names]
    columns.append(sa.Column('Name', sa.TEXT))
    return sa.Table('iris', metadata, *columns)
def test_execute_closed_connection(self):
    """Queries against a closed MySQL connection must raise."""
    _skip_if_no_pymysql()
    drop_sql = "DROP TABLE IF EXISTS test"
    # MySQL requires key-length prefixes for TEXT primary-key columns,
    # hence a(5)/b(5) instead of the plain sqlite form.
    create_sql = """
    CREATE TABLE test
    (
    a TEXT,
    b TEXT,
    c REAL,
    PRIMARY KEY (a(5), b(5))
    );
    """
    cur = self.conn.cursor()
    cur.execute(drop_sql)
    cur.execute(create_sql)
    sql.execute('INSERT INTO test VALUES("foo", "bar", 1.234)', self.conn)
    self.conn.close()
    try:
        # Silence the error output printed on failure.
        sys.stdout = StringIO()
        self.assertRaises(Exception, sql.tquery, "select * from test",
                          con=self.conn)
    finally:
        sys.stdout = sys.__stdout__
    # Initialize connection again (needed for tearDown)
    self.setUp()
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.add_column('similar_stacks', sa.Column('appstack_id', sa.INTEGER(), autoincrement=False, nullable=True)) op.add_column('similar_stacks', sa.Column('dependency_list', postgresql.JSONB(), autoincrement=False, nullable=False)) op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.create_foreign_key('similar_stacks_appstack_id_fkey', 'similar_stacks', 'app_stacks', ['appstack_id'], ['id']) op.drop_constraint('sim_unique', 'similar_stacks', type_='unique') op.drop_column('similar_stacks', 'stack_id') op.drop_column('similar_stacks', 'similarity_value') op.drop_column('similar_stacks', 'similar_stack_id') op.drop_column('similar_stacks', 'analysis') op.create_table('app_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='app_stacks_pkey')) op.create_table('reference_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('version', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('dependencies', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='reference_stacks_pkey'), sa.UniqueConstraint('name', 'version', name='stack_unique')) op.drop_table('stacks') # end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('review', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('approved', sa.BOOLEAN(), autoincrement=False, nullable=True), sa.Column('user', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('timestamp', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.Column('comment', sa.TEXT(), autoincrement=False, nullable=True), sa.Column('epv', sa.VARCHAR(length=255), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['user'], ['user.id'], name='review_user_fkey'), sa.PrimaryKeyConstraint('id', name='review_pkey')) # end Alembic commands ###
def downgrade():
    """Restore the old tag indexes and recreate the ``wikicomment`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_index('public_tags_value1_idx', 'tags', ['value'], unique=False)
    op.create_index('public_tags_project_id0_idx', 'tags', ['project_id'], unique=False)
    op.drop_index(op.f('ix_tags_value'), table_name='tags')
    op.drop_index(op.f('ix_tags_project_id'), table_name='tags')
    op.create_table('wikicomment',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('link', sa.VARCHAR(length=255), autoincrement=False, nullable=True),
    sa.Column('users_id', sa.INTEGER(), autoincrement=False, nullable=True),
    sa.Column('datetimeon', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('content', sa.TEXT(), autoincrement=False, nullable=True),
    sa.PrimaryKeyConstraint('id', name='wikicomment_pkey')
    )
    ### end Alembic commands ###
def upgrade():
    """Create the ``queue`` table and its scheduling index."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('queue',
    sa.Column('id', sa.BIGINT(), nullable=False),
    sa.Column('enqueued_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=False),
    sa.Column('dequeued_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
    sa.Column('expected_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
    sa.Column('schedule_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
    sa.Column('q_name', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('data', postgresql.JSON(astext_type=sa.Text()), autoincrement=False, nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index('priority_idx', 'queue', ['schedule_at', 'expected_at'], unique=False)
    # ### end Alembic commands ###
def create_vcf_header_table(self):
    """(Re)create the single-row ``vcf_header`` table from the raw VCF header."""
    h = self.vcf.raw_header
    t = sql.Table("vcf_header", self.metadata,
                  #sql.Column("vcf_header", sql.TEXT(len(h)))
                  sql.Column("vcf_header", sql.TEXT))
    # Drop any previous copy so the table always reflects the current file.
    t.drop(self.engine, checkfirst=True)
    t.create()
    self.engine.execute(t.insert(), [dict(vcf_header=h.rstrip())])
def variants_default_columns(self):
    """Core columns present on every row of the variants table."""
    columns = []
    columns.append(sql.Column("variant_id", sql.Integer(), primary_key=True))
    columns.append(sql.Column("chrom", sql.String(10)))
    columns.append(sql.Column("start", sql.Integer()))
    columns.append(sql.Column("end", sql.Integer()))
    columns.append(sql.Column("vcf_id", sql.String(12)))
    # An integer "anno_id" column was considered here but never added.
    columns.append(sql.Column("ref", sql.TEXT()))
    columns.append(sql.Column("alt", sql.TEXT()))
    columns.append(sql.Column("qual", sql.Float()))
    columns.append(sql.Column("filter", sql.String(10)))
    return columns
def downgrade():
    """Restore the ``body_html`` TEXT column on the ``notes`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    body_html = sa.Column(
        'body_html', sa.TEXT(), autoincrement=False, nullable=True)
    op.add_column('notes', body_html)
    ### end Alembic commands ###
def downgrade():
    """Recreate the ``topic``/``topic_follow`` tables and relink ``link``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('topic',
    sa.Column('id', sa.INTEGER(), server_default=sa.text(u"nextval('topic_id_seq'::regclass)"), nullable=False),
    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('title', sa.VARCHAR(length=128), autoincrement=False, nullable=False),
    sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.Column('is_deleted', sa.BOOLEAN(), server_default=sa.text(u'false'), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint(['user_id'], [u'user.id'], name=u'fk_topic_user'),
    sa.PrimaryKeyConstraint('id', name=u'topic_pkey'),
    postgresql_ignore_search_path=False
    )
    op.create_table('topic_follow',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('topic_id', sa.INTEGER(), autoincrement=False, nullable=False),
    sa.Column('created_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
    sa.ForeignKeyConstraint(['topic_id'], [u'topic.id'], name=u'fk_topic_follow_topic'),
    sa.ForeignKeyConstraint(['user_id'], [u'user.id'], name=u'fk_topic_follow_user'),
    sa.PrimaryKeyConstraint('id', name=u'topic_follow_pkey'),
    # A user may follow a given topic at most once.
    sa.UniqueConstraint('topic_id', 'user_id', name=u'ux_topic_follow_user_follow_topic')
    )
    op.add_column('link', sa.Column('topic_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key(u'fk_link_topic', 'link', 'topic', ['topic_id'], ['id'])
    ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('similar_stacks', sa.Column('appstack_id', sa.INTEGER(), autoincrement=False, nullable=True)) op.add_column('similar_stacks', sa.Column('dependency_list', postgresql.JSONB(), autoincrement=False, nullable=False)) op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.create_foreign_key('similar_stacks_appstack_id_fkey', 'similar_stacks', 'app_stacks', ['appstack_id'], ['id']) op.drop_constraint('sim_unique', 'similar_stacks', type_='unique') op.drop_column('similar_stacks', 'stack_id') op.drop_column('similar_stacks', 'similarity_value') op.drop_column('similar_stacks', 'similar_stack_id') op.drop_column('similar_stacks', 'analysis') op.create_table('app_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='app_stacks_pkey')) op.create_table('reference_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('version', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('dependencies', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='reference_stacks_pkey'), sa.UniqueConstraint('name', 'version', name='stack_unique')) op.drop_table('stacks') # ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('review', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('approved', sa.BOOLEAN(), autoincrement=False, nullable=True), sa.Column('user', sa.INTEGER(), autoincrement=False, nullable=True), sa.Column('timestamp', postgresql.TIMESTAMP(), autoincrement=False, nullable=True), sa.Column('comment', sa.TEXT(), autoincrement=False, nullable=True), sa.Column('epv', sa.VARCHAR(length=255), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['user'], ['user.id'], name='review_user_fkey'), sa.PrimaryKeyConstraint('id', name='review_pkey')) # ### end Alembic commands ###
def downgrade():
    """Recreate the old ``ref`` table and drop ``web_page_cache``."""
    table_args = [
        'ref',
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('name', sa.VARCHAR(), nullable=False),
        sa.Column('body', sa.TEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name'),
    ]
    op.create_table(*table_args)
    op.drop_table('web_page_cache')
def upgrade():
    """Add a NOT NULL ``plural`` text column to ``base_string``.

    The column is added nullable first, backfilled with '', and then
    tightened to NOT NULL so the migration works on populated tables.
    """
    op.add_column('base_string', sa.Column('plural', sa.Text(), nullable=True))
    op.execute("UPDATE base_string SET plural='' WHERE plural is NULL")
    op.alter_column('base_string', 'plural', existing_type=sa.TEXT(), nullable=False)
def downgrade():
    """Rebuild ``translated_string`` with inline text columns; drop ``base_string``.

    NOTE(review): the table is dropped and recreated rather than altered,
    so existing translated_string rows are lost — confirm this is intended.
    """
    op.drop_table('translated_string')
    op.create_table('translated_string',
    sa.Column('id', postgresql.UUID(), server_default=sa.text('uuid_generate_v4()'), autoincrement=False, nullable=False),
    sa.Column('base_string', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('translation', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('comment', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('translator_comment', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('context', sa.TEXT(), autoincrement=False, nullable=False),
    sa.Column('resource_pk', postgresql.UUID(), autoincrement=False, nullable=False),
    sa.ForeignKeyConstraint(['resource_pk'], ['resource.id'], name='translated_string_resource_pk_fkey'),
    sa.PrimaryKeyConstraint('id', name='translated_string_pkey')
    )
    op.drop_table('base_string')
def upgrade():
    """Add ``type`` (backfilled) and ``libyk_so`` columns to ``bangumi``."""
    ### commands auto generated by Alembic - please adjust! ###
    # server_default='2' backfills existing rows; the default is then removed
    # so new inserts must supply an explicit type.
    op.add_column('bangumi', sa.Column('type', sa.Integer(), nullable=False, server_default='2'))
    op.alter_column('bangumi', 'type', server_default=None)
    op.add_column('bangumi', sa.Column('libyk_so', sa.TEXT(), nullable=True))
    ### end Alembic commands ###
def upgrade():
    """Create the ``server_session`` table for server-side session storage."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('server_session',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('session_id', sa.TEXT(), nullable=True),
    sa.Column('data', sa.LargeBinary(), nullable=True),
    sa.Column('expiry', sa.DateTime(), nullable=True),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('session_id')
    )
    ### end Alembic commands ###
def upgrade():
    """Create the ``task`` table and add delete marks to bangumi/episodes."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('task',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('type', sa.Integer(), nullable=False),
    sa.Column('content', sa.TEXT(), nullable=True),
    sa.Column('status', sa.Integer(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.add_column(u'bangumi', sa.Column('delete_mark', sa.TIMESTAMP(), nullable=True))
    op.add_column(u'episodes', sa.Column('delete_mark', sa.TIMESTAMP(), nullable=True))
    # ### end Alembic commands ###
def upgrade():
    """Create the ``feed`` table and add source URL columns to ``bangumi``."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('feed',
    sa.Column('episode_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('bangumi_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('download_url', sa.TEXT(), nullable=False),
    sa.Column('torrent_file_id', postgresql.UUID(as_uuid=True), nullable=True),
    sa.PrimaryKeyConstraint('episode_id')
    )
    op.add_column(u'bangumi', sa.Column('acg_rip', sa.TEXT(), nullable=True))
    op.add_column(u'bangumi', sa.Column('dmhy', sa.TEXT(), nullable=True))
    ### end Alembic commands ###
def upgrade():
    """Create the ``announce`` table for time-bounded announcements."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('announce',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('url', sa.TEXT(), nullable=True),
    sa.Column('image_url', sa.TEXT(), nullable=True),
    sa.Column('position', sa.Integer(), nullable=False),
    sa.Column('sort_order', sa.Integer(), nullable=True),
    sa.Column('start_time', sa.TIMESTAMP(), nullable=False),
    sa.Column('end_time', sa.TIMESTAMP(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###
def upgrade():
    """Backfill ``journal.content`` from on-disk files, then make it NOT NULL.

    For each journal row with NULL content, try the legacy plain-id path and
    then the hashed path.  A missing file is fatal for a visible journal;
    hidden journals get a '(file missing)' placeholder instead.
    """
    journal_root = os.path.join(os.environ['WEASYL_STORAGE_ROOT'], 'static/journal')
    connection = op.get_bind()
    # position('h' in settings) != 0 flags hidden journals.
    query = connection.execute("SELECT journalid, position('h' in settings) != 0 FROM journal WHERE content IS NULL")
    updates = []
    for journal_id, is_hidden in query:
        content = _try_read([
            os.path.join(journal_root, '%d.txt' % (journal_id,)),
            _get_hash_path(journal_root, journal_id),
        ])
        if content is not None:
            updates.append((journal_id, content))
        elif not is_hidden:
            # Python 2 doesn't have FileNotFoundError
            raise IOError("Missing journal file for non-hidden journal %d; hide journals with known missing files before running this migration" % (journal_id,))
    if updates:
        # Bulk update via UNNEST so all backfills happen in one statement.
        connection.execute(
            """
            UPDATE journal SET content = t.content
            FROM UNNEST (%(updates)s) AS t (journalid integer, content unknown)
            WHERE journal.journalid = t.journalid AND journal.content IS NULL
            """,
            updates=updates,
        )
    connection.execute("UPDATE journal SET content = '(file missing)' WHERE content IS NULL AND position('h' in settings) != 0")
    op.alter_column('journal', 'content', existing_type=sa.TEXT(), nullable=False)
def downgrade():
    """Allow NULL values in ``journal.content`` again."""
    op.alter_column(
        'journal',
        'content',
        existing_type=sa.TEXT(),
        nullable=True,
    )
def upgrade():
    """Create id sequences and the endpoint_log, session_token, system_log,
    and user_account tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    # One dedicated sequence per table, created before the tables that use them.
    for seq_name in (
        "endpoint_log_id_seq",
        "session_token_id_seq",
        "system_log_id_seq",
        "user_account_id_seq",
    ):
        op.execute(CreateSequence(Sequence(seq_name)))

    def serial_pk(sequence_name):
        # INTEGER primary-key column whose default draws from the given sequence.
        return sa.Column(
            'id',
            sa.INTEGER(),
            server_default=sa.text("nextval('%s')" % sequence_name),
            nullable=False,
        )

    op.create_table(
        'endpoint_log',
        serial_pk('endpoint_log_id_seq'),
        sa.Column('start_utc', postgresql.TIMESTAMP(), nullable=False),
        sa.Column('duration_ms', sa.INTEGER(), nullable=False),
        sa.Column('endpoint', sa.TEXT(), nullable=True),
        sa.Column('username', sa.TEXT(), nullable=True),
        sa.Column('method', sa.TEXT(), nullable=True),
        sa.Column('http_code', sa.TEXT(), nullable=True),
        sa.Column('error_message', sa.TEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'session_token',
        serial_pk('session_token_id_seq'),
        sa.Column('user_id', sa.TEXT(), nullable=True),
        sa.Column('token', sa.TEXT(), nullable=True),
        sa.Column('created_utc', postgresql.TIMESTAMP(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'system_log',
        serial_pk('system_log_id_seq'),
        sa.Column('event_utc', postgresql.TIMESTAMP(), server_default=sa.text('now()'), nullable=False),
        sa.Column('level', sa.TEXT(), nullable=True),
        sa.Column('message', sa.TEXT(), nullable=True),
        sa.Column('source', sa.TEXT(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'user_account',
        serial_pk('user_account_id_seq'),
        sa.Column('user_id', sa.TEXT(), nullable=True),
        sa.Column('email', sa.TEXT(), nullable=True),
        sa.Column('secret', sa.TEXT(), nullable=True),
        sa.Column('creation_utc', postgresql.TIMESTAMP(), server_default=sa.text('now()'), nullable=True),
        sa.Column('last_updated_utc', postgresql.TIMESTAMP(), server_default=sa.text('now()'), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('stacks_id_seq'::regclass)"), nullable=False), sa.Column('is_ref_stack', sa.BOOLEAN(), autoincrement=False, nullable=False), sa.Column('stack_json', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='stacks_pkey'), postgresql_ignore_search_path=False) op.create_table('similar_components', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('fromcomponent', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('tocomponent', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('similarity_distance', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='similar_components_pkey'), sa.UniqueConstraint('fromcomponent', 'tocomponent', name='sim_comps')) op.create_table('similar_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('analysis', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), sa.Column('similar_stack_id', sa.INTEGER(), autoincrement=False, nullable=False), sa.Column('similarity_value', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False), sa.Column('stack_id', sa.INTEGER(), autoincrement=False, nullable=False), sa.ForeignKeyConstraint(['similar_stack_id'], ['stacks.id'], name='similar_stacks_similar_stack_id_fkey'), sa.ForeignKeyConstraint(['stack_id'], ['stacks.id'], name='similar_stacks_stack_id_fkey'), sa.PrimaryKeyConstraint('id', name='similar_stacks_pkey'), sa.UniqueConstraint('stack_id', 'similar_stack_id', name='sim_unique')) op.create_table('esmarker', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('worker_result_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['worker_result_id'], 
['worker_results.id'], name='esmarker_worker_result_id_fkey'), sa.PrimaryKeyConstraint('id', name='esmarker_pkey')) # ### end Alembic commands ###
def upgrade():
    """Create the publisher, user, package, and publisher_user tables and
    their lookup indexes."""
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'publisher',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('title', sa.Text(), nullable=True),
        sa.Column('private', sa.BOOLEAN(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    # Publisher names are unique; index doubles as the uniqueness guarantee.
    op.create_index(op.f('ix_publisher_name'), 'publisher', ['name'], unique=True)
    op.create_table(
        'user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('email', sa.TEXT(), nullable=True),
        sa.Column('secret', sa.TEXT(), nullable=True),
        sa.Column('name', sa.TEXT(), nullable=False),
        sa.Column('full_name', sa.TEXT(), nullable=True),
        sa.Column('auth0_id', sa.TEXT(), nullable=True),
        sa.Column('sysadmin', sa.BOOLEAN(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_user_auth0_id'), 'user', ['auth0_id'], unique=False)
    op.create_index(op.f('ix_user_email'), 'user', ['email'], unique=False)
    op.create_index(op.f('ix_user_name'), 'user', ['name'], unique=True)
    op.create_table(
        'package',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('name', sa.TEXT(), nullable=True),
        sa.Column('descriptor', sa.JSON(), nullable=True),
        sa.Column('status', sa.TEXT(), nullable=True),
        sa.Column('private', sa.BOOLEAN(), nullable=True),
        sa.Column('readme', sa.TEXT(), nullable=True),
        sa.Column('publisher_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['publisher_id'], [u'publisher.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # A publisher cannot own two packages with the same name.
        sa.UniqueConstraint('name', 'publisher_id'),
    )
    op.create_index(op.f('ix_package_name'), 'package', ['name'], unique=False)
    op.create_index(op.f('ix_package_status'), 'package', ['status'], unique=False)
    # Association table linking users to publishers with a role.
    op.create_table(
        'publisher_user',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('publisher_id', sa.Integer(), nullable=False),
        sa.Column('role', sa.TEXT(), nullable=False),
        sa.ForeignKeyConstraint(['publisher_id'], [u'publisher.id'], ),
        sa.ForeignKeyConstraint(['user_id'], [u'user.id'], ),
        sa.PrimaryKeyConstraint('id', 'user_id', 'publisher_id'),
    )
    ### end Alembic commands ###