We extracted the following 40 code examples from open-source Python projects to illustrate how to use sqlalchemy.dialects.postgresql.JSONB.
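Before the extracted examples, here is a minimal, self-contained sketch of the basic pattern: declaring a JSONB column on an ORM model and querying it with the containment operator. The Event model, payload column, and connection URL are illustrative assumptions, not taken from any of the projects below.

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Event(Base):
    # Hypothetical model used only to demonstrate a JSONB column.
    __tablename__ = 'events'

    id = sa.Column(sa.Integer, primary_key=True)
    payload = sa.Column(JSONB, nullable=False)


engine = sa.create_engine('postgresql://localhost/example')  # assumed DSN
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Event(payload={'kind': 'click', 'count': 3}))
    session.commit()

    # JSONB containment (@>): rows whose payload contains {"kind": "click"}
    clicks = session.query(Event).filter(
        Event.payload.contains({'kind': 'click'})
    ).all()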
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('analyses', sa.Column('id', sa.Integer(), nullable=False), sa.Column('ecosystem', sa.Integer(), nullable=True), sa.Column('package', sa.String(length=255), nullable=True), sa.Column('version', sa.String(length=255), nullable=True), sa.Column('access_count', sa.Integer(), nullable=True), sa.Column('started_at', sa.DateTime(), nullable=True), sa.Column('finished_at', sa.DateTime(), nullable=True), sa.Column('analyses', postgresql.JSONB(), nullable=True), sa.Column('subtasks', postgresql.JSONB(), nullable=True), sa.Column('release', sa.String(length=255), nullable=True), sa.Column('audit', postgresql.JSONB(), nullable=True), sa.PrimaryKeyConstraint('id')) op.create_table('worker_results', sa.Column('id', sa.Integer(), nullable=False), sa.Column('worker', sa.String(length=255), nullable=True), sa.Column('analysis_id', sa.Integer(), nullable=True), sa.Column('task_result', postgresql.JSONB(), nullable=True), sa.ForeignKeyConstraint(['analysis_id'], ['analyses.id'], ), sa.PrimaryKeyConstraint('id')) # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('batch', sa.Column('token', sa.Integer(), nullable=False), sa.Column('started_at', sa.DateTime(), nullable=True), sa.Column('epvs', postgresql.JSONB(), nullable=True), sa.PrimaryKeyConstraint('token')) op.create_table('review', sa.Column('id', sa.Integer(), nullable=False), sa.Column('approved', sa.Boolean(), nullable=True), sa.Column('user', sa.Integer(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('comment', sa.Text(), nullable=True), sa.Column('epv', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['user'], ['user.id'], ), sa.PrimaryKeyConstraint('id')) # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('is_ref_stack', sa.Boolean(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) op.add_column('similar_stacks', sa.Column('analysis', postgresql.JSONB())) op.add_column('similar_stacks', sa.Column('similar_stack_id', sa.Integer(), nullable=False)) op.add_column('similar_stacks', sa.Column('similarity_value', sa.Float(), nullable=False)) op.add_column('similar_stacks', sa.Column('stack_id', sa.Integer(), nullable=False)) op.create_unique_constraint('sim_unique', 'similar_stacks', ['stack_id', 'similar_stack_id']) op.drop_constraint('similar_stacks_appstack_id_fkey', 'similar_stacks', type_='foreignkey') op.create_foreign_key(None, 'similar_stacks', 'stacks', ['stack_id'], ['id']) op.create_foreign_key(None, 'similar_stacks', 'stacks', ['similar_stack_id'], ['id']) op.drop_column('similar_stacks', 'dependency_list') op.drop_column('similar_stacks', 'appstack_id') op.drop_table('reference_stacks') op.drop_table('app_stacks') # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('batch', sa.Column('token', sa.Integer(), nullable=False), sa.Column('started_at', sa.DateTime(), nullable=True), sa.Column('epvs', postgresql.JSONB(), nullable=True), sa.PrimaryKeyConstraint('token')) op.create_table('review', sa.Column('id', sa.Integer(), nullable=False), sa.Column('approved', sa.Boolean(), nullable=True), sa.Column('user', sa.Integer(), nullable=True), sa.Column('timestamp', sa.DateTime(), nullable=True), sa.Column('comment', sa.Text(), nullable=True), sa.Column('epv', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['user'], ['user.id'], ), sa.PrimaryKeyConstraint('id')) # ### end Alembic commands ###
def upgrade():
    op.create_table('fda_dap',
        # Meta
        sa.Column('meta_id', sa.Text, unique=True),
        sa.Column('meta_source', sa.Text),
        sa.Column('meta_created', sa.DateTime(timezone=True)),
        sa.Column('meta_updated', sa.DateTime(timezone=True)),
        # General
        sa.Column('id', sa.Text, unique=True),
        sa.Column('documents', JSONB),
        sa.Column('approval_type', sa.Text),
        sa.Column('supplement_number', sa.Integer),
        sa.Column('action_date', sa.Date),
        sa.Column('fda_application_num', sa.Text),
        sa.Column('notes', sa.Text),
    )
def upgrade():
    op.add_column(
        'dataset',
        sa.Column(
            'meta',
            postgresql.JSONB(),
            nullable=True
        )
    )
    op.execute('''
        UPDATE "dataset"
        SET meta = jsonb_set(coalesce(meta, '{}'),
                             '{organization_id}',
                             to_jsonb(organization_id))
        WHERE organization_id IS NOT NULL
    ''')
    op.drop_column('dataset', 'organization_id')
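The raw-SQL backfill above could also be written with SQLAlchemy expression constructs. The following is a rough sketch, not part of the original migration, that assumes the same dataset table and columns and the Alembic op object from the surrounding migration:

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

# Lightweight table object just for this data migration.
dataset = sa.table('dataset', sa.column('meta'), sa.column('organization_id'))

op.execute(
    dataset.update()
    .where(dataset.c.organization_id.isnot(None))
    .values(
        meta=sa.func.jsonb_set(
            # coalesce(meta, '{}'::jsonb)
            sa.func.coalesce(dataset.c.meta, sa.cast({}, postgresql.JSONB)),
            # path '{organization_id}' as a text[]
            sa.cast(['organization_id'], postgresql.ARRAY(sa.Text)),
            sa.func.to_jsonb(dataset.c.organization_id),
        )
    )
)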
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('rule', sa.Column('conditions', postgresql.JSONB(), nullable=True))
    op.add_column('rule', sa.Column('description', sa.String(), nullable=True))
    op.drop_column('rule', 'action')
    op.drop_column('rule', 'type')
    op.drop_column('rule', 'config')
    ### end Alembic commands ###
def downgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('rule', sa.Column('config', postgresql.JSONB(), autoincrement=False, nullable=True))
    op.add_column('rule', sa.Column('type', sa.VARCHAR(), autoincrement=False, nullable=False))
    op.add_column('rule', sa.Column('action', postgresql.ENUM(u'added', u'removed', u'both', name='rule_actions'),
                                    autoincrement=False, nullable=False))
    op.drop_column('rule', 'description')
    op.drop_column('rule', 'conditions')
    ### end Alembic commands ###
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('rule',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('type', sa.String(), nullable=False),
        sa.Column('name', sa.String(), nullable=False),
        sa.Column('action', sa.Enum('added', 'removed', 'both', name='rule_actions'), nullable=False),
        sa.Column('alerters', postgresql.ARRAY(sa.String()), nullable=False),
        sa.Column('config', postgresql.JSONB(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def upgrade():
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('node', sa.Column('node_info', postgresql.JSONB(), server_default='{}', nullable=False))
    op.drop_index('idx__rule__updated_at', table_name='rule')
    ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.add_column('batch', sa.Column('epvs', postgresql.JSONB(), autoincrement=False, nullable=True)) op.drop_table('batches_versions') # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('stack_analyses_request', sa.Column('id', sa.String(length=64), nullable=False), sa.Column('submitTime', sa.DateTime(), nullable=False), sa.Column('startTime', sa.DateTime(), nullable=True), sa.Column('endTime', sa.DateTime(), nullable=True), sa.Column('requestJson', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('is_ref_stack', sa.Boolean(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) op.add_column('similar_stacks', sa.Column('analysis', postgresql.JSONB())) op.add_column('similar_stacks', sa.Column('similar_stack_id', sa.Integer(), nullable=False)) op.add_column('similar_stacks', sa.Column('similarity_value', sa.Float(), nullable=False)) op.add_column('similar_stacks', sa.Column('stack_id', sa.Integer(), nullable=False)) op.create_unique_constraint('sim_unique', 'similar_stacks', ['stack_id', 'similar_stack_id']) op.drop_constraint('similar_stacks_appstack_id_fkey', 'similar_stacks', type_='foreignkey') op.create_foreign_key(None, 'similar_stacks', 'stacks', ['stack_id'], ['id']) op.create_foreign_key(None, 'similar_stacks', 'stacks', ['similar_stack_id'], ['id']) op.drop_column('similar_stacks', 'dependency_list') op.drop_column('similar_stacks', 'appstack_id') op.drop_table('reference_stacks') op.drop_table('app_stacks') # end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.add_column('similar_stacks', sa.Column('appstack_id', sa.INTEGER(), autoincrement=False, nullable=True)) op.add_column('similar_stacks', sa.Column('dependency_list', postgresql.JSONB(), autoincrement=False, nullable=False)) op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.create_foreign_key('similar_stacks_appstack_id_fkey', 'similar_stacks', 'app_stacks', ['appstack_id'], ['id']) op.drop_constraint('sim_unique', 'similar_stacks', type_='unique') op.drop_column('similar_stacks', 'stack_id') op.drop_column('similar_stacks', 'similarity_value') op.drop_column('similar_stacks', 'similar_stack_id') op.drop_column('similar_stacks', 'analysis') op.create_table('app_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='app_stacks_pkey')) op.create_table('reference_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('version', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('dependencies', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='reference_stacks_pkey'), sa.UniqueConstraint('name', 'version', name='stack_unique')) op.drop_table('stacks') # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.create_table('reference_stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('version', sa.String(length=255), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('dependencies', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name', 'version', name='stack_unique')) # end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.add_column('analyses', sa.Column('analyses', postgresql.JSONB(), autoincrement=False, nullable=True)) # end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # commands auto generated by Alembic - please adjust! ### op.alter_column('stack_analyses_request', 'requestJson', existing_type=postgresql.JSONB(), type_=sa.String(length=4096), existing_nullable=False) # end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # commands auto generated by Alembic - please adjust! ### op.alter_column('stack_analyses_request', 'requestJson', existing_type=sa.String(length=4096), type_=postgresql.JSONB(), existing_nullable=False) # end Alembic commands ###
def load_dialect_impl(self, dialect):
    if dialect.name == 'postgresql':
        # Use the native JSON type.
        return dialect.type_descriptor(_JSONB())
    else:
        return dialect.type_descriptor(self.impl)
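For context, load_dialect_impl hooks like the one above belong to a sqlalchemy.types.TypeDecorator subclass. The following is a minimal self-contained sketch of that pattern; the class name JSONEncodedDict and its serialization choices are illustrative, not taken from the project quoted above:

import json

import sqlalchemy.types as types
from sqlalchemy.dialects.postgresql import JSONB


class JSONEncodedDict(types.TypeDecorator):
    """Store dicts as native JSONB on PostgreSQL and as JSON text elsewhere.

    Illustrative sketch only.
    """

    impl = types.Text
    cache_ok = True

    def load_dialect_impl(self, dialect):
        if dialect.name == 'postgresql':
            # Use the native JSONB type on PostgreSQL.
            return dialect.type_descriptor(JSONB())
        return dialect.type_descriptor(types.Text())

    def process_bind_param(self, value, dialect):
        if value is None or dialect.name == 'postgresql':
            # Native JSONB serializes the value itself.
            return value
        return json.dumps(value)

    def process_result_value(self, value, dialect):
        if value is None or dialect.name == 'postgresql':
            return value
        return json.loads(value)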
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('batch', sa.Column('epvs', postgresql.JSONB(), autoincrement=False, nullable=True)) op.drop_table('batches_versions') # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('stack_analyses_request', sa.Column('id', sa.String(length=64), nullable=False), sa.Column('submitTime', sa.DateTime(), nullable=False), sa.Column('startTime', sa.DateTime(), nullable=True), sa.Column('endTime', sa.DateTime(), nullable=True), sa.Column('requestJson', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) # ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('similar_stacks', sa.Column('appstack_id', sa.INTEGER(), autoincrement=False, nullable=True)) op.add_column('similar_stacks', sa.Column('dependency_list', postgresql.JSONB(), autoincrement=False, nullable=False)) op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.drop_constraint(None, 'similar_stacks', type_='foreignkey') op.create_foreign_key('similar_stacks_appstack_id_fkey', 'similar_stacks', 'app_stacks', ['appstack_id'], ['id']) op.drop_constraint('sim_unique', 'similar_stacks', type_='unique') op.drop_column('similar_stacks', 'stack_id') op.drop_column('similar_stacks', 'similarity_value') op.drop_column('similar_stacks', 'similar_stack_id') op.drop_column('similar_stacks', 'analysis') op.create_table('app_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='app_stacks_pkey')) op.create_table('reference_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('name', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('version', sa.VARCHAR(length=255), autoincrement=False, nullable=False), sa.Column('description', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('dependencies', postgresql.JSONB(), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='reference_stacks_pkey'), sa.UniqueConstraint('name', 'version', name='stack_unique')) op.drop_table('stacks') # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('monitored_upstreams', sa.Column('id', sa.Integer(), nullable=False), sa.Column('package_id', sa.Integer(), nullable=True), sa.Column('url', sa.String(length=255), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('added_at', sa.DateTime(), nullable=False), sa.Column('deactivated_at', sa.DateTime(), nullable=True), sa.Column('active', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['package_id'], ['packages.id'], ), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('ix_monitored_upstreams_package_id'), 'monitored_upstreams', ['package_id'], unique=False) op.create_table('package_analyses', sa.Column('id', sa.Integer(), nullable=False), sa.Column('package_id', sa.Integer(), nullable=True), sa.Column('started_at', sa.DateTime(), nullable=True), sa.Column('finished_at', sa.DateTime(), nullable=True), sa.ForeignKeyConstraint(['package_id'], ['packages.id'], ), sa.PrimaryKeyConstraint('id')) op.create_index(op.f('ix_package_analyses_package_id'), 'package_analyses', ['package_id'], unique=False) op.create_table('package_worker_results', sa.Column('id', sa.Integer(), nullable=False), sa.Column('package_analysis_id', sa.Integer(), nullable=True), sa.Column('worker', sa.String(length=255), nullable=True), sa.Column('worker_id', sa.String(length=64), nullable=True), sa.Column('external_request_id', sa.String(length=64), nullable=True), sa.Column('task_result', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('error', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['package_analysis_id'], ['package_analyses.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('worker_id')) op.create_index(op.f('ix_package_worker_results_package_analysis_id'), 'package_worker_results', ['package_analysis_id'], unique=False) op.create_index(op.f('ix_package_worker_results_worker'), 'package_worker_results', ['worker'], unique=False) # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('stack_analyses_request', sa.Column('result', postgresql.JSONB(), nullable=True)) op.add_column('stack_analyses_request', sa.Column('team', sa.String(length=64), nullable=True)) # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('reference_stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('version', sa.String(length=255), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('dependencies', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name', 'version', name='stack_unique')) # ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.add_column('analyses', sa.Column('analyses', postgresql.JSONB(), autoincrement=False, nullable=True)) # ### end Alembic commands ###
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.alter_column('stack_analyses_request', 'requestJson', existing_type=sa.String(length=4096), type_=postgresql.JSONB(), existing_nullable=False) # ### end Alembic commands ###
def upgrade(): """Upgrade the database to a newer revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('app_stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('stack_json', postgresql.JSONB(), nullable=False), sa.PrimaryKeyConstraint('id')) op.create_table('similar_stacks', sa.Column('id', sa.Integer(), nullable=False), sa.Column('appstack_id', sa.Integer(), nullable=True), sa.Column('dependency_list', postgresql.JSONB(), nullable=False), sa.ForeignKeyConstraint(['appstack_id'], ['app_stacks.id'], ), sa.PrimaryKeyConstraint('id')) # ### end Alembic commands ###
def upgrade():
    op.create_table('document',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('data', postgresql.JSONB(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
def load_dialect_impl(self, dialect):
    if dialect.name == 'postgresql':
        from sqlalchemy.dialects.postgresql import JSONB
        return dialect.type_descriptor(JSONB())
    else:
        return dialect.type_descriptor(VARCHAR())
def upgrade():
    op.add_column('pubmed', sa.Column('mesh_headings', JSONB))
def upgrade():
    op.create_table('ictrp',
        # Meta
        sa.Column('meta_uuid', sa.Text),
        sa.Column('meta_source', sa.Text),
        sa.Column('meta_created', sa.DateTime(timezone=True)),
        sa.Column('meta_updated', sa.DateTime(timezone=True)),
        # Main
        sa.Column('register', sa.Text, primary_key=True),
        sa.Column('last_refreshed_on', sa.Date),
        sa.Column('main_id', sa.Text, primary_key=True),
        sa.Column('date_of_registration', sa.Text),
        sa.Column('primary_sponsor', sa.Text),
        sa.Column('public_title', sa.Text),
        sa.Column('scientific_title', sa.Text),
        sa.Column('date_of_first_enrollment', sa.Text),
        sa.Column('target_sample_size', sa.Integer),
        sa.Column('recruitment_status', sa.Text),
        sa.Column('url', sa.Text),
        sa.Column('study_type', sa.Text),
        sa.Column('study_design', sa.Text),
        sa.Column('study_phase', sa.Text),
        # Additional
        sa.Column('countries_of_recruitment', ARRAY(sa.Text)),
        sa.Column('contacts', JSONB),
        sa.Column('key_inclusion_exclusion_criteria', sa.Text),
        sa.Column('health_conditions_or_problems_studied', ARRAY(sa.Text)),
        sa.Column('interventions', ARRAY(sa.Text)),
        sa.Column('primary_outcomes', ARRAY(sa.Text)),
        sa.Column('secondary_outcomes', ARRAY(sa.Text)),
        sa.Column('secondary_ids', ARRAY(sa.Text)),
        sa.Column('sources_of_monetary_support', ARRAY(sa.Text)),
        sa.Column('secondary_sponsors', ARRAY(sa.Text)),
    )
def upgrade():
    op.create_table('cochrane_reviews',
        sa.Column('meta_id', sa.Text),
        sa.Column('meta_created', sa.DateTime(timezone=True), server_default=sa.text('now()')),
        sa.Column('meta_updated', sa.DateTime(timezone=True), server_default=sa.text('now()')),
        sa.Column('meta_source', sa.Text),
        sa.Column('id', UUID, primary_key=True),
        sa.Column('study_type', sa.Text),
        sa.Column('file_name', sa.Text),
        sa.Column('robs', JSONB),
        sa.Column('study_id', sa.Text),
        sa.Column('refs', JSONB),
        sa.Column('doi_id', sa.Text),
    )
def load_dialect_impl(self, dialect):
    if dialect.name == 'postgresql':
        if dialect.server_version_info >= (9, 4):
            self.using_native_json = True
            return dialect.type_descriptor(postgresql.JSONB())
        if dialect.server_version_info >= (9, 2):
            self.using_native_json = True
            return dialect.type_descriptor(postgresql.JSON())
    return dialect.type_descriptor(types.Text())
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('profile',
        sa.Column('profile_name', sa.String(length=128), nullable=False),
        sa.Column('properties', JSONB(), nullable=False),
        sa.PrimaryKeyConstraint('profile_name', name=op.f('profile_pkey'))
    )
    op.create_table('configuration',
        sa.Column('key', sa.String(length=128), nullable=False),
        sa.Column('value', sa.String(length=1024), nullable=False),
        sa.PrimaryKeyConstraint('key', name=op.f('configuration_pkey'))
    )
    op.create_table('device',
        sa.Column('device_id', sa.Integer(), nullable=False, autoincrement=True),
        sa.Column('device_type', sa.String(length=64), nullable=False),
        sa.Column('properties', JSONB(), nullable=True),
        sa.Column('hostname', sa.String(length=256), nullable=True),
        sa.Column('ip_address', sa.String(length=64), nullable=True),
        sa.Column('mac_address', sa.String(length=64), nullable=True),
        sa.Column('profile_name', sa.String(length=128), nullable=True),
        sa.Column('deleted', sa.BOOLEAN(), server_default=false_just_for_sqlalchemy(), nullable=False),
        sa.PrimaryKeyConstraint('device_id', name=op.f('device_pkey')),
        sa.ForeignKeyConstraint(['profile_name'], ['profile.profile_name'], name='device_profile',
                                match='SIMPLE', ondelete='NO ACTION', onupdate='NO ACTION')
    )
    op.create_table('log',
        sa.Column('process', sa.String(length=128), nullable=True),
        sa.Column('timestamp', sa.DateTime(timezone=True), nullable=False, server_default=func.now()),
        sa.Column('level', sa.Integer(), nullable=False),
        sa.Column('device_id', sa.Integer(), nullable=True),
        sa.Column('message', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['device_id'], ['device.device_id'], name='log_process',
                                match='SIMPLE', ondelete='NO ACTION', onupdate='NO ACTION'),
        sa.CheckConstraint('level = ANY (ARRAY[0, 10, 15, 20, 30, 40, 50])', name=op.f('valid_log_levels'))
    )
    creating_functions()
    # ### end Alembic commands ###
def map_column(self, mode: EditMode, request: Request, node: colander.SchemaNode, model: type,
               name: str, column: Column, column_type: TypeEngine) -> t.Tuple[colander.SchemaType, dict]:
    """Map non-relationship SQLAlchemy column to Colander SchemaNode.

    :return: Tuple(constructed colander.SchemaType, dict of additional colander.SchemaNode construction arguments)
    """
    logger.debug("Mapping field %s, mode %s, node %s, column %s, column type %s",
                 name, mode, node, column, column_type)

    # Check for autogenerated columns (updated_at)
    if column.onupdate:
        if mode in (EditMode.edit, EditMode.add):
            return TypeOverridesHandling.drop, {}

    # Don't fill default values when added, as they are automatically populated
    if column.default:
        if mode == EditMode.add:
            return TypeOverridesHandling.drop, {}

    # Never add primary keys
    # NOTE: TODO: We need to preserve ids because of nesting mechanism and groupedit widget wants it id
    if column.primary_key:
        # TODO: Looks like column.autoincrement is set True by default, so we cannot use it here
        if mode in (EditMode.edit, EditMode.add):
            return TypeOverridesHandling.drop, {}

    if column.foreign_keys:
        # Handled by relationship mapper
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, (PostgreSQLUUID, columns.UUID)):
        # UUIDs cannot be edited
        if mode in (EditMode.add, EditMode.edit):
            return TypeOverridesHandling.drop, {}
        # But let's show them
        return fields.UUID(), dict(missing=colander.drop, widget=FriendlyUUIDWidget(readonly=True))
    elif isinstance(column_type, Text):
        return colander.String(), dict(widget=deform.widget.TextAreaWidget())
    elif isinstance(column_type, JSONB):
        return JSONValue(), dict(widget=JSONWidget())
    elif isinstance(column_type, (JSONB, columns.JSONB)):
        # Can't edit JSON
        if mode in (EditMode.add, EditMode.edit):
            return TypeOverridesHandling.drop, {}
        return colander.String(), {}
    elif isinstance(column_type, LargeBinary):
        # Can't edit binary
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, Geometry):
        # Can't edit geometry
        return TypeOverridesHandling.drop, {}
    elif isinstance(column_type, (INET, columns.INET)):
        return colander.String(), {}
    else:
        # Default mapping / unknown, let the parent handle
        return TypeOverridesHandling.unknown, {}
def downgrade(): """Downgrade the database to an older revision.""" # ### commands auto generated by Alembic - please adjust! ### op.create_table('stacks', sa.Column('id', sa.INTEGER(), server_default=sa.text("nextval('stacks_id_seq'::regclass)"), nullable=False), sa.Column('is_ref_stack', sa.BOOLEAN(), autoincrement=False, nullable=False), sa.Column('stack_json', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='stacks_pkey'), postgresql_ignore_search_path=False) op.create_table('similar_components', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('fromcomponent', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('tocomponent', sa.TEXT(), autoincrement=False, nullable=False), sa.Column('similarity_distance', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False), sa.PrimaryKeyConstraint('id', name='similar_components_pkey'), sa.UniqueConstraint('fromcomponent', 'tocomponent', name='sim_comps')) op.create_table('similar_stacks', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('analysis', postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=True), sa.Column('similar_stack_id', sa.INTEGER(), autoincrement=False, nullable=False), sa.Column('similarity_value', postgresql.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=False), sa.Column('stack_id', sa.INTEGER(), autoincrement=False, nullable=False), sa.ForeignKeyConstraint(['similar_stack_id'], ['stacks.id'], name='similar_stacks_similar_stack_id_fkey'), sa.ForeignKeyConstraint(['stack_id'], ['stacks.id'], name='similar_stacks_stack_id_fkey'), sa.PrimaryKeyConstraint('id', name='similar_stacks_pkey'), sa.UniqueConstraint('stack_id', 'similar_stack_id', name='sim_unique')) op.create_table('esmarker', sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('worker_result_id', sa.INTEGER(), autoincrement=False, nullable=True), sa.ForeignKeyConstraint(['worker_result_id'], ['worker_results.id'], name='esmarker_worker_result_id_fkey'), sa.PrimaryKeyConstraint('id', name='esmarker_pkey')) # ### end Alembic commands ###
async def apply(self, bind=None, timeout=DEFAULT):
    cls = type(self._instance)
    values = self._values.copy()

    # handle JSON columns
    json_updates = {}
    for prop, value in self._props.items():
        value = prop.save(self._instance, value)
        updates = json_updates.setdefault(prop.column_name, {})
        if self._literal:
            updates[prop.name] = value
        else:
            if isinstance(value, int):
                value = sa.cast(value, sa.BigInteger)
            elif not isinstance(value, ClauseElement):
                value = sa.cast(value, sa.Unicode)
            updates[sa.cast(prop.name, sa.Unicode)] = value
    for column_name, updates in json_updates.items():
        column = getattr(cls, column_name)
        if self._literal:
            values[column_name] = column.concat(updates)
        else:
            if isinstance(column.type, sa_pg.JSONB):
                func = sa.func.jsonb_build_object
            else:
                func = sa.func.json_build_object
            values[column_name] = column.concat(
                func(*itertools.chain(*updates.items())))

    opts = dict(return_model=False)
    if timeout is not DEFAULT:
        opts['timeout'] = timeout
    clause = self._clause.values(
        **values,
    ).returning(
        *[getattr(cls, key) for key in values],
    ).execution_options(**opts)
    row = await cls.__metadata__.first(clause, bind=bind)
    if not row:
        raise NoSuchRowError()
    self._instance.__values__.update(row)
    for prop in self._props:
        prop.reload(self._instance)
    return self
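Outside that ORM helper, the same JSONB merge can be expressed directly with SQLAlchemy: concatenating a JSONB column with jsonb_build_object emits PostgreSQL's || operator. A small sketch against a hypothetical users table and settings column (names are illustrative):

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB

# Hypothetical table used only for illustration.
users = sa.table('users', sa.column('id', sa.Integer), sa.column('settings', JSONB))

# UPDATE users SET settings = settings || jsonb_build_object('theme', 'dark') WHERE id = 1
stmt = (
    users.update()
    .where(users.c.id == 1)
    .values(settings=users.c.settings.concat(sa.func.jsonb_build_object('theme', 'dark')))
)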
def upgrade():
    op.create_table('pubmed',
        # Meta
        sa.Column('meta_id', sa.Text, unique=True),
        sa.Column('meta_source', sa.Text),
        sa.Column('meta_created', sa.DateTime(timezone=True)),
        sa.Column('meta_updated', sa.DateTime(timezone=True)),
        # Medline
        sa.Column('pmid', sa.Text, primary_key=True),
        sa.Column('date_created', sa.Date),
        sa.Column('date_completed', sa.Date),
        sa.Column('date_revised', sa.Date),
        sa.Column('country', sa.Text),
        sa.Column('medline_ta', sa.Text),
        sa.Column('nlm_unique_id', sa.Text),
        sa.Column('issn_linking', sa.Text),
        # Journal
        sa.Column('journal_issn', sa.Text),
        sa.Column('journal_title', sa.Text),
        sa.Column('journal_iso', sa.Text),
        # Article
        sa.Column('article_title', sa.Text),
        sa.Column('article_abstract', sa.Text),
        sa.Column('article_authors', ARRAY(sa.Text)),
        sa.Column('article_language', sa.Text),
        sa.Column('article_publication_type_list', ARRAY(sa.Text)),
        sa.Column('article_vernacular_title', sa.Text),
        sa.Column('article_date', sa.Date),
        # Pubmed
        sa.Column('publication_status', sa.Text),
        sa.Column('identifiers_list', JSONB()),
    )
def upgrade():
    op.create_table('artifact_set_members',
                    sa.Column('set_id', sa.VARCHAR(length=40), nullable=False),
                    sa.Column('artifact_id', sa.VARCHAR(length=40), nullable=False),
                    sa.PrimaryKeyConstraint('set_id', 'artifact_id'))
    op.create_table('artifact_sets',
                    sa.Column('id', sa.INTEGER(), nullable=False),
                    sa.Column('set_id', sa.VARCHAR(length=40), nullable=True),
                    sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
                    sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('runs',
                    sa.Column('id', sa.VARCHAR(length=40), nullable=False),
                    sa.Column('hostname', sa.VARCHAR(length=256), nullable=True),
                    sa.Column('info', pg.JSONB(), nullable=True),
                    sa.Column('created_at', pg.TIMESTAMP(), nullable=True),
                    sa.PrimaryKeyConstraint('id'))
    op.create_table('artifacts',
                    sa.Column('id', sa.VARCHAR(length=40), nullable=False),
                    sa.Column('value_id', sa.VARCHAR(length=50), nullable=True),
                    sa.Column('run_id', sa.VARCHAR(length=40), nullable=True),
                    sa.Column('name', sa.VARCHAR(length=1000), nullable=True),
                    sa.Column('version', sa.INTEGER(), nullable=True),
                    sa.Column('fn_module', sa.VARCHAR(length=100), nullable=True),
                    sa.Column('fn_name', sa.VARCHAR(length=100), nullable=True),
                    sa.Column('composite', sa.BOOLEAN(), nullable=True),
                    sa.Column('value_id_duration', sa.FLOAT(), nullable=True),
                    sa.Column('compute_duration', sa.FLOAT(), nullable=True),
                    sa.Column('hash_duration', sa.FLOAT(), nullable=True),
                    sa.Column('computed_at', pg.TIMESTAMP(), nullable=True),
                    sa.Column('added_at', pg.TIMESTAMP(), nullable=True),
                    sa.Column('input_artifact_ids', pg.ARRAY(pg.VARCHAR(length=40)), nullable=True),
                    sa.Column('inputs_json', pg.JSONB(), nullable=True),
                    sa.Column('serializer', sa.VARCHAR(length=128), nullable=True),
                    sa.Column('load_kwargs', pg.JSONB(), nullable=True),
                    sa.Column('dump_kwargs', pg.JSONB(), nullable=True),
                    sa.Column('custom_fields', pg.JSONB(), nullable=True),
                    sa.ForeignKeyConstraint(['run_id'], ['runs.id'], ),
                    sa.PrimaryKeyConstraint('id'))