DDL Internals#

These are some of the constructs used to generate migration instructions. The APIs here build on the sqlalchemy.schema.DDLElement construct and SQLAlchemy's Custom SQL Constructs and Compilation Extension system.

For programmatic usage of Alembic’s migration directives, the easiest route is to use the higher level functions given by Operation Directives.
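
As an illustration of the lower-level layer, these constructs plug into SQLAlchemy's compiler extension in the same way the visit_* functions listed below do. A minimal sketch, assuming a hypothetical dialect named "mydialect" (the registration is only consulted when such a dialect is actually in use):

    from sqlalchemy.ext.compiler import compiles

    from alembic.ddl.base import RenameTable, alter_table, format_table_name


    @compiles(RenameTable, "mydialect")  # "mydialect" is a hypothetical dialect name
    def _visit_rename_table(element, compiler, **kw):
        # emit "ALTER TABLE <old> RENAME TO <new>" using the formatting
        # helpers provided by alembic.ddl.base
        return "%s RENAME TO %s" % (
            alter_table(compiler, element.table_name, element.schema),
            format_table_name(compiler, element.new_table_name, element.schema),
        )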

class alembic.ddl.base.AddColumn(name: str, column: Column[Any], schema: quoted_name | str | None = None)#
class alembic.ddl.base.AlterColumn(name: str, column_name: str, schema: str | None = None, existing_type: TypeEngine | None = None, existing_nullable: bool | None = None, existing_server_default: _ServerDefault | None = None, existing_comment: str | None = None)#
class alembic.ddl.base.AlterTable(table_name: str, schema: quoted_name | str | None = None)#

Represent an ALTER TABLE statement.

Only the string name and optional schema name of the table are required, not a full Table object.
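
A minimal sketch constructing and compiling such an element directly; RenameTable (an AlterTable subclass) is used here, and the table names plus the in-memory SQLite dialect are only illustrative:

    from sqlalchemy import create_engine

    from alembic.ddl.base import RenameTable

    # only string names are needed, not Table objects
    element = RenameTable("old_account", "account", schema=None)
    engine = create_engine("sqlite://")
    print(element.compile(dialect=engine.dialect))
    # ALTER TABLE old_account RENAME TO account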

class alembic.ddl.base.ColumnComment(name: str, column_name: str, comment: str | None, **kw)#
class alembic.ddl.base.ColumnDefault(name: str, column_name: str, default: _ServerDefault | None, **kw)#
class alembic.ddl.base.ColumnName(name: str, column_name: str, newname: str, **kw)#
class alembic.ddl.base.ColumnNullable(name: str, column_name: str, nullable: bool, **kw)#
class alembic.ddl.base.ColumnType(name: str, column_name: str, type_: TypeEngine, **kw)#
class alembic.ddl.base.ComputedColumnDefault(name: str, column_name: str, default: Computed | None, **kw)#
class alembic.ddl.base.DropColumn(name: str, column: Column[Any], schema: str | None = None)#
class alembic.ddl.base.IdentityColumnDefault(name: str, column_name: str, default: Identity | None, impl: DefaultImpl, **kw)#
class alembic.ddl.base.RenameTable(old_table_name: str, new_table_name: quoted_name | str, schema: quoted_name | str | None = None)#
alembic.ddl.base.add_column(compiler: DDLCompiler, column: Column[Any], **kw) str#
alembic.ddl.base.alter_column(compiler: DDLCompiler, name: str) str#
alembic.ddl.base.alter_table(compiler: DDLCompiler, name: str, schema: str | None) str#
alembic.ddl.base.drop_column(compiler: DDLCompiler, name: str, **kw) str#
alembic.ddl.base.format_column_name(compiler: DDLCompiler, name: quoted_name | str | None) quoted_name | str#
alembic.ddl.base.format_server_default(compiler: DDLCompiler, default: _ServerDefault | None) str#
alembic.ddl.base.format_table_name(compiler: Compiled, name: quoted_name | str, schema: quoted_name | str | None) quoted_name | str#
alembic.ddl.base.format_type(compiler: DDLCompiler, type_: TypeEngine) str#
alembic.ddl.base.quote_dotted(name: quoted_name | str, quote: partial) quoted_name | str#

Quote the elements of a dotted name.

alembic.ddl.base.visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_column_default(element: ColumnDefault, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_column_nullable(element: ColumnNullable, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_column_type(element: ColumnType, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_computed_column(element: ComputedColumnDefault, compiler: DDLCompiler, **kw)#
alembic.ddl.base.visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) str#
alembic.ddl.base.visit_identity_column(element: IdentityColumnDefault, compiler: DDLCompiler, **kw)#
alembic.ddl.base.visit_rename_table(element: RenameTable, compiler: DDLCompiler, **kw) str#
class alembic.ddl.impl.DefaultImpl(dialect: Dialect, connection: Connection | None, as_sql: bool, transactional_ddl: bool | None, output_buffer: TextIO | None, context_opts: Dict[str, Any])#

Provide the entrypoint for major migration operations, including database-specific behavioral variances.

While individual SQL/DDL constructs already provide for database-specific implementations, variances here allow for entirely different sequences of operations to take place for a particular migration, such as SQL Server’s special ‘IDENTITY INSERT’ step for bulk inserts.
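
Third party backends typically supply their own DefaultImpl subclass. A minimal sketch, assuming a hypothetical dialect named "mydialect"; the __dialect__ attribute is the key under which ImplMeta registers the class and under which get_by_dialect() finds it:

    from alembic.ddl.impl import DefaultImpl


    class MyDialectImpl(DefaultImpl):
        # registered under this dialect name by the ImplMeta metaclass
        __dialect__ = "mydialect"

        # declare whether the backend can run DDL inside a transaction
        transactional_ddl = True

        # extend the default synonym sets so autogenerate does not flag
        # equivalent type pairs as changes (illustrative values)
        type_synonyms = DefaultImpl.type_synonyms + ({"BOOL", "TINYINT"},)

        def start_migrations(self):
            # per-migration-run setup hook; the base implementation is a no-op
            super().start_migrations()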

add_column(table_name: str, column: Column[Any], schema: str | quoted_name | None = None) None#
add_constraint(const: Any) None#
adjust_reflected_dialect_options(reflected_object: Dict[str, Any], kind: str) Dict[str, Any]#
alter_column(table_name: str, column_name: str, nullable: bool | None = None, server_default: _ServerDefault | Literal[False] = False, name: str | None = None, type_: TypeEngine | None = None, schema: str | None = None, autoincrement: bool | None = None, comment: str | Literal[False] | None = False, existing_comment: str | None = None, existing_type: TypeEngine | None = None, existing_server_default: _ServerDefault | None = None, existing_nullable: bool | None = None, existing_autoincrement: bool | None = None, **kw: Any) None#
autogen_column_reflect(inspector, table, column_info)#

A hook that is attached to the ‘column_reflect’ event for when a Table is reflected from the database during the autogenerate process.

Dialects can elect to modify the information gathered here.
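
For illustration, a custom impl (such as the hypothetical MyDialectImpl sketched above) might override this hook to normalize reflected column information before autogenerate compares it with the metadata:

    from alembic.ddl.impl import DefaultImpl


    class MyDialectImpl(DefaultImpl):
        __dialect__ = "mydialect"  # hypothetical dialect name

        def autogen_column_reflect(self, inspector, table, column_info):
            # column_info is the reflected dictionary that autogenerate will
            # compare against the metadata; e.g. strip redundant parentheses
            # from a reflected server default string
            default = column_info.get("default")
            if isinstance(default, str):
                column_info["default"] = default.strip("()")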

property bind: Connection | None#
bulk_insert(table: TableClause | Table, rows: List[dict], multiinsert: bool = True) None#
cast_for_batch_migrate(existing, existing_transfer, new_type)#
command_terminator = ';'#
compare_indexes(metadata_index: Index, reflected_index: Index) ComparisonResult#

Compare two indexes by comparing the signature generated by create_index_sig.

This method returns a ComparisonResult.

compare_server_default(inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default)#
compare_type(inspector_column: Column[Any], metadata_column: Column) bool#

Returns True if there ARE differences between the types of the two columns. Takes impl.type_synonyms into account when comparing reflected and metadata types.

compare_unique_constraint(metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) ComparisonResult#

Compare two unique constraints by comparing the two signatures.

The two constraints are compared using the signatures generated by create_unique_constraint_sig.

This method returns a ComparisonResult.

correct_for_autogen_constraints(conn_uniques: Set[UniqueConstraint], conn_indexes: Set[Index], metadata_unique_constraints: Set[UniqueConstraint], metadata_indexes: Set[Index]) None#
correct_for_autogen_foreignkeys(conn_fks: Set[ForeignKeyConstraint], metadata_fks: Set[ForeignKeyConstraint]) None#
create_column_comment(column: ColumnElement[Any]) None#
create_index(index: Index, **kw: Any) None#
create_table(table: Table, **kw: Any) None#
create_table_comment(table: Table) None#
drop_column(table_name: str, column: Column[Any], schema: str | None = None, **kw) None#
drop_constraint(const: Constraint) None#
drop_index(index: Index, **kw: Any) None#
drop_table(table: Table, **kw: Any) None#
drop_table_comment(table: Table) None#
emit_begin() None#

Emit the string BEGIN, or the backend-specific equivalent, on the current connection context.

This is used in offline mode and typically via EnvironmentContext.begin_transaction().

emit_commit() None#

Emit the string COMMIT, or the backend-specific equivalent, on the current connection context.

This is used in offline mode and typically via EnvironmentContext.begin_transaction().

execute(sql: Executable | str, execution_options: dict[str, Any] | None = None) None#
classmethod get_by_dialect(dialect: Dialect) Type[DefaultImpl]#
identity_attrs_ignore: Tuple[str, ...] = ('order', 'on_null')#
prep_table_for_batch(batch_impl: ApplyBatchImpl, table: Table) None#

Perform any operations needed on a table before a new one is created to replace it in batch mode.

The PG dialect uses this to drop constraints on the table before the new table is created using those same constraint names.

rename_table(old_table_name: str, new_table_name: str | quoted_name, schema: str | quoted_name | None = None) None#
render_ddl_sql_expr(expr: ClauseElement, is_server_default: bool = False, **kw: Any) str#

Render a SQL expression that is typically a server default, index expression, etc.

render_type(type_obj: TypeEngine, autogen_context: AutogenContext) str | Literal[False]#
requires_recreate_in_batch(batch_op: BatchOperationsImpl) bool#

Return True if the given BatchOperationsImpl would need the table to be recreated and copied in order to proceed.

Normally, only returns True on SQLite when operations other than add_column are present.
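
This check is consulted by op.batch_alter_table() when its recreate parameter is left at "auto". A usage sketch from a migration script, with illustrative table and column names:

    import sqlalchemy as sa
    from alembic import op


    def upgrade():
        # on SQLite, the alter_column here (anything other than add_column)
        # causes the batch context to recreate and copy the table
        with op.batch_alter_table("account", recreate="auto") as batch_op:
            batch_op.alter_column("name", type_=sa.String(length=100))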

start_migrations() None#

A hook called when EnvironmentContext.run_migrations() is called.

Implementations can set up per-migration-run state here.

static_output(text: str) None#
transactional_ddl = False#
type_arg_extract: Sequence[str] = ()#
type_synonyms: Tuple[Set[str], ...] = ({'DECIMAL', 'NUMERIC'},)#
version_table_impl(*, version_table: str, version_table_schema: str | None, version_table_pk: bool, **kw: Any) Table#

Generate a Table object which will be used as the structure for the Alembic version table.

Third party dialects may override this hook to provide an alternate structure for this Table; the only requirements are that it be named based on the version_table parameter and that it contain at least a single string-holding column named version_num.

New in version 1.14.
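
A sketch of such an override, continuing the hypothetical MyDialectImpl above; the extra audit column is purely illustrative, while version_num satisfies the stated requirement:

    from sqlalchemy import (
        Column, DateTime, MetaData, PrimaryKeyConstraint, String, Table, func,
    )

    from alembic.ddl.impl import DefaultImpl


    class MyDialectImpl(DefaultImpl):
        __dialect__ = "mydialect"  # hypothetical dialect name

        def version_table_impl(self, *, version_table, version_table_schema,
                               version_table_pk, **kw):
            vt = Table(
                version_table,  # must be named from this parameter
                MetaData(),
                Column("version_num", String(32), nullable=False),
                # illustrative extra column; not required by Alembic
                Column("applied_at", DateTime(), server_default=func.now()),
                schema=version_table_schema,
            )
            if version_table_pk:
                vt.append_constraint(
                    PrimaryKeyConstraint("version_num", name="%s_pkc" % version_table)
                )
            return vt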

class alembic.ddl.impl.ImplMeta(classname: str, bases: Tuple[Type[DefaultImpl]], dict_: Dict[str, Any])#
class alembic.ddl.impl.Params(token0, tokens, args, kwargs)#

Create new instance of Params(token0, tokens, args, kwargs)

args: List[str]#

Alias for field number 2

kwargs: Dict[str, str]#

Alias for field number 3

token0: str#

Alias for field number 0

tokens: List[str]#

Alias for field number 1

MySQL#

class alembic.ddl.mysql.MariaDBImpl(dialect: Dialect, connection: Connection | None, as_sql: bool, transactional_ddl: bool | None, output_buffer: TextIO | None, context_opts: Dict[str, Any])#

Bases: MySQLImpl

memo: dict#
class alembic.ddl.mysql.MySQLAlterDefault(name: str, column_name: str, default: _ServerDefault, schema: str | None = None)#

Bases: AlterColumn

class alembic.ddl.mysql.MySQLChangeColumn(name: str, column_name: str, schema: str | None = None, newname: str | None = None, type_: TypeEngine | None = None, nullable: bool | None = None, default: _ServerDefault | Literal[False] | None = False, autoincrement: bool | None = None, comment: str | Literal[False] | None = False)#

Bases: AlterColumn

class alembic.ddl.mysql.MySQLImpl(dialect: Dialect, connection: Connection | None, as_sql: bool, transactional_ddl: bool | None, output_buffer: TextIO | None, context_opts: Dict[str, Any])#

Bases: DefaultImpl

alter_column(table_name: str, column_name: str, nullable: bool | None = None, server_default: _ServerDefault | Literal[False] = False, name: str | None = None, type_: TypeEngine | None = None, schema: str | None = None, existing_type: TypeEngine | None = None, existing_server_default: _ServerDefault | None = None, existing_nullable: bool | None = None, autoincrement: bool | None = None, existing_autoincrement: bool | None = None, comment: str | Literal[False] | None = False, existing_comment: str | None = None, **kw: Any) None#
compare_server_default(inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default)#
correct_for_autogen_constraints(conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes)#
correct_for_autogen_foreignkeys(conn_fks, metadata_fks)#
drop_constraint(const: Constraint) None#
memo: dict#
transactional_ddl = False#
type_arg_extract: Sequence[str] = ['character set ([\\w\\-_]+)', 'collate ([\\w\\-_]+)']#
type_synonyms: Tuple[Set[str], ...] = ({'DECIMAL', 'NUMERIC'}, {'BOOL', 'TINYINT'}, {'JSON', 'LONGTEXT'})#
class alembic.ddl.mysql.MySQLModifyColumn(name: str, column_name: str, schema: str | None = None, newname: str | None = None, type_: TypeEngine | None = None, nullable: bool | None = None, default: _ServerDefault | Literal[False] | None = False, autoincrement: bool | None = None, comment: str | Literal[False] | None = False)#

Bases: MySQLChangeColumn
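
In practice, MySQL's ALTER ... MODIFY/CHANGE syntax re-states the entire column definition, so directives handled by this impl should pass the existing_* values for attributes that are not being changed. A usage sketch with illustrative names and types:

    import sqlalchemy as sa
    from alembic import op


    def upgrade():
        # without existing_type (and, where applicable, existing_server_default
        # or existing_nullable), MySQL would lose those attributes when the
        # column definition is re-stated
        op.alter_column(
            "account",
            "name",
            existing_type=sa.String(length=50),
            nullable=False,
        )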

MS-SQL#

class alembic.ddl.mssql.MSSQLImpl(*arg, **kw)#

Bases: DefaultImpl

adjust_reflected_dialect_options(reflected_object: Dict[str, Any], kind: str) Dict[str, Any]#
alter_column(table_name: str, column_name: str, nullable: bool | None = None, server_default: _ServerDefault | Literal[False] | None = False, name: str | None = None, type_: TypeEngine | None = None, schema: str | None = None, existing_type: TypeEngine | None = None, existing_server_default: _ServerDefault | None = None, existing_nullable: bool | None = None, **kw: Any) None#
batch_separator = 'GO'#
bulk_insert(table: TableClause | Table, rows: List[dict], **kw: Any) None#
compare_server_default(inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default)#
create_index(index: Index, **kw: Any) None#
drop_column(table_name: str, column: Column[Any], schema: str | None = None, **kw) None#
emit_begin() None#

Emit the string BEGIN, or the backend-specific equivalent, on the current connection context.

This is used in offline mode and typically via EnvironmentContext.begin_transaction().

emit_commit() None#

Emit the string COMMIT, or the backend-specific equivalent, on the current connection context.

This is used in offline mode and typically via EnvironmentContext.begin_transaction().

identity_attrs_ignore: Tuple[str, ...] = ('order', 'on_null', 'minvalue', 'maxvalue', 'nominvalue', 'nomaxvalue', 'cycle', 'cache')#
memo: dict#
transactional_ddl = True#
type_synonyms: Tuple[Set[str], ...] = ({'DECIMAL', 'NUMERIC'}, {'NVARCHAR', 'VARCHAR'})#
alembic.ddl.mssql.mssql_add_column(compiler: MSDDLCompiler, column: Column[Any], **kw) str#
alembic.ddl.mssql.visit_add_column(element: AddColumn, compiler: MSDDLCompiler, **kw) str#
alembic.ddl.mssql.visit_column_default(element: ColumnDefault, compiler: MSDDLCompiler, **kw) str#
alembic.ddl.mssql.visit_column_nullable(element: ColumnNullable, compiler: MSDDLCompiler, **kw) str#
alembic.ddl.mssql.visit_column_type(element: ColumnType, compiler: MSDDLCompiler, **kw) str#
alembic.ddl.mssql.visit_rename_column(element: ColumnName, compiler: MSDDLCompiler, **kw) str#
alembic.ddl.mssql.visit_rename_table(element: RenameTable, compiler: MSDDLCompiler, **kw) str#
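
As a usage note, the MSSQL impl's drop_column() honors dialect-specific keyword arguments that first drop objects bound to the column. A sketch with illustrative names:

    from alembic import op


    def upgrade():
        # SQL Server refuses to drop a column that still has a bound DEFAULT
        # or CHECK constraint; these flags emit the extra DROP statements first
        op.drop_column(
            "account",
            "legacy_flag",
            mssql_drop_default=True,
            mssql_drop_check=True,
        )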

Postgresql#

class alembic.ddl.postgresql.CreateExcludeConstraintOp(constraint_name: sqla_compat._ConstraintName, table_name: str | quoted_name, elements: Sequence[Tuple[str, str]] | Sequence[Tuple[ColumnClause[Any], str]], where: ColumnElement[bool] | str | None = None, schema: str | None = None, _orig_constraint: ExcludeConstraint | None = None, **kw)#

Bases: AddConstraintOp

Represent a create exclude constraint operation.

classmethod batch_create_exclude_constraint(operations: BatchOperations, constraint_name: str, *elements: Any, **kw: Any) Table | None#

This method is proxied on the BatchOperations class, via the BatchOperations.create_exclude_constraint() method.

constraint_type = 'exclude'#
classmethod create_exclude_constraint(operations: Operations, constraint_name: str, table_name: str, *elements: Any, **kw: Any) Table | None#

This method is proxied on the Operations class, via the Operations.create_exclude_constraint() method.

classmethod from_constraint(constraint: ExcludeConstraint) CreateExcludeConstraintOp#
to_constraint(migration_context: MigrationContext | None = None) ExcludeConstraint#
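
A usage sketch of the proxied directive in a migration script, with illustrative table, column, and operator choices (PostgreSQL only):

    from alembic import op


    def upgrade():
        # disallow overlapping booking periods for the same room
        op.create_exclude_constraint(
            "uq_room_booking_no_overlap",
            "room_booking",
            ("room_id", "="),
            ("period", "&&"),
            where="NOT cancelled",
        )

Mixing equality and range operators in a single GiST exclusion constraint generally requires the btree_gist extension on the PostgreSQL side.
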
class alembic.ddl.postgresql.PostgresqlColumnType(name: str, column_name: str, type_: TypeEngine, **kw)#

Bases: AlterColumn

class alembic.ddl.postgresql.PostgresqlImpl(dialect: Dialect, connection: Connection | None, as_sql: bool, transactional_ddl: bool | None, output_buffer: TextIO | None, context_opts: Dict[str, Any])#

Bases: DefaultImpl

adjust_reflected_dialect_options(reflected_options: Dict[str, Any], kind: str) Dict[str, Any]#
alter_column(table_name: str, column_name: str, nullable: bool | None = None, server_default: _ServerDefault | Literal[False] = False, name: str | None = None, type_: TypeEngine | None = None, schema: str | None = None, autoincrement: bool | None = None, existing_type: TypeEngine | None = None, existing_server_default: _ServerDefault | None = None, existing_nullable: bool | None = None, existing_autoincrement: bool | None = None, **kw: Any) None#
autogen_column_reflect(inspector, table, column_info)#

A hook that is attached to the ‘column_reflect’ event for when a Table is reflected from the database during the autogenerate process.

Dialects can elect to modify the information gathered here.

compare_indexes(metadata_index: Index, reflected_index: Index) ComparisonResult#

Compare two indexes by comparing the signature generated by create_index_sig.

This method returns a ComparisonResult.

compare_server_default(inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default)#
compare_unique_constraint(metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) ComparisonResult#

Compare two unique constraints by comparing the two signatures.

The two constraints are compared using the signatures generated by create_unique_constraint_sig.

This method returns a ComparisonResult.

correct_for_autogen_constraints(conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes)#
create_index(index: Index, **kw: Any) None#
memo: dict#
prep_table_for_batch(batch_impl, table)#

Perform any operations needed on a table before a new one is created to replace it in batch mode.

The PG dialect uses this to drop constraints on the table before the new table is created using those same constraint names.

render_ddl_sql_expr(expr: ClauseElement, is_server_default: bool = False, is_index: bool = False, **kw: Any) str#

Render a SQL expression that is typically a server default, index expression, etc.

render_type(type_: TypeEngine, autogen_context: AutogenContext) str | Literal[False]#
transactional_ddl = True#
type_synonyms: Tuple[Set[str], ...] = ({'DECIMAL', 'NUMERIC'}, {'DOUBLE PRECISION', 'FLOAT'})#
alembic.ddl.postgresql.visit_column_comment(element: ColumnComment, compiler: PGDDLCompiler, **kw) str#
alembic.ddl.postgresql.visit_column_type(element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw) str#
alembic.ddl.postgresql.visit_identity_column(element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw)#
alembic.ddl.postgresql.visit_rename_table(element: RenameTable, compiler: PGDDLCompiler, **kw) str#

SQLite#

class alembic.ddl.sqlite.SQLiteImpl(dialect: Dialect, connection: Connection | None, as_sql: bool, transactional_ddl: bool | None, output_buffer: TextIO | None, context_opts: Dict[str, Any])#

Bases: DefaultImpl

add_constraint(const: Constraint)#
autogen_column_reflect(inspector: Inspector, table: Table, column_info: Dict[str, Any]) None#

A hook that is attached to the ‘column_reflect’ event for when a Table is reflected from the database during the autogenerate process.

Dialects can elect to modify the information gathered here.

cast_for_batch_migrate(existing: Column[Any], existing_transfer: Dict[str, TypeEngine | Cast], new_type: TypeEngine) None#
compare_server_default(inspector_column: Column[Any], metadata_column: Column[Any], rendered_metadata_default: str | None, rendered_inspector_default: str | None) bool#
correct_for_autogen_constraints(conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes)#
drop_constraint(const: Constraint)#
memo: dict#
render_ddl_sql_expr(expr: ClauseElement, is_server_default: bool = False, **kw) str#

Render a SQL expression that is typically a server default, index expression, etc.

requires_recreate_in_batch(batch_op: BatchOperationsImpl) bool#

Return True if the given BatchOperationsImpl would need the table to be recreated and copied in order to proceed.

Normally, only returns True on SQLite when operations other than add_column are present.

transactional_ddl = False#

SQLite supports transactional DDL, but pysqlite does not; see http://bugs.python.org/issue10740

alembic.ddl.sqlite.visit_rename_table(element: RenameTable, compiler: DDLCompiler, **kw) str#