# batch.py

# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
# mypy: no-warn-return-any, allow-any-generics

from __future__ import annotations

from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import TYPE_CHECKING
from typing import Union

from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import ForeignKeyConstraint
from sqlalchemy import Index
from sqlalchemy import MetaData
from sqlalchemy import PrimaryKeyConstraint
from sqlalchemy import schema as sql_schema
from sqlalchemy import select
from sqlalchemy import Table
from sqlalchemy import types as sqltypes
from sqlalchemy.sql.schema import SchemaEventTarget
from sqlalchemy.util import OrderedDict
from sqlalchemy.util import topological

from ..util import exc
from ..util.sqla_compat import _columns_for_constraint
from ..util.sqla_compat import _copy
from ..util.sqla_compat import _copy_expression
from ..util.sqla_compat import _ensure_scope_for_ddl
from ..util.sqla_compat import _fk_is_self_referential
from ..util.sqla_compat import _idx_table_bound_expressions
from ..util.sqla_compat import _is_type_bound
from ..util.sqla_compat import _remove_column_from_collection
from ..util.sqla_compat import _resolve_for_variant
from ..util.sqla_compat import constraint_name_defined
from ..util.sqla_compat import constraint_name_string

if TYPE_CHECKING:
    from typing import Literal

    from sqlalchemy.engine import Dialect
    from sqlalchemy.sql.elements import ColumnClause
    from sqlalchemy.sql.elements import quoted_name
    from sqlalchemy.sql.functions import Function
    from sqlalchemy.sql.schema import Constraint
    from sqlalchemy.sql.type_api import TypeEngine

    from ..ddl.impl import DefaultImpl


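# Illustrative sketch (not part of this module's API): these classes are
# driven by the ``op.batch_alter_table()`` context manager in a migration
# script, e.g.::
#
#     with op.batch_alter_table("account") as batch_op:
#         batch_op.add_column(Column("last_name", String(80)))
#         batch_op.drop_column("old_field")
#
# Each call on ``batch_op`` is queued by BatchOperationsImpl below and, on
# exit, replayed either as individual ALTER statements or via a full
# "move and copy" table recreate (ApplyBatchImpl), depending on dialect
# capabilities and the ``recreate`` argument.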
class BatchOperationsImpl:
    def __init__(
        self,
        operations,
        table_name,
        schema,
        recreate,
        copy_from,
        table_args,
        table_kwargs,
        reflect_args,
        reflect_kwargs,
        naming_convention,
        partial_reordering,
    ):
        self.operations = operations
        self.table_name = table_name
        self.schema = schema
        if recreate not in ("auto", "always", "never"):
            raise ValueError(
                "recreate may be one of 'auto', 'always', or 'never'."
            )
        self.recreate = recreate
        self.copy_from = copy_from
        self.table_args = table_args
        self.table_kwargs = dict(table_kwargs)
        self.reflect_args = reflect_args
        self.reflect_kwargs = dict(reflect_kwargs)
        self.reflect_kwargs.setdefault(
            "listeners", list(self.reflect_kwargs.get("listeners", ()))
        )
        self.reflect_kwargs["listeners"].append(
            ("column_reflect", operations.impl.autogen_column_reflect)
        )
        self.naming_convention = naming_convention
        self.partial_reordering = partial_reordering
        self.batch = []

    @property
    def dialect(self) -> Dialect:
        return self.operations.impl.dialect

    @property
    def impl(self) -> DefaultImpl:
        return self.operations.impl

    def _should_recreate(self) -> bool:
        if self.recreate == "auto":
            return self.operations.impl.requires_recreate_in_batch(self)
        elif self.recreate == "always":
            return True
        else:
            return False

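    # flush() runs when the batch context exits: if no table recreate is
    # needed, each queued operation is replayed as a plain ALTER against
    # the live impl; otherwise the existing table is obtained (from
    # ``copy_from`` or by reflection) and handed to ApplyBatchImpl for a
    # "move and copy" rebuild.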
    def flush(self) -> None:
        should_recreate = self._should_recreate()

        with _ensure_scope_for_ddl(self.impl.connection):
            if not should_recreate:
                for opname, arg, kw in self.batch:
                    fn = getattr(self.operations.impl, opname)
                    fn(*arg, **kw)
            else:
                if self.naming_convention:
                    m1 = MetaData(naming_convention=self.naming_convention)
                else:
                    m1 = MetaData()

                if self.copy_from is not None:
                    existing_table = self.copy_from
                    reflected = False
                else:
                    if self.operations.migration_context.as_sql:
                        raise exc.CommandError(
                            f"This operation cannot proceed in --sql mode; "
                            f"batch mode with dialect "
                            f"{self.operations.migration_context.dialect.name} "  # noqa: E501
                            f"requires a live database connection with which "
                            f'to reflect the table "{self.table_name}". '
                            f"To generate a batch SQL migration script using "
                            "table "
                            '"move and copy", a complete Table object '
                            f'should be passed to the "copy_from" argument '
                            "of the batch_alter_table() method so that table "
                            "reflection can be skipped."
                        )

                    existing_table = Table(
                        self.table_name,
                        m1,
                        schema=self.schema,
                        autoload_with=self.operations.get_bind(),
                        *self.reflect_args,
                        **self.reflect_kwargs,
                    )
                    reflected = True

                batch_impl = ApplyBatchImpl(
                    self.impl,
                    existing_table,
                    self.table_args,
                    self.table_kwargs,
                    reflected,
                    partial_reordering=self.partial_reordering,
                )
                for opname, arg, kw in self.batch:
                    fn = getattr(batch_impl, opname)
                    fn(*arg, **kw)

                batch_impl._create(self.impl)

    def alter_column(self, *arg, **kw) -> None:
        self.batch.append(("alter_column", arg, kw))

    def add_column(self, *arg, **kw) -> None:
        if (
            "insert_before" in kw or "insert_after" in kw
        ) and not self._should_recreate():
            raise exc.CommandError(
                "Can't specify insert_before or insert_after when using "
                "ALTER; please specify recreate='always'"
            )
        self.batch.append(("add_column", arg, kw))

    def drop_column(self, *arg, **kw) -> None:
        self.batch.append(("drop_column", arg, kw))

    def add_constraint(self, const: Constraint) -> None:
        self.batch.append(("add_constraint", (const,), {}))

    def drop_constraint(self, const: Constraint) -> None:
        self.batch.append(("drop_constraint", (const,), {}))

    def rename_table(self, *arg, **kw):
        self.batch.append(("rename_table", arg, kw))

    def create_index(self, idx: Index, **kw: Any) -> None:
        self.batch.append(("create_index", (idx,), kw))

    def drop_index(self, idx: Index, **kw: Any) -> None:
        self.batch.append(("drop_index", (idx,), kw))

    def create_table_comment(self, table):
        self.batch.append(("create_table_comment", (table,), {}))

    def drop_table_comment(self, table):
        self.batch.append(("drop_table_comment", (table,), {}))

    def create_table(self, table):
        raise NotImplementedError("Can't create table in batch mode")

    def drop_table(self, table):
        raise NotImplementedError("Can't drop table in batch mode")

    def create_column_comment(self, column):
        self.batch.append(("create_column_comment", (column,), {}))


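# ApplyBatchImpl performs the "move and copy" recreate: it snapshots the
# columns, constraints and indexes of the existing Table, applies the
# batched changes to those in-memory copies, then builds a temporary
# table, copies the data across, drops the original and renames the
# temporary table into place (see _create()).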
class ApplyBatchImpl:
    def __init__(
        self,
        impl: DefaultImpl,
        table: Table,
        table_args: tuple,
        table_kwargs: Dict[str, Any],
        reflected: bool,
        partial_reordering: tuple = (),
    ) -> None:
        self.impl = impl
        self.table = table  # this is a Table object
        self.table_args = table_args
        self.table_kwargs = table_kwargs
        self.temp_table_name = self._calc_temp_name(table.name)
        self.new_table: Optional[Table] = None

        self.partial_reordering = partial_reordering  # tuple of tuples
        self.add_col_ordering: Tuple[
            Tuple[str, str], ...
        ] = ()  # tuple of tuples

        self.column_transfers = OrderedDict(
            (c.name, {"expr": c}) for c in self.table.c
        )
        self.existing_ordering = list(self.column_transfers)

        self.reflected = reflected
        self._grab_table_elements()

    @classmethod
    def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str:
        return ("_alembic_tmp_%s" % tablename)[0:50]

    def _grab_table_elements(self) -> None:
        schema = self.table.schema
        self.columns: Dict[str, Column[Any]] = OrderedDict()
        for c in self.table.c:
            c_copy = _copy(c, schema=schema)
            c_copy.unique = c_copy.index = False
            # ensure that the type object was copied,
            # as we may need to modify it in-place
            if isinstance(c.type, SchemaEventTarget):
                assert c_copy.type is not c.type
            self.columns[c.name] = c_copy
        self.named_constraints: Dict[str, Constraint] = {}
        self.unnamed_constraints = []
        self.col_named_constraints = {}
        self.indexes: Dict[str, Index] = {}
        self.new_indexes: Dict[str, Index] = {}

        for const in self.table.constraints:
            if _is_type_bound(const):
                continue
            elif (
                self.reflected
                and isinstance(const, CheckConstraint)
                and not const.name
            ):
                # TODO: we are skipping unnamed reflected CheckConstraint
                # because we have no way to determine _is_type_bound()
                # for these.
                pass
            elif constraint_name_string(const.name):
                self.named_constraints[const.name] = const
            else:
                self.unnamed_constraints.append(const)

        if not self.reflected:
            for col in self.table.c:
                for const in col.constraints:
                    if const.name:
                        self.col_named_constraints[const.name] = (col, const)

        for idx in self.table.indexes:
            self.indexes[idx.name] = idx  # type: ignore[index]

        for k in self.table.kwargs:
            self.table_kwargs.setdefault(k, self.table.kwargs[k])

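    # Column order for the rebuilt table is resolved as a topological sort
    # over ordering pairs: either the caller-supplied partial_reordering
    # tuples or the existing column order, plus any pairs accumulated in
    # add_col_ordering by add_column(insert_before=..., insert_after=...).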
    def _adjust_self_columns_for_partial_reordering(self) -> None:
        pairs = set()

        col_by_idx = list(self.columns)

        if self.partial_reordering:
            for tuple_ in self.partial_reordering:
                for index, elem in enumerate(tuple_):
                    if index > 0:
                        pairs.add((tuple_[index - 1], elem))
        else:
            for index, elem in enumerate(self.existing_ordering):
                if index > 0:
                    pairs.add((col_by_idx[index - 1], elem))

        pairs.update(self.add_col_ordering)

        # this can happen if some columns were dropped and not removed
        # from existing_ordering.  this should be prevented already, but
        # conservatively making sure this didn't happen
        pairs_list = [p for p in pairs if p[0] != p[1]]

        sorted_ = list(
            topological.sort(pairs_list, col_by_idx, deterministic_order=True)
        )
        self.columns = OrderedDict((k, self.columns[k]) for k in sorted_)
        self.column_transfers = OrderedDict(
            (k, self.column_transfers[k]) for k in sorted_
        )

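    # Builds the temporary table ("_alembic_tmp_<name>") in a fresh
    # MetaData, reusing the copied Column objects plus any constraints
    # whose columns all survive the batch; ForeignKeyConstraints
    # additionally get stub referent tables via _setup_referent().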
    def _transfer_elements_to_new_table(self) -> None:
        assert self.new_table is None, "Can only create new table once"

        m = MetaData()
        schema = self.table.schema

        if self.partial_reordering or self.add_col_ordering:
            self._adjust_self_columns_for_partial_reordering()

        self.new_table = new_table = Table(
            self.temp_table_name,
            m,
            *(list(self.columns.values()) + list(self.table_args)),
            schema=schema,
            **self.table_kwargs,
        )

        for const in (
            list(self.named_constraints.values()) + self.unnamed_constraints
        ):
            const_columns = {c.key for c in _columns_for_constraint(const)}

            if not const_columns.issubset(self.column_transfers):
                continue

            const_copy: Constraint
            if isinstance(const, ForeignKeyConstraint):
                if _fk_is_self_referential(const):
                    # for self-referential constraints, refer to the
                    # *original* table name, and not the _alembic_tmp_ name.
                    # This is consistent with how we're handling FK
                    # constraints from other tables; we assume SQLite, with
                    # foreign key enforcement off, just keeps the names
                    # unchanged, so when we rename back, they match again.
                    const_copy = _copy(
                        const, schema=schema, target_table=self.table
                    )
                else:
                    # "target_table" for ForeignKeyConstraint.copy() is
                    # only used if the FK is detected as being
                    # self-referential, which we are handling above.
                    const_copy = _copy(const, schema=schema)
            else:
                const_copy = _copy(
                    const, schema=schema, target_table=new_table
                )
            if isinstance(const, ForeignKeyConstraint):
                self._setup_referent(m, const)
            new_table.append_constraint(const_copy)

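    # Indexes are not transferred with the table above; they are recreated
    # after the rename, combining indexes present on the original table
    # with any added via create_index() during the batch.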
    def _gather_indexes_from_both_tables(self) -> List[Index]:
        assert self.new_table is not None
        idx: List[Index] = []

        for idx_existing in self.indexes.values():
            # this is a lift-and-move from Table.to_metadata

            if idx_existing._column_flag:
                continue

            idx_copy = Index(
                idx_existing.name,
                unique=idx_existing.unique,
                *[
                    _copy_expression(expr, self.new_table)
                    for expr in _idx_table_bound_expressions(idx_existing)
                ],
                _table=self.new_table,
                **idx_existing.kwargs,
            )
            idx.append(idx_copy)

        for index in self.new_indexes.values():
            idx.append(
                Index(
                    index.name,
                    unique=index.unique,
                    *[self.new_table.c[col] for col in index.columns.keys()],
                    **index.kwargs,
                )
            )
        return idx

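    # Foreign keys copied onto the temporary table need their referred-to
    # tables present in the temporary MetaData; _setup_referent() stubs
    # them out with NULLTYPE columns (or appends missing columns to an
    # existing stub) so constraint resolution succeeds without reflecting
    # the real referent table.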
    def _setup_referent(
        self, metadata: MetaData, constraint: ForeignKeyConstraint
    ) -> None:
        spec = constraint.elements[0]._get_colspec()
        parts = spec.split(".")
        tname = parts[-2]
        if len(parts) == 3:
            referent_schema = parts[0]
        else:
            referent_schema = None

        if tname != self.temp_table_name:
            key = sql_schema._get_table_key(tname, referent_schema)

            def colspec(elem: Any):
                return elem._get_colspec()

            if key in metadata.tables:
                t = metadata.tables[key]
                for elem in constraint.elements:
                    colname = colspec(elem).split(".")[-1]
                    if colname not in t.c:
                        t.append_column(Column(colname, sqltypes.NULLTYPE))
            else:
                Table(
                    tname,
                    metadata,
                    *[
                        Column(n, sqltypes.NULLTYPE)
                        for n in [
                            colspec(elem).split(".")[-1]
                            for elem in constraint.elements
                        ]
                    ],
                    schema=referent_schema,
                )

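    # _create() carries out the move and copy: create the temporary table,
    # INSERT ... FROM SELECT the transferable columns, drop the original
    # table, rename the temporary table to the original name, then
    # recreate indexes.  If the copy fails before the drop, the temporary
    # table is removed and the error re-raised.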
    def _create(self, op_impl: DefaultImpl) -> None:
        self._transfer_elements_to_new_table()

        op_impl.prep_table_for_batch(self, self.table)
        assert self.new_table is not None
        op_impl.create_table(self.new_table)
        try:
            op_impl._exec(
                self.new_table.insert()
                .inline()
                .from_select(
                    list(
                        k
                        for k, transfer in self.column_transfers.items()
                        if "expr" in transfer
                    ),
                    select(
                        *[
                            transfer["expr"]
                            for transfer in self.column_transfers.values()
                            if "expr" in transfer
                        ]
                    ),
                )
            )
            op_impl.drop_table(self.table)
        except:
            op_impl.drop_table(self.new_table)
            raise
        else:
            op_impl.rename_table(
                self.temp_table_name, self.table.name, schema=self.table.schema
            )
            self.new_table.name = self.table.name
            try:
                for idx in self._gather_indexes_from_both_tables():
                    op_impl.create_index(idx)
            finally:
                self.new_table.name = self.temp_table_name

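    # The methods below mutate the in-memory copies assembled in
    # _grab_table_elements() rather than emitting DDL; the changes take
    # effect when _create() builds and populates the new table.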
    def alter_column(
        self,
        table_name: str,
        column_name: str,
        nullable: Optional[bool] = None,
        server_default: Optional[Union[Function[Any], str, bool]] = False,
        name: Optional[str] = None,
        type_: Optional[TypeEngine] = None,
        autoincrement: Optional[Union[bool, Literal["auto"]]] = None,
        comment: Union[str, Literal[False]] = False,
        **kw,
    ) -> None:
        existing = self.columns[column_name]
        existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
        if name is not None and name != column_name:
            # note that we don't change '.key' - we keep referring
            # to the renamed column by its old key in _create().  neat!
            existing.name = name
            existing_transfer["name"] = name

        existing_type = kw.get("existing_type", None)
        if existing_type:
            resolved_existing_type = _resolve_for_variant(
                kw["existing_type"], self.impl.dialect
            )

            # pop named constraints for Boolean/Enum for rename
            if (
                isinstance(resolved_existing_type, SchemaEventTarget)
                and resolved_existing_type.name  # type:ignore[attr-defined]  # noqa: E501
            ):
                self.named_constraints.pop(
                    resolved_existing_type.name,  # type:ignore[attr-defined]  # noqa: E501
                    None,
                )

        if type_ is not None:
            type_ = sqltypes.to_instance(type_)

            # the old type is being discarded, so turn off its eventing
            # rules.  Alternatively we could erase the events set up by this
            # type, but this is simpler.  we also ignore the drop_constraint
            # that will come here from
            # Operations.implementation_for(alter_column)
            if isinstance(existing.type, SchemaEventTarget):
                existing.type._create_events = (  # type:ignore[attr-defined]
                    existing.type.create_constraint  # type:ignore[attr-defined] # noqa
                ) = False

            self.impl.cast_for_batch_migrate(
                existing, existing_transfer, type_
            )

            existing.type = type_

            # we *don't*, however, set events for the new type, because
            # alter_column is invoked from
            # Operations.implementation_for(alter_column), which will
            # already emit an add_constraint()

        if nullable is not None:
            existing.nullable = nullable
        if server_default is not False:
            if server_default is None:
                existing.server_default = None
            else:
                sql_schema.DefaultClause(
                    server_default  # type: ignore[arg-type]
                )._set_parent(existing)
        if autoincrement is not None:
            existing.autoincrement = bool(autoincrement)
        if comment is not False:
            existing.comment = comment

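    # Translates insert_before/insert_after hints for a newly added column
    # into ordering pairs stored in add_col_ordering; with no hint and no
    # partial_reordering, the new column is simply appended after the last
    # existing column.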
    def _setup_dependencies_for_add_column(
        self,
        colname: str,
        insert_before: Optional[str],
        insert_after: Optional[str],
    ) -> None:
        index_cols = self.existing_ordering
        col_indexes = {name: i for i, name in enumerate(index_cols)}

        if not self.partial_reordering:
            if insert_after:
                if not insert_before:
                    if insert_after in col_indexes:
                        # insert after an existing column
                        idx = col_indexes[insert_after] + 1
                        if idx < len(index_cols):
                            insert_before = index_cols[idx]
                    else:
                        # insert after a column that is also new
                        insert_before = dict(self.add_col_ordering)[
                            insert_after
                        ]
            if insert_before:
                if not insert_after:
                    if insert_before in col_indexes:
                        # insert before an existing column
                        idx = col_indexes[insert_before] - 1
                        if idx >= 0:
                            insert_after = index_cols[idx]
                    else:
                        # insert before a column that is also new
                        insert_after = {
                            b: a for a, b in self.add_col_ordering
                        }[insert_before]

        if insert_before:
            self.add_col_ordering += ((colname, insert_before),)
        if insert_after:
            self.add_col_ordering += ((insert_after, colname),)

        if (
            not self.partial_reordering
            and not insert_before
            and not insert_after
            and col_indexes
        ):
            self.add_col_ordering += ((index_cols[-1], colname),)

    def add_column(
        self,
        table_name: str,
        column: Column[Any],
        insert_before: Optional[str] = None,
        insert_after: Optional[str] = None,
        **kw,
    ) -> None:
        self._setup_dependencies_for_add_column(
            column.name, insert_before, insert_after
        )
        # we copy the column because operations.add_column()
        # gives us a Column that is part of a Table already.
        self.columns[column.name] = _copy(column, schema=self.table.schema)
        self.column_transfers[column.name] = {}

    def drop_column(
        self,
        table_name: str,
        column: Union[ColumnClause[Any], Column[Any]],
        **kw,
    ) -> None:
        if column.name in self.table.primary_key.columns:
            _remove_column_from_collection(
                self.table.primary_key.columns, column
            )
        del self.columns[column.name]
        del self.column_transfers[column.name]
        self.existing_ordering.remove(column.name)

        # pop named constraints for Boolean/Enum for rename
        if (
            "existing_type" in kw
            and isinstance(kw["existing_type"], SchemaEventTarget)
            and kw["existing_type"].name  # type:ignore[attr-defined]
        ):
            self.named_constraints.pop(
                kw["existing_type"].name, None  # type:ignore[attr-defined]
            )

    def create_column_comment(self, column):
        """the batch table creation function will issue create_column_comment
        on the real "impl" as part of the create table process.

        That is, the Column object will have the comment on it already,
        so when it is received by add_column() it will be a normal part of
        the CREATE TABLE and doesn't need an extra step here.

        """

    def create_table_comment(self, table):
        """the batch table creation function will issue create_table_comment
        on the real "impl" as part of the create table process.

        """

    def drop_table_comment(self, table):
        """the batch table creation function will issue drop_table_comment
        on the real "impl" as part of the create table process.

        """

    def add_constraint(self, const: Constraint) -> None:
        if not constraint_name_defined(const.name):
            raise ValueError("Constraint must have a name")
        if isinstance(const, sql_schema.PrimaryKeyConstraint):
            if self.table.primary_key in self.unnamed_constraints:
                self.unnamed_constraints.remove(self.table.primary_key)

        if constraint_name_string(const.name):
            self.named_constraints[const.name] = const
        else:
            self.unnamed_constraints.append(const)

    def drop_constraint(self, const: Constraint) -> None:
        if not const.name:
            raise ValueError("Constraint must have a name")
        try:
            if const.name in self.col_named_constraints:
                col, const = self.col_named_constraints.pop(const.name)

                for col_const in list(self.columns[col.name].constraints):
                    if col_const.name == const.name:
                        self.columns[col.name].constraints.remove(col_const)
            elif constraint_name_string(const.name):
                const = self.named_constraints.pop(const.name)
            elif const in self.unnamed_constraints:
                self.unnamed_constraints.remove(const)
        except KeyError:
            if _is_type_bound(const):
                # type-bound constraints are only included in the new
                # table via their type object in any case, so ignore the
                # drop_constraint() that comes here via the
                # Operations.implementation_for(alter_column)
                return
            raise ValueError("No such constraint: '%s'" % const.name)
        else:
            if isinstance(const, PrimaryKeyConstraint):
                for col in const.columns:
                    self.columns[col.name].primary_key = False

    def create_index(self, idx: Index) -> None:
        self.new_indexes[idx.name] = idx  # type: ignore[index]

    def drop_index(self, idx: Index) -> None:
        try:
            del self.indexes[idx.name]  # type: ignore[arg-type]
        except KeyError:
            raise ValueError("No such index: '%s'" % idx.name)

    def rename_table(self, *arg, **kw):
        raise NotImplementedError("TODO")