jonghanko committed
Commit 6883b42 · verified · Parent: f9ba9d8

Add files using upload-large-folder tool
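For context: the "upload-large-folder tool" named in the commit message is the large-folder upload path in the huggingface_hub client, which splits a big local directory (here, a whole committed .venv tree) into many chunked commits like this one. Below is a minimal sketch of how such a commit is typically produced, assuming huggingface_hub >= 0.25; the repo id and repo type are placeholders, since the target repository is not shown on this page:

    # Hedged sketch: repo_id and repo_type are placeholders, not taken from this commit.
    from huggingface_hub import HfApi

    api = HfApi()  # picks up the token stored by `huggingface-cli login`
    api.upload_large_folder(
        repo_id="user/repo",   # hypothetical target repository
        repo_type="model",     # assumption; could be "dataset" or "space"
        folder_path=".",       # local dir containing Scripts_RSCM_sim_growth_n_climate_to_Yield/
    )

The same flow is exposed on the command line as `huggingface-cli upload-large-folder <repo_id> <local_path> --repo-type=<type>`.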

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/__init__.py +4 -0
  2. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/__main__.py +4 -0
  3. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/__init__.py +10 -0
  4. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/api.py +650 -0
  5. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/compare.py +1370 -0
  6. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/render.py +1172 -0
  7. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/rewriter.py +240 -0
  8. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/command.py +835 -0
  9. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/config.py +1020 -0
  10. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/context.py +5 -0
  11. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/context.pyi +856 -0
  12. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/__init__.py +6 -0
  13. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/_autogen.py +329 -0
  14. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/base.py +364 -0
  15. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/impl.py +902 -0
  16. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/mssql.py +421 -0
  17. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/mysql.py +495 -0
  18. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/oracle.py +202 -0
  19. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/postgresql.py +854 -0
  20. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/sqlite.py +237 -0
  21. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/environment.py +1 -0
  22. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/migration.py +1 -0
  23. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/op.py +5 -0
  24. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/op.pyi +1356 -0
  25. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/__init__.py +15 -0
  26. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/base.py +1923 -0
  27. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/batch.py +718 -0
  28. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/ops.py +2842 -0
  29. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/schemaobj.py +290 -0
  30. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/toimpl.py +242 -0
  31. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/py.typed +0 -0
  32. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/__init__.py +0 -0
  33. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/environment.py +1051 -0
  34. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/migration.py +1395 -0
  35. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/__init__.py +4 -0
  36. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/base.py +1055 -0
  37. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/revision.py +1728 -0
  38. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/write_hooks.py +176 -0
  39. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/README +1 -0
  40. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/alembic.ini.mako +147 -0
  41. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/env.py +89 -0
  42. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/script.py.mako +28 -0
  43. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/README +1 -0
  44. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/alembic.ini.mako +147 -0
  45. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/env.py +78 -0
  46. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/script.py.mako +28 -0
  47. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/README +12 -0
  48. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/alembic.ini.mako +155 -0
  49. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/env.py +140 -0
  50. Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/script.py.mako +51 -0
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from . import context
+ from . import op
+
+ __version__ = "1.16.4"
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/__main__.py ADDED
@@ -0,0 +1,4 @@
+ from .config import main
+
+ if __name__ == "__main__":
+     main(prog="alembic")
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/__init__.py ADDED
@@ -0,0 +1,10 @@
+ from .api import _render_migration_diffs as _render_migration_diffs
+ from .api import compare_metadata as compare_metadata
+ from .api import produce_migrations as produce_migrations
+ from .api import render_python_code as render_python_code
+ from .api import RevisionContext as RevisionContext
+ from .compare import _produce_net_changes as _produce_net_changes
+ from .compare import comparators as comparators
+ from .render import render_op_text as render_op_text
+ from .render import renderers as renderers
+ from .rewriter import Rewriter as Rewriter
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/api.py ADDED
@@ -0,0 +1,650 @@
+ from __future__ import annotations
+
+ import contextlib
+ from typing import Any
+ from typing import Dict
+ from typing import Iterator
+ from typing import List
+ from typing import Optional
+ from typing import Sequence
+ from typing import Set
+ from typing import TYPE_CHECKING
+ from typing import Union
+
+ from sqlalchemy import inspect
+
+ from . import compare
+ from . import render
+ from .. import util
+ from ..operations import ops
+ from ..util import sqla_compat
+
+ """Provide the 'autogenerate' feature which can produce migration operations
+ automatically."""
+
+ if TYPE_CHECKING:
+     from sqlalchemy.engine import Connection
+     from sqlalchemy.engine import Dialect
+     from sqlalchemy.engine import Inspector
+     from sqlalchemy.sql.schema import MetaData
+     from sqlalchemy.sql.schema import SchemaItem
+     from sqlalchemy.sql.schema import Table
+
+     from ..config import Config
+     from ..operations.ops import DowngradeOps
+     from ..operations.ops import MigrationScript
+     from ..operations.ops import UpgradeOps
+     from ..runtime.environment import NameFilterParentNames
+     from ..runtime.environment import NameFilterType
+     from ..runtime.environment import ProcessRevisionDirectiveFn
+     from ..runtime.environment import RenderItemFn
+     from ..runtime.migration import MigrationContext
+     from ..script.base import Script
+     from ..script.base import ScriptDirectory
+     from ..script.revision import _GetRevArg
+
+
+ def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any:
+     """Compare a database schema to that given in a
+     :class:`~sqlalchemy.schema.MetaData` instance.
+
+     The database connection is presented in the context
+     of a :class:`.MigrationContext` object, which
+     provides database connectivity as well as optional
+     comparison functions to use for datatypes and
+     server defaults - see the "autogenerate" arguments
+     at :meth:`.EnvironmentContext.configure`
+     for details on these.
+
+     The return format is a list of "diff" directives,
+     each representing individual differences::
+
+         from alembic.migration import MigrationContext
+         from alembic.autogenerate import compare_metadata
+         from sqlalchemy import (
+             create_engine,
+             MetaData,
+             Column,
+             Integer,
+             String,
+             Table,
+             text,
+         )
+         import pprint
+
+         engine = create_engine("sqlite://")
+
+         with engine.begin() as conn:
+             conn.execute(
+                 text(
+                     '''
+                         create table foo (
+                             id integer not null primary key,
+                             old_data varchar,
+                             x integer
+                         )
+                     '''
+                 )
+             )
+             conn.execute(text("create table bar (data varchar)"))
+
+         metadata = MetaData()
+         Table(
+             "foo",
+             metadata,
+             Column("id", Integer, primary_key=True),
+             Column("data", Integer),
+             Column("x", Integer, nullable=False),
+         )
+         Table("bat", metadata, Column("info", String))
+
+         mc = MigrationContext.configure(engine.connect())
+
+         diff = compare_metadata(mc, metadata)
+         pprint.pprint(diff, indent=2, width=20)
+
+     Output::
+
+         [
+             (
+                 "add_table",
+                 Table(
+                     "bat",
+                     MetaData(),
+                     Column("info", String(), table=<bat>),
+                     schema=None,
+                 ),
+             ),
+             (
+                 "remove_table",
+                 Table(
+                     "bar",
+                     MetaData(),
+                     Column("data", VARCHAR(), table=<bar>),
+                     schema=None,
+                 ),
+             ),
+             (
+                 "add_column",
+                 None,
+                 "foo",
+                 Column("data", Integer(), table=<foo>),
+             ),
+             [
+                 (
+                     "modify_nullable",
+                     None,
+                     "foo",
+                     "x",
+                     {
+                         "existing_comment": None,
+                         "existing_server_default": False,
+                         "existing_type": INTEGER(),
+                     },
+                     True,
+                     False,
+                 )
+             ],
+             (
+                 "remove_column",
+                 None,
+                 "foo",
+                 Column("old_data", VARCHAR(), table=<foo>),
+             ),
+         ]
+
+     :param context: a :class:`.MigrationContext`
+      instance.
+     :param metadata: a :class:`~sqlalchemy.schema.MetaData`
+      instance.
+
+     .. seealso::
+
+         :func:`.produce_migrations` - produces a :class:`.MigrationScript`
+         structure based on metadata comparison.
+
+     """
+
+     migration_script = produce_migrations(context, metadata)
+     assert migration_script.upgrade_ops is not None
+     return migration_script.upgrade_ops.as_diffs()
+
+
+ def produce_migrations(
+     context: MigrationContext, metadata: MetaData
+ ) -> MigrationScript:
+     """Produce a :class:`.MigrationScript` structure based on schema
+     comparison.
+
+     This function does essentially what :func:`.compare_metadata` does,
+     but then runs the resulting list of diffs to produce the full
+     :class:`.MigrationScript` object. For an example of what this looks like,
+     see the example in :ref:`customizing_revision`.
+
+     .. seealso::
+
+         :func:`.compare_metadata` - returns more fundamental "diff"
+         data from comparing a schema.
+
+     """
+
+     autogen_context = AutogenContext(context, metadata=metadata)
+
+     migration_script = ops.MigrationScript(
+         rev_id=None,
+         upgrade_ops=ops.UpgradeOps([]),
+         downgrade_ops=ops.DowngradeOps([]),
+     )
+
+     compare._populate_migration_script(autogen_context, migration_script)
+
+     return migration_script
+
+
+ def render_python_code(
+     up_or_down_op: Union[UpgradeOps, DowngradeOps],
+     sqlalchemy_module_prefix: str = "sa.",
+     alembic_module_prefix: str = "op.",
+     render_as_batch: bool = False,
+     imports: Sequence[str] = (),
+     render_item: Optional[RenderItemFn] = None,
+     migration_context: Optional[MigrationContext] = None,
+     user_module_prefix: Optional[str] = None,
+ ) -> str:
+     """Render Python code given an :class:`.UpgradeOps` or
+     :class:`.DowngradeOps` object.
+
+     This is a convenience function that can be used to test the
+     autogenerate output of a user-defined :class:`.MigrationScript` structure.
+
+     :param up_or_down_op: :class:`.UpgradeOps` or :class:`.DowngradeOps` object
+     :param sqlalchemy_module_prefix: module prefix for SQLAlchemy objects
+     :param alembic_module_prefix: module prefix for Alembic constructs
+     :param render_as_batch: use "batch operations" style for rendering
+     :param imports: sequence of import symbols to add
+     :param render_item: callable to render items
+     :param migration_context: optional :class:`.MigrationContext`
+     :param user_module_prefix: optional string prefix for user-defined types
+
+     .. versionadded:: 1.11.0
+
+     """
+     opts = {
+         "sqlalchemy_module_prefix": sqlalchemy_module_prefix,
+         "alembic_module_prefix": alembic_module_prefix,
+         "render_item": render_item,
+         "render_as_batch": render_as_batch,
+         "user_module_prefix": user_module_prefix,
+     }
+
+     if migration_context is None:
+         from ..runtime.migration import MigrationContext
+         from sqlalchemy.engine.default import DefaultDialect
+
+         migration_context = MigrationContext.configure(
+             dialect=DefaultDialect()
+         )
+
+     autogen_context = AutogenContext(migration_context, opts=opts)
+     autogen_context.imports = set(imports)
+     return render._indent(
+         render._render_cmd_body(up_or_down_op, autogen_context)
+     )
+
+
+ def _render_migration_diffs(
+     context: MigrationContext, template_args: Dict[Any, Any]
+ ) -> None:
+     """legacy, used by test_autogen_composition at the moment"""
+
+     autogen_context = AutogenContext(context)
+
+     upgrade_ops = ops.UpgradeOps([])
+     compare._produce_net_changes(autogen_context, upgrade_ops)
+
+     migration_script = ops.MigrationScript(
+         rev_id=None,
+         upgrade_ops=upgrade_ops,
+         downgrade_ops=upgrade_ops.reverse(),
+     )
+
+     render._render_python_into_templatevars(
+         autogen_context, migration_script, template_args
+     )
+
+
+ class AutogenContext:
+     """Maintains configuration and state that's specific to an
+     autogenerate operation."""
+
+     metadata: Union[MetaData, Sequence[MetaData], None] = None
+     """The :class:`~sqlalchemy.schema.MetaData` object
+     representing the destination.
+
+     This object is the one that is passed within ``env.py``
+     to the :paramref:`.EnvironmentContext.configure.target_metadata`
+     parameter. It represents the structure of :class:`.Table` and other
+     objects as stated in the current database model, and represents the
+     destination structure for the database being examined.
+
+     While the :class:`~sqlalchemy.schema.MetaData` object is primarily
+     known as a collection of :class:`~sqlalchemy.schema.Table` objects,
+     it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary
+     that may be used by end-user schemes to store additional schema-level
+     objects that are to be compared in custom autogeneration schemes.
+
+     """
+
+     connection: Optional[Connection] = None
+     """The :class:`~sqlalchemy.engine.base.Connection` object currently
+     connected to the database backend being compared.
+
+     This is obtained from the :attr:`.MigrationContext.bind` and is
+     ultimately set up in the ``env.py`` script.
+
+     """
+
+     dialect: Optional[Dialect] = None
+     """The :class:`~sqlalchemy.engine.Dialect` object currently in use.
+
+     This is normally obtained from the
+     :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute.
+
+     """
+
+     imports: Set[str] = None  # type: ignore[assignment]
+     """A ``set()`` which contains string Python import directives.
+
+     The directives are to be rendered into the ``${imports}`` section
+     of a script template. The set is normally empty and can be modified
+     within hooks such as the
+     :paramref:`.EnvironmentContext.configure.render_item` hook.
+
+     .. seealso::
+
+         :ref:`autogen_render_types`
+
+     """
+
+     migration_context: MigrationContext = None  # type: ignore[assignment]
+     """The :class:`.MigrationContext` established by the ``env.py`` script."""
+
+     def __init__(
+         self,
+         migration_context: MigrationContext,
+         metadata: Union[MetaData, Sequence[MetaData], None] = None,
+         opts: Optional[Dict[str, Any]] = None,
+         autogenerate: bool = True,
+     ) -> None:
+         if (
+             autogenerate
+             and migration_context is not None
+             and migration_context.as_sql
+         ):
+             raise util.CommandError(
+                 "autogenerate can't use as_sql=True as it prevents querying "
+                 "the database for schema information"
+             )
+
+         if opts is None:
+             opts = migration_context.opts
+
+         self.metadata = metadata = (
+             opts.get("target_metadata", None) if metadata is None else metadata
+         )
+
+         if (
+             autogenerate
+             and metadata is None
+             and migration_context is not None
+             and migration_context.script is not None
+         ):
+             raise util.CommandError(
+                 "Can't proceed with --autogenerate option; environment "
+                 "script %s does not provide "
+                 "a MetaData object or sequence of objects to the context."
+                 % (migration_context.script.env_py_location)
+             )
+
+         include_object = opts.get("include_object", None)
+         include_name = opts.get("include_name", None)
+
+         object_filters = []
+         name_filters = []
+         if include_object:
+             object_filters.append(include_object)
+         if include_name:
+             name_filters.append(include_name)
+
+         self._object_filters = object_filters
+         self._name_filters = name_filters
+
+         self.migration_context = migration_context
+         if self.migration_context is not None:
+             self.connection = self.migration_context.bind
+             self.dialect = self.migration_context.dialect
+
+         self.imports = set()
+         self.opts: Dict[str, Any] = opts
+         self._has_batch: bool = False
+
+     @util.memoized_property
+     def inspector(self) -> Inspector:
+         if self.connection is None:
+             raise TypeError(
+                 "can't return inspector as this "
+                 "AutogenContext has no database connection"
+             )
+         return inspect(self.connection)
+
+     @contextlib.contextmanager
+     def _within_batch(self) -> Iterator[None]:
+         self._has_batch = True
+         yield
+         self._has_batch = False
+
+     def run_name_filters(
+         self,
+         name: Optional[str],
+         type_: NameFilterType,
+         parent_names: NameFilterParentNames,
+     ) -> bool:
+         """Run the context's name filters and return True if the targets
+         should be part of the autogenerate operation.
+
+         This method should be run for every kind of name encountered within the
+         reflection side of an autogenerate operation, giving the environment
+         the chance to filter what names should be reflected as database
+         objects. The filters here are produced directly via the
+         :paramref:`.EnvironmentContext.configure.include_name` parameter.
+
+         """
+         if "schema_name" in parent_names:
+             if type_ == "table":
+                 table_name = name
+             else:
+                 table_name = parent_names.get("table_name", None)
+             if table_name:
+                 schema_name = parent_names["schema_name"]
+                 if schema_name:
+                     parent_names["schema_qualified_table_name"] = "%s.%s" % (
+                         schema_name,
+                         table_name,
+                     )
+                 else:
+                     parent_names["schema_qualified_table_name"] = table_name
+
+         for fn in self._name_filters:
+             if not fn(name, type_, parent_names):
+                 return False
+         else:
+             return True
+
+     def run_object_filters(
+         self,
+         object_: SchemaItem,
+         name: sqla_compat._ConstraintName,
+         type_: NameFilterType,
+         reflected: bool,
+         compare_to: Optional[SchemaItem],
+     ) -> bool:
+         """Run the context's object filters and return True if the targets
+         should be part of the autogenerate operation.
+
+         This method should be run for every kind of object encountered within
+         an autogenerate operation, giving the environment the chance
+         to filter what objects should be included in the comparison.
+         The filters here are produced directly via the
+         :paramref:`.EnvironmentContext.configure.include_object` parameter.
+
+         """
+         for fn in self._object_filters:
+             if not fn(object_, name, type_, reflected, compare_to):
+                 return False
+         else:
+             return True
+
+     run_filters = run_object_filters
+
+     @util.memoized_property
+     def sorted_tables(self) -> List[Table]:
+         """Return an aggregate of the :attr:`.MetaData.sorted_tables`
+         collection(s).
+
+         For a sequence of :class:`.MetaData` objects, this
+         concatenates the :attr:`.MetaData.sorted_tables` collection
+         for each individual :class:`.MetaData` in the order of the
+         sequence. It does **not** collate the sorted tables collections.
+
+         """
+         result = []
+         for m in util.to_list(self.metadata):
+             result.extend(m.sorted_tables)
+         return result
+
+     @util.memoized_property
+     def table_key_to_table(self) -> Dict[str, Table]:
+         """Return an aggregate of the :attr:`.MetaData.tables` dictionaries.
+
+         The :attr:`.MetaData.tables` collection is a dictionary of table key
+         to :class:`.Table`; this method aggregates the dictionary across
+         multiple :class:`.MetaData` objects into one dictionary.
+
+         Duplicate table keys are **not** supported; if two :class:`.MetaData`
+         objects contain the same table key, an exception is raised.
+
+         """
+         result: Dict[str, Table] = {}
+         for m in util.to_list(self.metadata):
+             intersect = set(result).intersection(set(m.tables))
+             if intersect:
+                 raise ValueError(
+                     "Duplicate table keys across multiple "
+                     "MetaData objects: %s"
+                     % (", ".join('"%s"' % key for key in sorted(intersect)))
+                 )
+
+             result.update(m.tables)
+         return result
+
+
+ class RevisionContext:
+     """Maintains configuration and state that's specific to a revision
+     file generation operation."""
+
+     generated_revisions: List[MigrationScript]
+     process_revision_directives: Optional[ProcessRevisionDirectiveFn]
+
+     def __init__(
+         self,
+         config: Config,
+         script_directory: ScriptDirectory,
+         command_args: Dict[str, Any],
+         process_revision_directives: Optional[
+             ProcessRevisionDirectiveFn
+         ] = None,
+     ) -> None:
+         self.config = config
+         self.script_directory = script_directory
+         self.command_args = command_args
+         self.process_revision_directives = process_revision_directives
+         self.template_args = {
+             "config": config  # Let templates use config for
+             # e.g. multiple databases
+         }
+         self.generated_revisions = [self._default_revision()]
+
+     def _to_script(
+         self, migration_script: MigrationScript
+     ) -> Optional[Script]:
+         template_args: Dict[str, Any] = self.template_args.copy()
+
+         if getattr(migration_script, "_needs_render", False):
+             autogen_context = self._last_autogen_context
+
+             # clear out existing imports if we are doing multiple
+             # renders
+             autogen_context.imports = set()
+             if migration_script.imports:
+                 autogen_context.imports.update(migration_script.imports)
+             render._render_python_into_templatevars(
+                 autogen_context, migration_script, template_args
+             )
+
+         assert migration_script.rev_id is not None
+         return self.script_directory.generate_revision(
+             migration_script.rev_id,
+             migration_script.message,
+             refresh=True,
+             head=migration_script.head,
+             splice=migration_script.splice,
+             branch_labels=migration_script.branch_label,
+             version_path=migration_script.version_path,
+             depends_on=migration_script.depends_on,
+             **template_args,
+         )
+
+     def run_autogenerate(
+         self, rev: _GetRevArg, migration_context: MigrationContext
+     ) -> None:
+         self._run_environment(rev, migration_context, True)
+
+     def run_no_autogenerate(
+         self, rev: _GetRevArg, migration_context: MigrationContext
+     ) -> None:
+         self._run_environment(rev, migration_context, False)
+
+     def _run_environment(
+         self,
+         rev: _GetRevArg,
+         migration_context: MigrationContext,
+         autogenerate: bool,
+     ) -> None:
+         if autogenerate:
+             if self.command_args["sql"]:
+                 raise util.CommandError(
+                     "Using --sql with --autogenerate does not make any sense"
+                 )
+             if set(self.script_directory.get_revisions(rev)) != set(
+                 self.script_directory.get_revisions("heads")
+             ):
+                 raise util.CommandError("Target database is not up to date.")
+
+         upgrade_token = migration_context.opts["upgrade_token"]
+         downgrade_token = migration_context.opts["downgrade_token"]
+
+         migration_script = self.generated_revisions[-1]
+         if not getattr(migration_script, "_needs_render", False):
+             migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
+             migration_script.downgrade_ops_list[-1].downgrade_token = (
+                 downgrade_token
+             )
+             migration_script._needs_render = True
+         else:
+             migration_script._upgrade_ops.append(
+                 ops.UpgradeOps([], upgrade_token=upgrade_token)
+             )
+             migration_script._downgrade_ops.append(
+                 ops.DowngradeOps([], downgrade_token=downgrade_token)
+             )
+
+         autogen_context = AutogenContext(
+             migration_context, autogenerate=autogenerate
+         )
+         self._last_autogen_context: AutogenContext = autogen_context
+
+         if autogenerate:
+             compare._populate_migration_script(
+                 autogen_context, migration_script
+             )
+
+         if self.process_revision_directives:
+             self.process_revision_directives(
+                 migration_context, rev, self.generated_revisions
+             )
+
+         hook = migration_context.opts["process_revision_directives"]
+         if hook:
+             hook(migration_context, rev, self.generated_revisions)
+
+         for migration_script in self.generated_revisions:
+             migration_script._needs_render = True
+
+     def _default_revision(self) -> MigrationScript:
+         command_args: Dict[str, Any] = self.command_args
+         op = ops.MigrationScript(
+             rev_id=command_args["rev_id"] or util.rev_id(),
+             message=command_args["message"],
+             upgrade_ops=ops.UpgradeOps([]),
+             downgrade_ops=ops.DowngradeOps([]),
+             head=command_args["head"],
+             splice=command_args["splice"],
+             branch_label=command_args["branch_label"],
+             version_path=command_args["version_path"],
+             depends_on=command_args["depends_on"],
+         )
+         return op
+
+     def generate_scripts(self) -> Iterator[Optional[Script]]:
+         for generated_revision in self.generated_revisions:
+             yield self._to_script(generated_revision)
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/compare.py ADDED
@@ -0,0 +1,1370 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ import contextlib
7
+ import logging
8
+ import re
9
+ from typing import Any
10
+ from typing import cast
11
+ from typing import Dict
12
+ from typing import Iterator
13
+ from typing import Mapping
14
+ from typing import Optional
15
+ from typing import Set
16
+ from typing import Tuple
17
+ from typing import TYPE_CHECKING
18
+ from typing import TypeVar
19
+ from typing import Union
20
+
21
+ from sqlalchemy import event
22
+ from sqlalchemy import inspect
23
+ from sqlalchemy import schema as sa_schema
24
+ from sqlalchemy import text
25
+ from sqlalchemy import types as sqltypes
26
+ from sqlalchemy.sql import expression
27
+ from sqlalchemy.sql.elements import conv
28
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
29
+ from sqlalchemy.sql.schema import Index
30
+ from sqlalchemy.sql.schema import UniqueConstraint
31
+ from sqlalchemy.util import OrderedSet
32
+
33
+ from .. import util
34
+ from ..ddl._autogen import is_index_sig
35
+ from ..ddl._autogen import is_uq_sig
36
+ from ..operations import ops
37
+ from ..util import sqla_compat
38
+
39
+ if TYPE_CHECKING:
40
+ from typing import Literal
41
+
42
+ from sqlalchemy.engine.reflection import Inspector
43
+ from sqlalchemy.sql.elements import quoted_name
44
+ from sqlalchemy.sql.elements import TextClause
45
+ from sqlalchemy.sql.schema import Column
46
+ from sqlalchemy.sql.schema import Table
47
+
48
+ from alembic.autogenerate.api import AutogenContext
49
+ from alembic.ddl.impl import DefaultImpl
50
+ from alembic.operations.ops import AlterColumnOp
51
+ from alembic.operations.ops import MigrationScript
52
+ from alembic.operations.ops import ModifyTableOps
53
+ from alembic.operations.ops import UpgradeOps
54
+ from ..ddl._autogen import _constraint_sig
55
+
56
+
57
+ log = logging.getLogger(__name__)
58
+
59
+
60
+ def _populate_migration_script(
61
+ autogen_context: AutogenContext, migration_script: MigrationScript
62
+ ) -> None:
63
+ upgrade_ops = migration_script.upgrade_ops_list[-1]
64
+ downgrade_ops = migration_script.downgrade_ops_list[-1]
65
+
66
+ _produce_net_changes(autogen_context, upgrade_ops)
67
+ upgrade_ops.reverse_into(downgrade_ops)
68
+
69
+
70
+ comparators = util.Dispatcher(uselist=True)
71
+
72
+
73
+ def _produce_net_changes(
74
+ autogen_context: AutogenContext, upgrade_ops: UpgradeOps
75
+ ) -> None:
76
+ connection = autogen_context.connection
77
+ assert connection is not None
78
+ include_schemas = autogen_context.opts.get("include_schemas", False)
79
+
80
+ inspector: Inspector = inspect(connection)
81
+
82
+ default_schema = connection.dialect.default_schema_name
83
+ schemas: Set[Optional[str]]
84
+ if include_schemas:
85
+ schemas = set(inspector.get_schema_names())
86
+ # replace default schema name with None
87
+ schemas.discard("information_schema")
88
+ # replace the "default" schema with None
89
+ schemas.discard(default_schema)
90
+ schemas.add(None)
91
+ else:
92
+ schemas = {None}
93
+
94
+ schemas = {
95
+ s for s in schemas if autogen_context.run_name_filters(s, "schema", {})
96
+ }
97
+
98
+ assert autogen_context.dialect is not None
99
+ comparators.dispatch("schema", autogen_context.dialect.name)(
100
+ autogen_context, upgrade_ops, schemas
101
+ )
102
+
103
+
104
+ @comparators.dispatch_for("schema")
105
+ def _autogen_for_tables(
106
+ autogen_context: AutogenContext,
107
+ upgrade_ops: UpgradeOps,
108
+ schemas: Union[Set[None], Set[Optional[str]]],
109
+ ) -> None:
110
+ inspector = autogen_context.inspector
111
+
112
+ conn_table_names: Set[Tuple[Optional[str], str]] = set()
113
+
114
+ version_table_schema = (
115
+ autogen_context.migration_context.version_table_schema
116
+ )
117
+ version_table = autogen_context.migration_context.version_table
118
+
119
+ for schema_name in schemas:
120
+ tables = set(inspector.get_table_names(schema=schema_name))
121
+ if schema_name == version_table_schema:
122
+ tables = tables.difference(
123
+ [autogen_context.migration_context.version_table]
124
+ )
125
+
126
+ conn_table_names.update(
127
+ (schema_name, tname)
128
+ for tname in tables
129
+ if autogen_context.run_name_filters(
130
+ tname, "table", {"schema_name": schema_name}
131
+ )
132
+ )
133
+
134
+ metadata_table_names = OrderedSet(
135
+ [(table.schema, table.name) for table in autogen_context.sorted_tables]
136
+ ).difference([(version_table_schema, version_table)])
137
+
138
+ _compare_tables(
139
+ conn_table_names,
140
+ metadata_table_names,
141
+ inspector,
142
+ upgrade_ops,
143
+ autogen_context,
144
+ )
145
+
146
+
147
+ def _compare_tables(
148
+ conn_table_names: set,
149
+ metadata_table_names: set,
150
+ inspector: Inspector,
151
+ upgrade_ops: UpgradeOps,
152
+ autogen_context: AutogenContext,
153
+ ) -> None:
154
+ default_schema = inspector.bind.dialect.default_schema_name
155
+
156
+ # tables coming from the connection will not have "schema"
157
+ # set if it matches default_schema_name; so we need a list
158
+ # of table names from local metadata that also have "None" if schema
159
+ # == default_schema_name. Most setups will be like this anyway but
160
+ # some are not (see #170)
161
+ metadata_table_names_no_dflt_schema = OrderedSet(
162
+ [
163
+ (schema if schema != default_schema else None, tname)
164
+ for schema, tname in metadata_table_names
165
+ ]
166
+ )
167
+
168
+ # to adjust for the MetaData collection storing the tables either
169
+ # as "schemaname.tablename" or just "tablename", create a new lookup
170
+ # which will match the "non-default-schema" keys to the Table object.
171
+ tname_to_table = {
172
+ no_dflt_schema: autogen_context.table_key_to_table[
173
+ sa_schema._get_table_key(tname, schema)
174
+ ]
175
+ for no_dflt_schema, (schema, tname) in zip(
176
+ metadata_table_names_no_dflt_schema, metadata_table_names
177
+ )
178
+ }
179
+ metadata_table_names = metadata_table_names_no_dflt_schema
180
+
181
+ for s, tname in metadata_table_names.difference(conn_table_names):
182
+ name = "%s.%s" % (s, tname) if s else tname
183
+ metadata_table = tname_to_table[(s, tname)]
184
+ if autogen_context.run_object_filters(
185
+ metadata_table, tname, "table", False, None
186
+ ):
187
+ upgrade_ops.ops.append(
188
+ ops.CreateTableOp.from_table(metadata_table)
189
+ )
190
+ log.info("Detected added table %r", name)
191
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
192
+
193
+ comparators.dispatch("table")(
194
+ autogen_context,
195
+ modify_table_ops,
196
+ s,
197
+ tname,
198
+ None,
199
+ metadata_table,
200
+ )
201
+ if not modify_table_ops.is_empty():
202
+ upgrade_ops.ops.append(modify_table_ops)
203
+
204
+ removal_metadata = sa_schema.MetaData()
205
+ for s, tname in conn_table_names.difference(metadata_table_names):
206
+ name = sa_schema._get_table_key(tname, s)
207
+ exists = name in removal_metadata.tables
208
+ t = sa_schema.Table(tname, removal_metadata, schema=s)
209
+
210
+ if not exists:
211
+ event.listen(
212
+ t,
213
+ "column_reflect",
214
+ # fmt: off
215
+ autogen_context.migration_context.impl.
216
+ _compat_autogen_column_reflect
217
+ (inspector),
218
+ # fmt: on
219
+ )
220
+ _InspectorConv(inspector).reflect_table(t, include_columns=None)
221
+ if autogen_context.run_object_filters(t, tname, "table", True, None):
222
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
223
+
224
+ comparators.dispatch("table")(
225
+ autogen_context, modify_table_ops, s, tname, t, None
226
+ )
227
+ if not modify_table_ops.is_empty():
228
+ upgrade_ops.ops.append(modify_table_ops)
229
+
230
+ upgrade_ops.ops.append(ops.DropTableOp.from_table(t))
231
+ log.info("Detected removed table %r", name)
232
+
233
+ existing_tables = conn_table_names.intersection(metadata_table_names)
234
+
235
+ existing_metadata = sa_schema.MetaData()
236
+ conn_column_info = {}
237
+ for s, tname in existing_tables:
238
+ name = sa_schema._get_table_key(tname, s)
239
+ exists = name in existing_metadata.tables
240
+ t = sa_schema.Table(tname, existing_metadata, schema=s)
241
+ if not exists:
242
+ event.listen(
243
+ t,
244
+ "column_reflect",
245
+ # fmt: off
246
+ autogen_context.migration_context.impl.
247
+ _compat_autogen_column_reflect(inspector),
248
+ # fmt: on
249
+ )
250
+ _InspectorConv(inspector).reflect_table(t, include_columns=None)
251
+
252
+ conn_column_info[(s, tname)] = t
253
+
254
+ for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])):
255
+ s = s or None
256
+ name = "%s.%s" % (s, tname) if s else tname
257
+ metadata_table = tname_to_table[(s, tname)]
258
+ conn_table = existing_metadata.tables[name]
259
+
260
+ if autogen_context.run_object_filters(
261
+ metadata_table, tname, "table", False, conn_table
262
+ ):
263
+ modify_table_ops = ops.ModifyTableOps(tname, [], schema=s)
264
+ with _compare_columns(
265
+ s,
266
+ tname,
267
+ conn_table,
268
+ metadata_table,
269
+ modify_table_ops,
270
+ autogen_context,
271
+ inspector,
272
+ ):
273
+ comparators.dispatch("table")(
274
+ autogen_context,
275
+ modify_table_ops,
276
+ s,
277
+ tname,
278
+ conn_table,
279
+ metadata_table,
280
+ )
281
+
282
+ if not modify_table_ops.is_empty():
283
+ upgrade_ops.ops.append(modify_table_ops)
284
+
285
+
286
+ _IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict(
287
+ {
288
+ "asc": expression.asc,
289
+ "desc": expression.desc,
290
+ "nulls_first": expression.nullsfirst,
291
+ "nulls_last": expression.nullslast,
292
+ "nullsfirst": expression.nullsfirst, # 1_3 name
293
+ "nullslast": expression.nullslast, # 1_3 name
294
+ }
295
+ )
296
+
297
+
298
+ def _make_index(
299
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
300
+ ) -> Optional[Index]:
301
+ exprs: list[Union[Column[Any], TextClause]] = []
302
+ sorting = params.get("column_sorting")
303
+
304
+ for num, col_name in enumerate(params["column_names"]):
305
+ item: Union[Column[Any], TextClause]
306
+ if col_name is None:
307
+ assert "expressions" in params
308
+ name = params["expressions"][num]
309
+ item = text(name)
310
+ else:
311
+ name = col_name
312
+ item = conn_table.c[col_name]
313
+ if sorting and name in sorting:
314
+ for operator in sorting[name]:
315
+ if operator in _IndexColumnSortingOps:
316
+ item = _IndexColumnSortingOps[operator](item)
317
+ exprs.append(item)
318
+ ix = sa_schema.Index(
319
+ params["name"],
320
+ *exprs,
321
+ unique=params["unique"],
322
+ _table=conn_table,
323
+ **impl.adjust_reflected_dialect_options(params, "index"),
324
+ )
325
+ if "duplicates_constraint" in params:
326
+ ix.info["duplicates_constraint"] = params["duplicates_constraint"]
327
+ return ix
328
+
329
+
330
+ def _make_unique_constraint(
331
+ impl: DefaultImpl, params: Dict[str, Any], conn_table: Table
332
+ ) -> UniqueConstraint:
333
+ uq = sa_schema.UniqueConstraint(
334
+ *[conn_table.c[cname] for cname in params["column_names"]],
335
+ name=params["name"],
336
+ **impl.adjust_reflected_dialect_options(params, "unique_constraint"),
337
+ )
338
+ if "duplicates_index" in params:
339
+ uq.info["duplicates_index"] = params["duplicates_index"]
340
+
341
+ return uq
342
+
343
+
344
+ def _make_foreign_key(
345
+ params: Dict[str, Any], conn_table: Table
346
+ ) -> ForeignKeyConstraint:
347
+ tname = params["referred_table"]
348
+ if params["referred_schema"]:
349
+ tname = "%s.%s" % (params["referred_schema"], tname)
350
+
351
+ options = params.get("options", {})
352
+
353
+ const = sa_schema.ForeignKeyConstraint(
354
+ [conn_table.c[cname] for cname in params["constrained_columns"]],
355
+ ["%s.%s" % (tname, n) for n in params["referred_columns"]],
356
+ onupdate=options.get("onupdate"),
357
+ ondelete=options.get("ondelete"),
358
+ deferrable=options.get("deferrable"),
359
+ initially=options.get("initially"),
360
+ name=params["name"],
361
+ )
362
+ # needed by 0.7
363
+ conn_table.append_constraint(const)
364
+ return const
365
+
366
+
367
+ @contextlib.contextmanager
368
+ def _compare_columns(
369
+ schema: Optional[str],
370
+ tname: Union[quoted_name, str],
371
+ conn_table: Table,
372
+ metadata_table: Table,
373
+ modify_table_ops: ModifyTableOps,
374
+ autogen_context: AutogenContext,
375
+ inspector: Inspector,
376
+ ) -> Iterator[None]:
377
+ name = "%s.%s" % (schema, tname) if schema else tname
378
+ metadata_col_names = OrderedSet(
379
+ c.name for c in metadata_table.c if not c.system
380
+ )
381
+ metadata_cols_by_name = {
382
+ c.name: c for c in metadata_table.c if not c.system
383
+ }
384
+
385
+ conn_col_names = {
386
+ c.name: c
387
+ for c in conn_table.c
388
+ if autogen_context.run_name_filters(
389
+ c.name, "column", {"table_name": tname, "schema_name": schema}
390
+ )
391
+ }
392
+
393
+ for cname in metadata_col_names.difference(conn_col_names):
394
+ if autogen_context.run_object_filters(
395
+ metadata_cols_by_name[cname], cname, "column", False, None
396
+ ):
397
+ modify_table_ops.ops.append(
398
+ ops.AddColumnOp.from_column_and_tablename(
399
+ schema, tname, metadata_cols_by_name[cname]
400
+ )
401
+ )
402
+ log.info("Detected added column '%s.%s'", name, cname)
403
+
404
+ for colname in metadata_col_names.intersection(conn_col_names):
405
+ metadata_col = metadata_cols_by_name[colname]
406
+ conn_col = conn_table.c[colname]
407
+ if not autogen_context.run_object_filters(
408
+ metadata_col, colname, "column", False, conn_col
409
+ ):
410
+ continue
411
+ alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema)
412
+
413
+ comparators.dispatch("column")(
414
+ autogen_context,
415
+ alter_column_op,
416
+ schema,
417
+ tname,
418
+ colname,
419
+ conn_col,
420
+ metadata_col,
421
+ )
422
+
423
+ if alter_column_op.has_changes():
424
+ modify_table_ops.ops.append(alter_column_op)
425
+
426
+ yield
427
+
428
+ for cname in set(conn_col_names).difference(metadata_col_names):
429
+ if autogen_context.run_object_filters(
430
+ conn_table.c[cname], cname, "column", True, None
431
+ ):
432
+ modify_table_ops.ops.append(
433
+ ops.DropColumnOp.from_column_and_tablename(
434
+ schema, tname, conn_table.c[cname]
435
+ )
436
+ )
437
+ log.info("Detected removed column '%s.%s'", name, cname)
438
+
439
+
440
+ _C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index])
441
+
442
+
443
+ class _InspectorConv:
444
+ __slots__ = ("inspector",)
445
+
446
+ def __init__(self, inspector):
447
+ self.inspector = inspector
448
+
449
+ def _apply_reflectinfo_conv(self, consts):
450
+ if not consts:
451
+ return consts
452
+ for const in consts:
453
+ if const["name"] is not None and not isinstance(
454
+ const["name"], conv
455
+ ):
456
+ const["name"] = conv(const["name"])
457
+ return consts
458
+
459
+ def _apply_constraint_conv(self, consts):
460
+ if not consts:
461
+ return consts
462
+ for const in consts:
463
+ if const.name is not None and not isinstance(const.name, conv):
464
+ const.name = conv(const.name)
465
+ return consts
466
+
467
+ def get_indexes(self, *args, **kw):
468
+ return self._apply_reflectinfo_conv(
469
+ self.inspector.get_indexes(*args, **kw)
470
+ )
471
+
472
+ def get_unique_constraints(self, *args, **kw):
473
+ return self._apply_reflectinfo_conv(
474
+ self.inspector.get_unique_constraints(*args, **kw)
475
+ )
476
+
477
+ def get_foreign_keys(self, *args, **kw):
478
+ return self._apply_reflectinfo_conv(
479
+ self.inspector.get_foreign_keys(*args, **kw)
480
+ )
481
+
482
+ def reflect_table(self, table, *, include_columns):
483
+ self.inspector.reflect_table(table, include_columns=include_columns)
484
+
485
+ # I had a cool version of this using _ReflectInfo, however that doesn't
486
+ # work in 1.4 and it's not public API in 2.x. Then this is just a two
487
+ # liner. So there's no competition...
488
+ self._apply_constraint_conv(table.constraints)
489
+ self._apply_constraint_conv(table.indexes)
490
+
491
+
492
+ @comparators.dispatch_for("table")
493
+ def _compare_indexes_and_uniques(
494
+ autogen_context: AutogenContext,
495
+ modify_ops: ModifyTableOps,
496
+ schema: Optional[str],
497
+ tname: Union[quoted_name, str],
498
+ conn_table: Optional[Table],
499
+ metadata_table: Optional[Table],
500
+ ) -> None:
501
+ inspector = autogen_context.inspector
502
+ is_create_table = conn_table is None
503
+ is_drop_table = metadata_table is None
504
+ impl = autogen_context.migration_context.impl
505
+
506
+ # 1a. get raw indexes and unique constraints from metadata ...
507
+ if metadata_table is not None:
508
+ metadata_unique_constraints = {
509
+ uq
510
+ for uq in metadata_table.constraints
511
+ if isinstance(uq, sa_schema.UniqueConstraint)
512
+ }
513
+ metadata_indexes = set(metadata_table.indexes)
514
+ else:
515
+ metadata_unique_constraints = set()
516
+ metadata_indexes = set()
517
+
518
+ conn_uniques = conn_indexes = frozenset() # type:ignore[var-annotated]
519
+
520
+ supports_unique_constraints = False
521
+
522
+ unique_constraints_duplicate_unique_indexes = False
523
+
524
+ if conn_table is not None:
525
+ # 1b. ... and from connection, if the table exists
526
+ try:
527
+ conn_uniques = _InspectorConv(inspector).get_unique_constraints(
528
+ tname, schema=schema
529
+ )
530
+
531
+ supports_unique_constraints = True
532
+ except NotImplementedError:
533
+ pass
534
+ except TypeError:
535
+ # number of arguments is off for the base
536
+ # method in SQLAlchemy due to the cache decorator
537
+ # not being present
538
+ pass
539
+ else:
540
+ conn_uniques = [ # type:ignore[assignment]
541
+ uq
542
+ for uq in conn_uniques
543
+ if autogen_context.run_name_filters(
544
+ uq["name"],
545
+ "unique_constraint",
546
+ {"table_name": tname, "schema_name": schema},
547
+ )
548
+ ]
549
+ for uq in conn_uniques:
550
+ if uq.get("duplicates_index"):
551
+ unique_constraints_duplicate_unique_indexes = True
552
+ try:
553
+ conn_indexes = _InspectorConv(inspector).get_indexes(
554
+ tname, schema=schema
555
+ )
556
+ except NotImplementedError:
557
+ pass
558
+ else:
559
+ conn_indexes = [ # type:ignore[assignment]
560
+ ix
561
+ for ix in conn_indexes
562
+ if autogen_context.run_name_filters(
563
+ ix["name"],
564
+ "index",
565
+ {"table_name": tname, "schema_name": schema},
566
+ )
567
+ ]
568
+
569
+ # 2. convert conn-level objects from raw inspector records
570
+ # into schema objects
571
+ if is_drop_table:
572
+ # for DROP TABLE uniques are inline, don't need them
573
+ conn_uniques = set() # type:ignore[assignment]
574
+ else:
575
+ conn_uniques = { # type:ignore[assignment]
576
+ _make_unique_constraint(impl, uq_def, conn_table)
577
+ for uq_def in conn_uniques
578
+ }
579
+
580
+ conn_indexes = { # type:ignore[assignment]
581
+ index
582
+ for index in (
583
+ _make_index(impl, ix, conn_table) for ix in conn_indexes
584
+ )
585
+ if index is not None
586
+ }
587
+
588
+ # 2a. if the dialect dupes unique indexes as unique constraints
589
+ # (mysql and oracle), correct for that
590
+
591
+ if unique_constraints_duplicate_unique_indexes:
592
+ _correct_for_uq_duplicates_uix(
593
+ conn_uniques,
594
+ conn_indexes,
595
+ metadata_unique_constraints,
596
+ metadata_indexes,
597
+ autogen_context.dialect,
598
+ impl,
599
+ )
600
+
601
+ # 3. give the dialect a chance to omit indexes and constraints that
602
+ # we know are either added implicitly by the DB or that the DB
603
+ # can't accurately report on
604
+ impl.correct_for_autogen_constraints(
605
+ conn_uniques, # type: ignore[arg-type]
606
+ conn_indexes, # type: ignore[arg-type]
607
+ metadata_unique_constraints,
608
+ metadata_indexes,
609
+ )
610
+
611
+ # 4. organize the constraints into "signature" collections, the
612
+ # _constraint_sig() objects provide a consistent facade over both
613
+ # Index and UniqueConstraint so we can easily work with them
614
+ # interchangeably
615
+ metadata_unique_constraints_sig = {
616
+ impl._create_metadata_constraint_sig(uq)
617
+ for uq in metadata_unique_constraints
618
+ }
619
+
620
+ metadata_indexes_sig = {
621
+ impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes
622
+ }
623
+
624
+ conn_unique_constraints = {
625
+ impl._create_reflected_constraint_sig(uq) for uq in conn_uniques
626
+ }
627
+
628
+ conn_indexes_sig = {
629
+ impl._create_reflected_constraint_sig(ix) for ix in conn_indexes
630
+ }
631
+
632
+ # 5. index things by name, for those objects that have names
633
+ metadata_names = {
634
+ cast(str, c.md_name_to_sql_name(autogen_context)): c
635
+ for c in metadata_unique_constraints_sig.union(metadata_indexes_sig)
636
+ if c.is_named
637
+ }
638
+
639
+ conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig]
640
+ conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig]
641
+
642
+ conn_uniques_by_name = {c.name: c for c in conn_unique_constraints}
643
+ conn_indexes_by_name = {c.name: c for c in conn_indexes_sig}
644
+ conn_names = {
645
+ c.name: c
646
+ for c in conn_unique_constraints.union(conn_indexes_sig)
647
+ if sqla_compat.constraint_name_string(c.name)
648
+ }
649
+
650
+ doubled_constraints = {
651
+ name: (conn_uniques_by_name[name], conn_indexes_by_name[name])
652
+ for name in set(conn_uniques_by_name).intersection(
653
+ conn_indexes_by_name
654
+ )
655
+ }
656
+
657
+ # 6. index things by "column signature", to help with unnamed unique
658
+ # constraints.
659
+ conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints}
660
+ metadata_uniques_by_sig = {
661
+ uq.unnamed: uq for uq in metadata_unique_constraints_sig
662
+ }
663
+ unnamed_metadata_uniques = {
664
+ uq.unnamed: uq
665
+ for uq in metadata_unique_constraints_sig
666
+ if not sqla_compat._constraint_is_named(
667
+ uq.const, autogen_context.dialect
668
+ )
669
+ }
670
+
671
+ # assumptions:
672
+ # 1. a unique constraint or an index from the connection *always*
673
+ # has a name.
674
+ # 2. an index on the metadata side *always* has a name.
675
+ # 3. a unique constraint on the metadata side *might* have a name.
676
+ # 4. The backend may double up indexes as unique constraints and
677
+ # vice versa (e.g. MySQL, Postgresql)
678
+
679
+ def obj_added(obj: _constraint_sig):
680
+ if is_index_sig(obj):
681
+ if autogen_context.run_object_filters(
682
+ obj.const, obj.name, "index", False, None
683
+ ):
684
+ modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const))
685
+ log.info(
686
+ "Detected added index %r on '%s'",
687
+ obj.name,
688
+ obj.column_names,
689
+ )
690
+ elif is_uq_sig(obj):
691
+ if not supports_unique_constraints:
692
+ # can't report unique indexes as added if we don't
693
+ # detect them
694
+ return
695
+ if is_create_table or is_drop_table:
696
+ # unique constraints are created inline with table defs
697
+ return
698
+ if autogen_context.run_object_filters(
699
+ obj.const, obj.name, "unique_constraint", False, None
700
+ ):
701
+ modify_ops.ops.append(
702
+ ops.AddConstraintOp.from_constraint(obj.const)
703
+ )
704
+ log.info(
705
+ "Detected added unique constraint %r on '%s'",
706
+ obj.name,
707
+ obj.column_names,
708
+ )
709
+ else:
710
+ assert False
711
+
712
+ def obj_removed(obj: _constraint_sig):
713
+ if is_index_sig(obj):
714
+ if obj.is_unique and not supports_unique_constraints:
715
+ # many databases double up unique constraints
716
+ # as unique indexes. without that list we can't
717
+ # be sure what we're doing here
718
+ return
719
+
720
+ if autogen_context.run_object_filters(
721
+ obj.const, obj.name, "index", True, None
722
+ ):
723
+ modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const))
724
+ log.info("Detected removed index %r on %r", obj.name, tname)
725
+ elif is_uq_sig(obj):
726
+ if is_create_table or is_drop_table:
727
+ # if the whole table is being dropped, we don't need to
728
+ # consider unique constraint separately
729
+ return
730
+ if autogen_context.run_object_filters(
731
+ obj.const, obj.name, "unique_constraint", True, None
732
+ ):
733
+ modify_ops.ops.append(
734
+ ops.DropConstraintOp.from_constraint(obj.const)
735
+ )
736
+ log.info(
737
+ "Detected removed unique constraint %r on %r",
738
+ obj.name,
739
+ tname,
740
+ )
741
+ else:
742
+ assert False
743
+
744
+ def obj_changed(
745
+ old: _constraint_sig,
746
+ new: _constraint_sig,
747
+ msg: str,
748
+ ):
749
+ if is_index_sig(old):
750
+ assert is_index_sig(new)
751
+
752
+ if autogen_context.run_object_filters(
753
+ new.const, new.name, "index", False, old.const
754
+ ):
755
+ log.info(
756
+ "Detected changed index %r on %r: %s", old.name, tname, msg
757
+ )
758
+ modify_ops.ops.append(ops.DropIndexOp.from_index(old.const))
759
+ modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const))
760
+ elif is_uq_sig(old):
761
+ assert is_uq_sig(new)
762
+
763
+ if autogen_context.run_object_filters(
764
+ new.const, new.name, "unique_constraint", False, old.const
765
+ ):
766
+ log.info(
767
+ "Detected changed unique constraint %r on %r: %s",
768
+ old.name,
769
+ tname,
770
+ msg,
771
+ )
772
+ modify_ops.ops.append(
773
+ ops.DropConstraintOp.from_constraint(old.const)
774
+ )
775
+ modify_ops.ops.append(
776
+ ops.AddConstraintOp.from_constraint(new.const)
777
+ )
778
+ else:
779
+ assert False
780
+
781
+ for removed_name in sorted(set(conn_names).difference(metadata_names)):
782
+ conn_obj = conn_names[removed_name]
783
+ if (
784
+ is_uq_sig(conn_obj)
785
+ and conn_obj.unnamed in unnamed_metadata_uniques
786
+ ):
787
+ continue
788
+ elif removed_name in doubled_constraints:
789
+ conn_uq, conn_idx = doubled_constraints[removed_name]
790
+ if (
791
+ all(
792
+ conn_idx.unnamed != meta_idx.unnamed
793
+ for meta_idx in metadata_indexes_sig
794
+ )
795
+ and conn_uq.unnamed not in metadata_uniques_by_sig
796
+ ):
797
+ obj_removed(conn_uq)
798
+ obj_removed(conn_idx)
799
+ else:
800
+ obj_removed(conn_obj)
801
+
802
+ for existing_name in sorted(set(metadata_names).intersection(conn_names)):
803
+ metadata_obj = metadata_names[existing_name]
804
+
805
+ if existing_name in doubled_constraints:
806
+ conn_uq, conn_idx = doubled_constraints[existing_name]
807
+ if is_index_sig(metadata_obj):
808
+ conn_obj = conn_idx
809
+ else:
810
+ conn_obj = conn_uq
811
+ else:
812
+ conn_obj = conn_names[existing_name]
813
+
814
+ if type(conn_obj) != type(metadata_obj):
815
+ obj_removed(conn_obj)
816
+ obj_added(metadata_obj)
817
+ else:
+ comparison = metadata_obj.compare_to_reflected(conn_obj)
+
+ if comparison.is_different:
+ # constraints are different
+ obj_changed(conn_obj, metadata_obj, comparison.message)
+ elif comparison.is_skip:
+ # constraints cannot be compared; skip them
+ thing = (
+ "index" if is_index_sig(conn_obj) else "unique constraint"
+ )
+ log.info(
+ "Cannot compare %s %r, assuming equal and skipping. %s",
+ thing,
+ conn_obj.name,
+ comparison.message,
+ )
+ else:
+ # constraints are equal
+ assert comparison.is_equal
+
838
+ for added_name in sorted(set(metadata_names).difference(conn_names)):
839
+ obj = metadata_names[added_name]
840
+ obj_added(obj)
841
+
842
+ for uq_sig in unnamed_metadata_uniques:
843
+ if uq_sig not in conn_uniques_by_sig:
844
+ obj_added(unnamed_metadata_uniques[uq_sig])
845
+
846
+
847
+ def _correct_for_uq_duplicates_uix(
848
+ conn_unique_constraints,
849
+ conn_indexes,
850
+ metadata_unique_constraints,
851
+ metadata_indexes,
852
+ dialect,
853
+ impl,
854
+ ):
855
+ # dedupe unique indexes vs. constraints, since MySQL / Oracle
856
+ # don't really have unique constraints as a separate construct.
857
+ # but look in the metadata and try to maintain constructs
858
+ # that already seem to be defined one way or the other
859
+ # on that side. This logic was formerly local to MySQL dialect,
860
+ # generalized to Oracle and others. See #276
861
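+ # (e.g. SQLAlchemy's MySQL reflection flags a unique constraint that
+ # merely mirrors an index with a "duplicates_index" key; that flag
+ # ends up in cons.info and drives uqs_dupe_indexes below)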
+
862
+ # resolve final rendered name for unique constraints defined in the
863
+ # metadata. this includes truncation of long names. naming convention
864
+ # names currently should already be set as cons.name, however leave this
865
+ # to the sqla_compat to decide.
866
+ metadata_cons_names = [
867
+ (sqla_compat._get_constraint_final_name(cons, dialect), cons)
868
+ for cons in metadata_unique_constraints
869
+ ]
870
+
871
+ metadata_uq_names = {
872
+ name for name, cons in metadata_cons_names if name is not None
873
+ }
874
+
875
+ unnamed_metadata_uqs = {
876
+ impl._create_metadata_constraint_sig(cons).unnamed
877
+ for name, cons in metadata_cons_names
878
+ if name is None
879
+ }
880
+
881
+ metadata_ix_names = {
882
+ sqla_compat._get_constraint_final_name(cons, dialect)
883
+ for cons in metadata_indexes
884
+ if cons.unique
885
+ }
886
+
887
+ # for reflection side, names are in their final database form
888
+ # already since they're from the database
889
+ conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique}
890
+
891
+ uqs_dupe_indexes = {
892
+ cons.name: cons
893
+ for cons in conn_unique_constraints
894
+ if cons.info["duplicates_index"]
895
+ }
896
+
897
+ for overlap in uqs_dupe_indexes:
898
+ if overlap not in metadata_uq_names:
899
+ if (
900
+ impl._create_reflected_constraint_sig(
901
+ uqs_dupe_indexes[overlap]
902
+ ).unnamed
903
+ not in unnamed_metadata_uqs
904
+ ):
905
+ conn_unique_constraints.discard(uqs_dupe_indexes[overlap])
906
+ elif overlap not in metadata_ix_names:
907
+ conn_indexes.discard(conn_ix_names[overlap])
908
+
909
+
910
+ @comparators.dispatch_for("column")
911
+ def _compare_nullable(
912
+ autogen_context: AutogenContext,
913
+ alter_column_op: AlterColumnOp,
914
+ schema: Optional[str],
915
+ tname: Union[quoted_name, str],
916
+ cname: Union[quoted_name, str],
917
+ conn_col: Column[Any],
918
+ metadata_col: Column[Any],
919
+ ) -> None:
920
+ metadata_col_nullable = metadata_col.nullable
921
+ conn_col_nullable = conn_col.nullable
922
+ alter_column_op.existing_nullable = conn_col_nullable
923
+
924
+ if conn_col_nullable is not metadata_col_nullable:
925
+ if (
926
+ sqla_compat._server_default_is_computed(
927
+ metadata_col.server_default, conn_col.server_default
928
+ )
929
+ and sqla_compat._nullability_might_be_unset(metadata_col)
930
+ or (
931
+ sqla_compat._server_default_is_identity(
932
+ metadata_col.server_default, conn_col.server_default
933
+ )
934
+ )
935
+ ):
936
+ log.info(
937
+ "Ignoring nullable change on identity column '%s.%s'",
938
+ tname,
939
+ cname,
940
+ )
941
+ else:
942
+ alter_column_op.modify_nullable = metadata_col_nullable
943
+ log.info(
944
+ "Detected %s on column '%s.%s'",
945
+ "NULL" if metadata_col_nullable else "NOT NULL",
946
+ tname,
947
+ cname,
948
+ )
949
+
950
+
951
+ @comparators.dispatch_for("column")
952
+ def _setup_autoincrement(
953
+ autogen_context: AutogenContext,
954
+ alter_column_op: AlterColumnOp,
955
+ schema: Optional[str],
956
+ tname: Union[quoted_name, str],
957
+ cname: quoted_name,
958
+ conn_col: Column[Any],
959
+ metadata_col: Column[Any],
960
+ ) -> None:
961
+ if metadata_col.table._autoincrement_column is metadata_col:
962
+ alter_column_op.kw["autoincrement"] = True
963
+ elif metadata_col.autoincrement is True:
964
+ alter_column_op.kw["autoincrement"] = True
965
+ elif metadata_col.autoincrement is False:
966
+ alter_column_op.kw["autoincrement"] = False
967
+
968
+
969
+ @comparators.dispatch_for("column")
970
+ def _compare_type(
971
+ autogen_context: AutogenContext,
972
+ alter_column_op: AlterColumnOp,
973
+ schema: Optional[str],
974
+ tname: Union[quoted_name, str],
975
+ cname: Union[quoted_name, str],
976
+ conn_col: Column[Any],
977
+ metadata_col: Column[Any],
978
+ ) -> None:
979
+ conn_type = conn_col.type
980
+ alter_column_op.existing_type = conn_type
981
+ metadata_type = metadata_col.type
982
+ if conn_type._type_affinity is sqltypes.NullType:
983
+ log.info(
984
+ "Couldn't determine database type " "for column '%s.%s'",
985
+ tname,
986
+ cname,
987
+ )
988
+ return
989
+ if metadata_type._type_affinity is sqltypes.NullType:
990
+ log.info(
991
+ "Column '%s.%s' has no type within " "the model; can't compare",
992
+ tname,
993
+ cname,
994
+ )
995
+ return
996
+
997
+ isdiff = autogen_context.migration_context._compare_type(
998
+ conn_col, metadata_col
999
+ )
1000
+
1001
+ if isdiff:
1002
+ alter_column_op.modify_type = metadata_type
1003
+ log.info(
1004
+ "Detected type change from %r to %r on '%s.%s'",
1005
+ conn_type,
1006
+ metadata_type,
1007
+ tname,
1008
+ cname,
1009
+ )
1010
+
1011
+
1012
+ def _render_server_default_for_compare(
1013
+ metadata_default: Optional[Any], autogen_context: AutogenContext
1014
+ ) -> Optional[str]:
1015
+ if isinstance(metadata_default, sa_schema.DefaultClause):
1016
+ if isinstance(metadata_default.arg, str):
1017
+ metadata_default = metadata_default.arg
1018
+ else:
1019
+ metadata_default = str(
1020
+ metadata_default.arg.compile(
1021
+ dialect=autogen_context.dialect,
1022
+ compile_kwargs={"literal_binds": True},
1023
+ )
1024
+ )
1025
+ if isinstance(metadata_default, str):
1026
+ return metadata_default
1027
+ else:
1028
+ return None
1029
+
1030
+
1031
+ def _normalize_computed_default(sqltext: str) -> str:
1032
+ """we want to warn if a computed sql expression has changed. however
1033
+ we don't want false positives and the warning is not that critical.
1034
+ so filter out most forms of variability from the SQL text.
1035
+
1036
+ """
1037
+
1038
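+ # e.g. both '("A" + "B")' and 'a+b' reduce to 'a+b' here, so quoting,
+ # parenthesization and whitespace differences alone won't trigger the
+ # changed-computed-default warning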
+ return re.sub(r"[ \(\)'\"`\[\]\t\r\n]", "", sqltext).lower()
1039
+
1040
+
1041
+ def _compare_computed_default(
1042
+ autogen_context: AutogenContext,
1043
+ alter_column_op: AlterColumnOp,
1044
+ schema: Optional[str],
1045
+ tname: str,
1046
+ cname: str,
1047
+ conn_col: Column[Any],
1048
+ metadata_col: Column[Any],
1049
+ ) -> None:
1050
+ rendered_metadata_default = str(
1051
+ cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile(
1052
+ dialect=autogen_context.dialect,
1053
+ compile_kwargs={"literal_binds": True},
1054
+ )
1055
+ )
1056
+
1057
+ # since we cannot change computed columns, we do only a crude comparison
1058
+ # here where we try to eliminate syntactical differences in order to
1059
+ # get a minimal comparison just to emit a warning.
1060
+
1061
+ rendered_metadata_default = _normalize_computed_default(
1062
+ rendered_metadata_default
1063
+ )
1064
+
1065
+ if isinstance(conn_col.server_default, sa_schema.Computed):
1066
+ rendered_conn_default = str(
1067
+ conn_col.server_default.sqltext.compile(
1068
+ dialect=autogen_context.dialect,
1069
+ compile_kwargs={"literal_binds": True},
1070
+ )
1071
+ )
1072
+ if rendered_conn_default is None:
1073
+ rendered_conn_default = ""
1074
+ else:
1075
+ rendered_conn_default = _normalize_computed_default(
1076
+ rendered_conn_default
1077
+ )
1078
+ else:
1079
+ rendered_conn_default = ""
1080
+
1081
+ if rendered_metadata_default != rendered_conn_default:
1082
+ _warn_computed_not_supported(tname, cname)
1083
+
1084
+
1085
+ def _warn_computed_not_supported(tname: str, cname: str) -> None:
1086
+ util.warn("Computed default on %s.%s cannot be modified" % (tname, cname))
1087
+
1088
+
1089
+ def _compare_identity_default(
1090
+ autogen_context,
1091
+ alter_column_op,
1092
+ schema,
1093
+ tname,
1094
+ cname,
1095
+ conn_col,
1096
+ metadata_col,
1097
+ ):
1098
+ impl = autogen_context.migration_context.impl
1099
+ diff, ignored_attr, is_alter = impl._compare_identity_default(
1100
+ metadata_col.server_default, conn_col.server_default
1101
+ )
1102
+
1103
+ return diff, is_alter
1104
+
1105
+
1106
+ @comparators.dispatch_for("column")
1107
+ def _compare_server_default(
1108
+ autogen_context: AutogenContext,
1109
+ alter_column_op: AlterColumnOp,
1110
+ schema: Optional[str],
1111
+ tname: Union[quoted_name, str],
1112
+ cname: Union[quoted_name, str],
1113
+ conn_col: Column[Any],
1114
+ metadata_col: Column[Any],
1115
+ ) -> Optional[bool]:
1116
+ metadata_default = metadata_col.server_default
1117
+ conn_col_default = conn_col.server_default
1118
+ if conn_col_default is None and metadata_default is None:
1119
+ return False
1120
+
1121
+ if sqla_compat._server_default_is_computed(metadata_default):
1122
+ return _compare_computed_default( # type:ignore[func-returns-value]
1123
+ autogen_context,
1124
+ alter_column_op,
1125
+ schema,
1126
+ tname,
1127
+ cname,
1128
+ conn_col,
1129
+ metadata_col,
1130
+ )
1131
+ if sqla_compat._server_default_is_computed(conn_col_default):
1132
+ _warn_computed_not_supported(tname, cname)
1133
+ return False
1134
+
1135
+ if sqla_compat._server_default_is_identity(
1136
+ metadata_default, conn_col_default
1137
+ ):
1138
+ alter_column_op.existing_server_default = conn_col_default
1139
+ diff, is_alter = _compare_identity_default(
1140
+ autogen_context,
1141
+ alter_column_op,
1142
+ schema,
1143
+ tname,
1144
+ cname,
1145
+ conn_col,
1146
+ metadata_col,
1147
+ )
1148
+ if is_alter:
1149
+ alter_column_op.modify_server_default = metadata_default
1150
+ if diff:
1151
+ log.info(
1152
+ "Detected server default on column '%s.%s': "
1153
+ "identity options attributes %s",
1154
+ tname,
1155
+ cname,
1156
+ sorted(diff),
1157
+ )
1158
+ else:
1159
+ rendered_metadata_default = _render_server_default_for_compare(
1160
+ metadata_default, autogen_context
1161
+ )
1162
+
1163
+ rendered_conn_default = (
1164
+ cast(Any, conn_col_default).arg.text if conn_col_default else None
1165
+ )
1166
+
1167
+ alter_column_op.existing_server_default = conn_col_default
1168
+
1169
+ is_diff = autogen_context.migration_context._compare_server_default(
1170
+ conn_col,
1171
+ metadata_col,
1172
+ rendered_metadata_default,
1173
+ rendered_conn_default,
1174
+ )
1175
+ if is_diff:
1176
+ alter_column_op.modify_server_default = metadata_default
1177
+ log.info("Detected server default on column '%s.%s'", tname, cname)
1178
+
1179
+ return None
1180
+
1181
+
1182
+ @comparators.dispatch_for("column")
1183
+ def _compare_column_comment(
1184
+ autogen_context: AutogenContext,
1185
+ alter_column_op: AlterColumnOp,
1186
+ schema: Optional[str],
1187
+ tname: Union[quoted_name, str],
1188
+ cname: quoted_name,
1189
+ conn_col: Column[Any],
1190
+ metadata_col: Column[Any],
1191
+ ) -> Optional[Literal[False]]:
1192
+ assert autogen_context.dialect is not None
1193
+ if not autogen_context.dialect.supports_comments:
1194
+ return None
1195
+
1196
+ metadata_comment = metadata_col.comment
1197
+ conn_col_comment = conn_col.comment
1198
+ if conn_col_comment is None and metadata_comment is None:
1199
+ return False
1200
+
1201
+ alter_column_op.existing_comment = conn_col_comment
1202
+
1203
+ if conn_col_comment != metadata_comment:
1204
+ alter_column_op.modify_comment = metadata_comment
1205
+ log.info("Detected column comment '%s.%s'", tname, cname)
1206
+
1207
+ return None
1208
+
1209
+
1210
+ @comparators.dispatch_for("table")
1211
+ def _compare_foreign_keys(
1212
+ autogen_context: AutogenContext,
1213
+ modify_table_ops: ModifyTableOps,
1214
+ schema: Optional[str],
1215
+ tname: Union[quoted_name, str],
1216
+ conn_table: Table,
1217
+ metadata_table: Table,
1218
+ ) -> None:
1219
+ # if we're doing CREATE TABLE, all FKs are created
1220
+ # inline within the table def
1221
+ if conn_table is None or metadata_table is None:
1222
+ return
1223
+
1224
+ inspector = autogen_context.inspector
1225
+ metadata_fks = {
1226
+ fk
1227
+ for fk in metadata_table.constraints
1228
+ if isinstance(fk, sa_schema.ForeignKeyConstraint)
1229
+ }
1230
+
1231
+ conn_fks_list = [
1232
+ fk
1233
+ for fk in _InspectorConv(inspector).get_foreign_keys(
1234
+ tname, schema=schema
1235
+ )
1236
+ if autogen_context.run_name_filters(
1237
+ fk["name"],
1238
+ "foreign_key_constraint",
1239
+ {"table_name": tname, "schema_name": schema},
1240
+ )
1241
+ ]
1242
+
1243
+ conn_fks = {
1244
+ _make_foreign_key(const, conn_table) for const in conn_fks_list
1245
+ }
1246
+
1247
+ impl = autogen_context.migration_context.impl
1248
+
1249
+ # give the dialect a chance to correct the FKs to match more
1250
+ # closely
1251
+ autogen_context.migration_context.impl.correct_for_autogen_foreignkeys(
1252
+ conn_fks, metadata_fks
1253
+ )
1254
+
1255
+ metadata_fks_sig = {
1256
+ impl._create_metadata_constraint_sig(fk) for fk in metadata_fks
1257
+ }
1258
+
1259
+ conn_fks_sig = {
1260
+ impl._create_reflected_constraint_sig(fk) for fk in conn_fks
1261
+ }
1262
+
1263
+ # check if reflected FKs include options, indicating the backend
1264
+ # can reflect FK options
1265
+ if conn_fks_list and "options" in conn_fks_list[0]:
1266
+ conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig}
1267
+ metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig}
1268
+ else:
1269
+ # otherwise compare by sig without options added
1270
+ conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig}
1271
+ metadata_fks_by_sig = {
1272
+ c.unnamed_no_options: c for c in metadata_fks_sig
1273
+ }
1274
+
1275
+ metadata_fks_by_name = {
1276
+ c.name: c for c in metadata_fks_sig if c.name is not None
1277
+ }
1278
+ conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None}
1279
+
1280
+ def _add_fk(obj, compare_to):
1281
+ if autogen_context.run_object_filters(
1282
+ obj.const, obj.name, "foreign_key_constraint", False, compare_to
1283
+ ):
1284
+ modify_table_ops.ops.append(
1285
+ ops.CreateForeignKeyOp.from_constraint(obj.const)
1286
+ )
1287
+
1288
+ log.info(
1289
+ "Detected added foreign key (%s)(%s) on table %s%s",
1290
+ ", ".join(obj.source_columns),
1291
+ ", ".join(obj.target_columns),
1292
+ "%s." % obj.source_schema if obj.source_schema else "",
1293
+ obj.source_table,
1294
+ )
1295
+
1296
+ def _remove_fk(obj, compare_to):
1297
+ if autogen_context.run_object_filters(
1298
+ obj.const, obj.name, "foreign_key_constraint", True, compare_to
1299
+ ):
1300
+ modify_table_ops.ops.append(
1301
+ ops.DropConstraintOp.from_constraint(obj.const)
1302
+ )
1303
+ log.info(
1304
+ "Detected removed foreign key (%s)(%s) on table %s%s",
1305
+ ", ".join(obj.source_columns),
1306
+ ", ".join(obj.target_columns),
1307
+ "%s." % obj.source_schema if obj.source_schema else "",
1308
+ obj.source_table,
1309
+ )
1310
+
1311
+ # so far it appears we don't need to do this by name at all.
1312
+ # SQLite doesn't preserve constraint names anyway
1313
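+ # i.e. FKs are matched purely on signature (columns, target, options);
+ # the by-name lookups above only supply a compare_to object for the
+ # object filters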
+
1314
+ for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig):
1315
+ const = conn_fks_by_sig[removed_sig]
1316
+ if removed_sig not in metadata_fks_by_sig:
1317
+ compare_to = (
1318
+ metadata_fks_by_name[const.name].const
1319
+ if const.name in metadata_fks_by_name
1320
+ else None
1321
+ )
1322
+ _remove_fk(const, compare_to)
1323
+
1324
+ for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig):
1325
+ const = metadata_fks_by_sig[added_sig]
1326
+ if added_sig not in conn_fks_by_sig:
1327
+ compare_to = (
1328
+ conn_fks_by_name[const.name].const
1329
+ if const.name in conn_fks_by_name
1330
+ else None
1331
+ )
1332
+ _add_fk(const, compare_to)
1333
+
1334
+
1335
+ @comparators.dispatch_for("table")
1336
+ def _compare_table_comment(
1337
+ autogen_context: AutogenContext,
1338
+ modify_table_ops: ModifyTableOps,
1339
+ schema: Optional[str],
1340
+ tname: Union[quoted_name, str],
1341
+ conn_table: Optional[Table],
1342
+ metadata_table: Optional[Table],
1343
+ ) -> None:
1344
+ assert autogen_context.dialect is not None
1345
+ if not autogen_context.dialect.supports_comments:
1346
+ return
1347
+
1348
+ # if we're doing CREATE TABLE, comments will be created inline
1349
+ # with the create_table op.
1350
+ if conn_table is None or metadata_table is None:
1351
+ return
1352
+
1353
+ if conn_table.comment is None and metadata_table.comment is None:
1354
+ return
1355
+
1356
+ if metadata_table.comment is None and conn_table.comment is not None:
1357
+ modify_table_ops.ops.append(
1358
+ ops.DropTableCommentOp(
1359
+ tname, existing_comment=conn_table.comment, schema=schema
1360
+ )
1361
+ )
1362
+ elif metadata_table.comment != conn_table.comment:
1363
+ modify_table_ops.ops.append(
1364
+ ops.CreateTableCommentOp(
1365
+ tname,
1366
+ metadata_table.comment,
1367
+ existing_comment=conn_table.comment,
1368
+ schema=schema,
1369
+ )
1370
+ )
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/render.py ADDED
@@ -0,0 +1,1172 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ from io import StringIO
7
+ import re
8
+ from typing import Any
9
+ from typing import cast
10
+ from typing import Dict
11
+ from typing import List
12
+ from typing import Optional
13
+ from typing import Tuple
14
+ from typing import TYPE_CHECKING
15
+ from typing import Union
16
+
17
+ from mako.pygen import PythonPrinter
18
+ from sqlalchemy import schema as sa_schema
19
+ from sqlalchemy import sql
20
+ from sqlalchemy import types as sqltypes
21
+ from sqlalchemy.sql.base import _DialectArgView
22
+ from sqlalchemy.sql.elements import conv
23
+ from sqlalchemy.sql.elements import Label
24
+ from sqlalchemy.sql.elements import quoted_name
25
+
26
+ from .. import util
27
+ from ..operations import ops
28
+ from ..util import sqla_compat
29
+
30
+ if TYPE_CHECKING:
31
+ from typing import Literal
32
+
33
+ from sqlalchemy import Computed
34
+ from sqlalchemy import Identity
35
+ from sqlalchemy.sql.elements import ColumnElement
36
+ from sqlalchemy.sql.elements import TextClause
37
+ from sqlalchemy.sql.schema import CheckConstraint
38
+ from sqlalchemy.sql.schema import Column
39
+ from sqlalchemy.sql.schema import Constraint
40
+ from sqlalchemy.sql.schema import FetchedValue
41
+ from sqlalchemy.sql.schema import ForeignKey
42
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
43
+ from sqlalchemy.sql.schema import Index
44
+ from sqlalchemy.sql.schema import MetaData
45
+ from sqlalchemy.sql.schema import PrimaryKeyConstraint
46
+ from sqlalchemy.sql.schema import UniqueConstraint
47
+ from sqlalchemy.sql.sqltypes import ARRAY
48
+ from sqlalchemy.sql.type_api import TypeEngine
49
+
50
+ from alembic.autogenerate.api import AutogenContext
51
+ from alembic.config import Config
52
+ from alembic.operations.ops import MigrationScript
53
+ from alembic.operations.ops import ModifyTableOps
54
+
55
+
56
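+ # CPython historically capped explicit call arguments at 255 (lifted in
+ # 3.7); _add_table below switches to a single *[...] splat argument when
+ # a rendered table would exceed this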
+ MAX_PYTHON_ARGS = 255
57
+
58
+
59
+ def _render_gen_name(
60
+ autogen_context: AutogenContext,
61
+ name: sqla_compat._ConstraintName,
62
+ ) -> Optional[Union[quoted_name, str, _f_name]]:
63
+ if isinstance(name, conv):
64
+ return _f_name(_alembic_autogenerate_prefix(autogen_context), name)
65
+ else:
66
+ return sqla_compat.constraint_name_or_none(name)
67
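+ # e.g. a naming-convention name arrives as conv("uq_user_name") and is
+ # rendered through f() ("use exactly this name"), i.e. op.f('uq_user_name')
+ # with the default alembic prefix; plain string names pass through as-is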
+
68
+
69
+ def _indent(text: str) -> str:
70
+ text = re.compile(r"^", re.M).sub("    ", text).strip()
71
+ text = re.compile(r" +$", re.M).sub("", text)
72
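+ # e.g. "op.x()\nop.y()" -> "op.x()\n    op.y()": every line gains four
+ # spaces, then leading/trailing whitespace of the block is stripped
+ # (the migration script template supplies the first line's indent)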
+ return text
73
+
74
+
75
+ def _render_python_into_templatevars(
76
+ autogen_context: AutogenContext,
77
+ migration_script: MigrationScript,
78
+ template_args: Dict[str, Union[str, Config]],
79
+ ) -> None:
80
+ imports = autogen_context.imports
81
+
82
+ for upgrade_ops, downgrade_ops in zip(
83
+ migration_script.upgrade_ops_list, migration_script.downgrade_ops_list
84
+ ):
85
+ template_args[upgrade_ops.upgrade_token] = _indent(
86
+ _render_cmd_body(upgrade_ops, autogen_context)
87
+ )
88
+ template_args[downgrade_ops.downgrade_token] = _indent(
89
+ _render_cmd_body(downgrade_ops, autogen_context)
90
+ )
91
+ template_args["imports"] = "\n".join(sorted(imports))
92
+
93
+
94
+ default_renderers = renderers = util.Dispatcher()
95
+
96
+
97
+ def _render_cmd_body(
98
+ op_container: ops.OpContainer,
99
+ autogen_context: AutogenContext,
100
+ ) -> str:
101
+ buf = StringIO()
102
+ printer = PythonPrinter(buf)
103
+
104
+ printer.writeline(
105
+ "# ### commands auto generated by Alembic - please adjust! ###"
106
+ )
107
+
108
+ has_lines = False
109
+ for op in op_container.ops:
110
+ lines = render_op(autogen_context, op)
111
+ has_lines = has_lines or bool(lines)
112
+
113
+ for line in lines:
114
+ printer.writeline(line)
115
+
116
+ if not has_lines:
117
+ printer.writeline("pass")
118
+
119
+ printer.writeline("# ### end Alembic commands ###")
120
+
121
+ return buf.getvalue()
122
+
123
+
124
+ def render_op(
125
+ autogen_context: AutogenContext, op: ops.MigrateOperation
126
+ ) -> List[str]:
127
+ renderer = renderers.dispatch(op)
128
+ lines = util.to_list(renderer(autogen_context, op))
129
+ return lines
130
+
131
+
132
+ def render_op_text(
133
+ autogen_context: AutogenContext, op: ops.MigrateOperation
134
+ ) -> str:
135
+ return "\n".join(render_op(autogen_context, op))
136
+
137
+
138
+ @renderers.dispatch_for(ops.ModifyTableOps)
139
+ def _render_modify_table(
140
+ autogen_context: AutogenContext, op: ModifyTableOps
141
+ ) -> List[str]:
142
+ opts = autogen_context.opts
143
+ render_as_batch = opts.get("render_as_batch", False)
144
+
145
+ if op.ops:
146
+ lines = []
147
+ if render_as_batch:
148
+ with autogen_context._within_batch():
149
+ lines.append(
150
+ "with op.batch_alter_table(%r, schema=%r) as batch_op:"
151
+ % (op.table_name, op.schema)
152
+ )
153
+ for t_op in op.ops:
154
+ t_lines = render_op(autogen_context, t_op)
155
+ lines.extend(t_lines)
156
+ lines.append("")
157
+ else:
158
+ for t_op in op.ops:
159
+ t_lines = render_op(autogen_context, t_op)
160
+ lines.extend(t_lines)
161
+
162
+ return lines
163
+ else:
164
+ return []
165
+
166
+
167
+ @renderers.dispatch_for(ops.CreateTableCommentOp)
168
+ def _render_create_table_comment(
169
+ autogen_context: AutogenContext, op: ops.CreateTableCommentOp
170
+ ) -> str:
171
+ if autogen_context._has_batch:
172
+ templ = (
173
+ "{prefix}create_table_comment(\n"
174
+ "{indent}{comment},\n"
175
+ "{indent}existing_comment={existing}\n"
176
+ ")"
177
+ )
178
+ else:
179
+ templ = (
180
+ "{prefix}create_table_comment(\n"
181
+ "{indent}'{tname}',\n"
182
+ "{indent}{comment},\n"
183
+ "{indent}existing_comment={existing},\n"
184
+ "{indent}schema={schema}\n"
185
+ ")"
186
+ )
187
+ return templ.format(
188
+ prefix=_alembic_autogenerate_prefix(autogen_context),
189
+ tname=op.table_name,
190
+ comment="%r" % op.comment if op.comment is not None else None,
191
+ existing=(
192
+ "%r" % op.existing_comment
193
+ if op.existing_comment is not None
194
+ else None
195
+ ),
196
+ schema="'%s'" % op.schema if op.schema is not None else None,
197
+ indent="    ",
198
+ )
199
+
200
+
201
+ @renderers.dispatch_for(ops.DropTableCommentOp)
202
+ def _render_drop_table_comment(
203
+ autogen_context: AutogenContext, op: ops.DropTableCommentOp
204
+ ) -> str:
205
+ if autogen_context._has_batch:
206
+ templ = (
207
+ "{prefix}drop_table_comment(\n"
208
+ "{indent}existing_comment={existing}\n"
209
+ ")"
210
+ )
211
+ else:
212
+ templ = (
213
+ "{prefix}drop_table_comment(\n"
214
+ "{indent}'{tname}',\n"
215
+ "{indent}existing_comment={existing},\n"
216
+ "{indent}schema={schema}\n"
217
+ ")"
218
+ )
219
+ return templ.format(
220
+ prefix=_alembic_autogenerate_prefix(autogen_context),
221
+ tname=op.table_name,
222
+ existing=(
223
+ "%r" % op.existing_comment
224
+ if op.existing_comment is not None
225
+ else None
226
+ ),
227
+ schema="'%s'" % op.schema if op.schema is not None else None,
228
+ indent="    ",
229
+ )
230
+
231
+
232
+ @renderers.dispatch_for(ops.CreateTableOp)
233
+ def _add_table(autogen_context: AutogenContext, op: ops.CreateTableOp) -> str:
234
+ table = op.to_table()
235
+
236
+ args = [
237
+ col
238
+ for col in [
239
+ _render_column(col, autogen_context) for col in table.columns
240
+ ]
241
+ if col
242
+ ] + sorted(
243
+ [
244
+ rcons
245
+ for rcons in [
246
+ _render_constraint(
247
+ cons, autogen_context, op._namespace_metadata
248
+ )
249
+ for cons in table.constraints
250
+ ]
251
+ if rcons is not None
252
+ ]
253
+ )
254
+
255
+ if len(args) > MAX_PYTHON_ARGS:
256
+ args_str = "*[" + ",\n".join(args) + "]"
257
+ else:
258
+ args_str = ",\n".join(args)
259
+
260
+ text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % {
261
+ "tablename": _ident(op.table_name),
262
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
263
+ "args": args_str,
264
+ }
265
+ if op.schema:
266
+ text += ",\nschema=%r" % _ident(op.schema)
267
+
268
+ comment = table.comment
269
+ if comment:
270
+ text += ",\ncomment=%r" % _ident(comment)
271
+
272
+ info = table.info
273
+ if info:
274
+ text += f",\ninfo={info!r}"
275
+
276
+ for k in sorted(op.kw):
277
+ text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k])
278
+
279
+ if table._prefixes:
280
+ prefixes = ", ".join("'%s'" % p for p in table._prefixes)
281
+ text += ",\nprefixes=[%s]" % prefixes
282
+
283
+ if op.if_not_exists is not None:
284
+ text += ",\nif_not_exists=%r" % bool(op.if_not_exists)
285
+
286
+ text += "\n)"
287
+ return text
288
+
289
+
290
+ @renderers.dispatch_for(ops.DropTableOp)
291
+ def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str:
292
+ text = "%(prefix)sdrop_table(%(tname)r" % {
293
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
294
+ "tname": _ident(op.table_name),
295
+ }
296
+ if op.schema:
297
+ text += ", schema=%r" % _ident(op.schema)
298
+
299
+ if op.if_exists is not None:
300
+ text += ", if_exists=%r" % bool(op.if_exists)
301
+
302
+ text += ")"
303
+ return text
304
+
305
+
306
+ def _render_dialect_kwargs_items(
307
+ autogen_context: AutogenContext, dialect_kwargs: _DialectArgView
308
+ ) -> list[str]:
309
+ return [
310
+ f"{key}={_render_potential_expr(val, autogen_context)}"
311
+ for key, val in dialect_kwargs.items()
312
+ ]
313
+
314
+
315
+ @renderers.dispatch_for(ops.CreateIndexOp)
316
+ def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str:
317
+ index = op.to_index()
318
+
319
+ has_batch = autogen_context._has_batch
320
+
321
+ if has_batch:
322
+ tmpl = (
323
+ "%(prefix)screate_index(%(name)r, [%(columns)s], "
324
+ "unique=%(unique)r%(kwargs)s)"
325
+ )
326
+ else:
327
+ tmpl = (
328
+ "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], "
329
+ "unique=%(unique)r%(schema)s%(kwargs)s)"
330
+ )
331
+
332
+ assert index.table is not None
333
+
334
+ opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs)
335
+ if op.if_not_exists is not None:
336
+ opts.append("if_not_exists=%r" % bool(op.if_not_exists))
337
+ text = tmpl % {
338
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
339
+ "name": _render_gen_name(autogen_context, index.name),
340
+ "table": _ident(index.table.name),
341
+ "columns": ", ".join(
342
+ _get_index_rendered_expressions(index, autogen_context)
343
+ ),
344
+ "unique": index.unique or False,
345
+ "schema": (
346
+ (", schema=%r" % _ident(index.table.schema))
347
+ if index.table.schema
348
+ else ""
349
+ ),
350
+ "kwargs": ", " + ", ".join(opts) if opts else "",
351
+ }
352
+ return text
353
+
354
+
355
+ @renderers.dispatch_for(ops.DropIndexOp)
356
+ def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str:
357
+ index = op.to_index()
358
+
359
+ has_batch = autogen_context._has_batch
360
+
361
+ if has_batch:
362
+ tmpl = "%(prefix)sdrop_index(%(name)r%(kwargs)s)"
363
+ else:
364
+ tmpl = (
365
+ "%(prefix)sdrop_index(%(name)r, "
366
+ "table_name=%(table_name)r%(schema)s%(kwargs)s)"
367
+ )
368
+ opts = _render_dialect_kwargs_items(autogen_context, index.dialect_kwargs)
369
+ if op.if_exists is not None:
370
+ opts.append("if_exists=%r" % bool(op.if_exists))
371
+ text = tmpl % {
372
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
373
+ "name": _render_gen_name(autogen_context, op.index_name),
374
+ "table_name": _ident(op.table_name),
375
+ "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""),
376
+ "kwargs": ", " + ", ".join(opts) if opts else "",
377
+ }
378
+ return text
379
+
380
+
381
+ @renderers.dispatch_for(ops.CreateUniqueConstraintOp)
382
+ def _add_unique_constraint(
383
+ autogen_context: AutogenContext, op: ops.CreateUniqueConstraintOp
384
+ ) -> List[str]:
385
+ return [_uq_constraint(op.to_constraint(), autogen_context, True)]
386
+
387
+
388
+ @renderers.dispatch_for(ops.CreateForeignKeyOp)
389
+ def _add_fk_constraint(
390
+ autogen_context: AutogenContext, op: ops.CreateForeignKeyOp
391
+ ) -> str:
392
+ constraint = op.to_constraint()
393
+ args = [repr(_render_gen_name(autogen_context, op.constraint_name))]
394
+ if not autogen_context._has_batch:
395
+ args.append(repr(_ident(op.source_table)))
396
+
397
+ args.extend(
398
+ [
399
+ repr(_ident(op.referent_table)),
400
+ repr([_ident(col) for col in op.local_cols]),
401
+ repr([_ident(col) for col in op.remote_cols]),
402
+ ]
403
+ )
404
+ kwargs = [
405
+ "referent_schema",
406
+ "onupdate",
407
+ "ondelete",
408
+ "initially",
409
+ "deferrable",
410
+ "use_alter",
411
+ "match",
412
+ ]
413
+ if not autogen_context._has_batch:
414
+ kwargs.insert(0, "source_schema")
415
+
416
+ for k in kwargs:
417
+ if k in op.kw:
418
+ value = op.kw[k]
419
+ if value is not None:
420
+ args.append("%s=%r" % (k, value))
421
+
422
+ dialect_kwargs = _render_dialect_kwargs_items(
423
+ autogen_context, constraint.dialect_kwargs
424
+ )
425
+
426
+ return "%(prefix)screate_foreign_key(%(args)s%(dialect_kwargs)s)" % {
427
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
428
+ "args": ", ".join(args),
429
+ "dialect_kwargs": (
430
+ ", " + ", ".join(dialect_kwargs) if dialect_kwargs else ""
431
+ ),
432
+ }
433
+
434
+
435
+ @renderers.dispatch_for(ops.CreatePrimaryKeyOp)
436
+ def _add_pk_constraint(constraint, autogen_context):
437
+ raise NotImplementedError()
438
+
439
+
440
+ @renderers.dispatch_for(ops.CreateCheckConstraintOp)
441
+ def _add_check_constraint(constraint, autogen_context):
442
+ raise NotImplementedError()
443
+
444
+
445
+ @renderers.dispatch_for(ops.DropConstraintOp)
446
+ def _drop_constraint(
447
+ autogen_context: AutogenContext, op: ops.DropConstraintOp
448
+ ) -> str:
449
+ prefix = _alembic_autogenerate_prefix(autogen_context)
450
+ name = _render_gen_name(autogen_context, op.constraint_name)
451
+ schema = _ident(op.schema) if op.schema else None
452
+ type_ = _ident(op.constraint_type) if op.constraint_type else None
453
+ if_exists = op.if_exists
454
+ params_strs = []
455
+ params_strs.append(repr(name))
456
+ if not autogen_context._has_batch:
457
+ params_strs.append(repr(_ident(op.table_name)))
458
+ if schema is not None:
459
+ params_strs.append(f"schema={schema!r}")
460
+ if type_ is not None:
461
+ params_strs.append(f"type_={type_!r}")
462
+ if if_exists is not None:
463
+ params_strs.append(f"if_exists={if_exists}")
464
+
465
+ return f"{prefix}drop_constraint({', '.join(params_strs)})"
466
+
467
+
468
+ @renderers.dispatch_for(ops.AddColumnOp)
469
+ def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str:
470
+ schema, tname, column, if_not_exists = (
471
+ op.schema,
472
+ op.table_name,
473
+ op.column,
474
+ op.if_not_exists,
475
+ )
476
+ if autogen_context._has_batch:
477
+ template = "%(prefix)sadd_column(%(column)s)"
478
+ else:
479
+ template = "%(prefix)sadd_column(%(tname)r, %(column)s"
480
+ if schema:
481
+ template += ", schema=%(schema)r"
482
+ if if_not_exists is not None:
483
+ template += ", if_not_exists=%(if_not_exists)r"
484
+ template += ")"
485
+ text = template % {
486
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
487
+ "tname": tname,
488
+ "column": _render_column(column, autogen_context),
489
+ "schema": schema,
490
+ "if_not_exists": if_not_exists,
491
+ }
492
+ return text
493
+
494
+
495
+ @renderers.dispatch_for(ops.DropColumnOp)
496
+ def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str:
497
+ schema, tname, column_name, if_exists = (
498
+ op.schema,
499
+ op.table_name,
500
+ op.column_name,
501
+ op.if_exists,
502
+ )
503
+
504
+ if autogen_context._has_batch:
505
+ template = "%(prefix)sdrop_column(%(cname)r)"
506
+ else:
507
+ template = "%(prefix)sdrop_column(%(tname)r, %(cname)r"
508
+ if schema:
509
+ template += ", schema=%(schema)r"
510
+ if if_exists is not None:
511
+ template += ", if_exists=%(if_exists)r"
512
+ template += ")"
513
+
514
+ text = template % {
515
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
516
+ "tname": _ident(tname),
517
+ "cname": _ident(column_name),
518
+ "schema": _ident(schema),
519
+ "if_exists": if_exists,
520
+ }
521
+ return text
522
+
523
+
524
+ @renderers.dispatch_for(ops.AlterColumnOp)
525
+ def _alter_column(
526
+ autogen_context: AutogenContext, op: ops.AlterColumnOp
527
+ ) -> str:
528
+ tname = op.table_name
529
+ cname = op.column_name
530
+ server_default = op.modify_server_default
531
+ type_ = op.modify_type
532
+ nullable = op.modify_nullable
533
+ comment = op.modify_comment
534
+ newname = op.modify_name
535
+ autoincrement = op.kw.get("autoincrement", None)
536
+ existing_type = op.existing_type
537
+ existing_nullable = op.existing_nullable
538
+ existing_comment = op.existing_comment
539
+ existing_server_default = op.existing_server_default
540
+ schema = op.schema
541
+
542
+ indent = " " * 11
543
+
544
+ if autogen_context._has_batch:
545
+ template = "%(prefix)salter_column(%(cname)r"
546
+ else:
547
+ template = "%(prefix)salter_column(%(tname)r, %(cname)r"
548
+
549
+ text = template % {
550
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
551
+ "tname": tname,
552
+ "cname": cname,
553
+ }
554
+ if existing_type is not None:
555
+ text += ",\n%sexisting_type=%s" % (
556
+ indent,
557
+ _repr_type(existing_type, autogen_context),
558
+ )
559
+ if server_default is not False:
560
+ rendered = _render_server_default(server_default, autogen_context)
561
+ text += ",\n%sserver_default=%s" % (indent, rendered)
562
+
563
+ if newname is not None:
564
+ text += ",\n%snew_column_name=%r" % (indent, newname)
565
+ if type_ is not None:
566
+ text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context))
567
+ if nullable is not None:
568
+ text += ",\n%snullable=%r" % (indent, nullable)
569
+ if comment is not False:
570
+ text += ",\n%scomment=%r" % (indent, comment)
571
+ if existing_comment is not None:
572
+ text += ",\n%sexisting_comment=%r" % (indent, existing_comment)
573
+ if nullable is None and existing_nullable is not None:
574
+ text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable)
575
+ if autoincrement is not None:
576
+ text += ",\n%sautoincrement=%r" % (indent, autoincrement)
577
+ if server_default is False and existing_server_default:
578
+ rendered = _render_server_default(
579
+ existing_server_default, autogen_context
580
+ )
581
+ text += ",\n%sexisting_server_default=%s" % (indent, rendered)
582
+ if schema and not autogen_context._has_batch:
583
+ text += ",\n%sschema=%r" % (indent, schema)
584
+ text += ")"
585
+ return text
586
+
587
+
588
+ class _f_name:
589
+ def __init__(self, prefix: str, name: conv) -> None:
590
+ self.prefix = prefix
591
+ self.name = name
592
+
593
+ def __repr__(self) -> str:
594
+ return "%sf(%r)" % (self.prefix, _ident(self.name))
595
+
596
+
597
+ def _ident(name: Optional[Union[quoted_name, str]]) -> Optional[str]:
598
+ """produce a __repr__() object for a string identifier that may
599
+ use quoted_name() in SQLAlchemy 0.9 and greater.
600
+
601
+ The issue worked around here is that quoted_name() doesn't have
602
+ very good repr() behavior by itself when unicode is involved.
603
+
604
+ """
605
+ if name is None:
606
+ return name
607
+ elif isinstance(name, quoted_name):
608
+ return str(name)
609
+ elif isinstance(name, str):
610
+ return name
611
+
612
+
613
+ def _render_potential_expr(
614
+ value: Any,
615
+ autogen_context: AutogenContext,
616
+ *,
617
+ wrap_in_element: bool = True,
618
+ is_server_default: bool = False,
619
+ is_index: bool = False,
620
+ ) -> str:
621
+ if isinstance(value, sql.ClauseElement):
622
+ sql_text = autogen_context.migration_context.impl.render_ddl_sql_expr(
623
+ value, is_server_default=is_server_default, is_index=is_index
624
+ )
625
+ if wrap_in_element:
626
+ prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
627
+ element = "literal_column" if is_index else "text"
628
+ value_str = f"{prefix}{element}({sql_text!r})"
629
+ if (
630
+ is_index
631
+ and isinstance(value, Label)
632
+ and type(value.name) is str
633
+ ):
634
+ return value_str + f".label({value.name!r})"
635
+ else:
636
+ return value_str
637
+ else:
638
+ return repr(sql_text)
639
+ else:
640
+ return repr(value)
641
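+ # e.g. a server default of text("now()") renders as sa.text('now()')
+ # (with the default "sa." module prefix), while index expressions render
+ # as literal_column(...) so they remain valid create_index arguments;
+ # non-ClauseElement values fall back to plain repr()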
+
642
+
643
+ def _get_index_rendered_expressions(
644
+ idx: Index, autogen_context: AutogenContext
645
+ ) -> List[str]:
646
+ return [
647
+ (
648
+ repr(_ident(getattr(exp, "name", None)))
649
+ if isinstance(exp, sa_schema.Column)
650
+ else _render_potential_expr(exp, autogen_context, is_index=True)
651
+ )
652
+ for exp in idx.expressions
653
+ ]
654
+
655
+
656
+ def _uq_constraint(
657
+ constraint: UniqueConstraint,
658
+ autogen_context: AutogenContext,
659
+ alter: bool,
660
+ ) -> str:
661
+ opts: List[Tuple[str, Any]] = []
662
+
663
+ has_batch = autogen_context._has_batch
664
+
665
+ if constraint.deferrable:
666
+ opts.append(("deferrable", constraint.deferrable))
667
+ if constraint.initially:
668
+ opts.append(("initially", constraint.initially))
669
+ if not has_batch and alter and constraint.table.schema:
670
+ opts.append(("schema", _ident(constraint.table.schema)))
671
+ if not alter and constraint.name:
672
+ opts.append(
673
+ ("name", _render_gen_name(autogen_context, constraint.name))
674
+ )
675
+ dialect_options = _render_dialect_kwargs_items(
676
+ autogen_context, constraint.dialect_kwargs
677
+ )
678
+
679
+ if alter:
680
+ args = [repr(_render_gen_name(autogen_context, constraint.name))]
681
+ if not has_batch:
682
+ args += [repr(_ident(constraint.table.name))]
683
+ args.append(repr([_ident(col.name) for col in constraint.columns]))
684
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
685
+ args.extend(dialect_options)
686
+ return "%(prefix)screate_unique_constraint(%(args)s)" % {
687
+ "prefix": _alembic_autogenerate_prefix(autogen_context),
688
+ "args": ", ".join(args),
689
+ }
690
+ else:
691
+ args = [repr(_ident(col.name)) for col in constraint.columns]
692
+ args.extend(["%s=%r" % (k, v) for k, v in opts])
693
+ args.extend(dialect_options)
694
+ return "%(prefix)sUniqueConstraint(%(args)s)" % {
695
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
696
+ "args": ", ".join(args),
697
+ }
698
+
699
+
700
+ def _user_autogenerate_prefix(autogen_context, target):
701
+ prefix = autogen_context.opts["user_module_prefix"]
702
+ if prefix is None:
703
+ return "%s." % target.__module__
704
+ else:
705
+ return prefix
706
+
707
+
708
+ def _sqlalchemy_autogenerate_prefix(autogen_context: AutogenContext) -> str:
709
+ return autogen_context.opts["sqlalchemy_module_prefix"] or ""
710
+
711
+
712
+ def _alembic_autogenerate_prefix(autogen_context: AutogenContext) -> str:
713
+ if autogen_context._has_batch:
714
+ return "batch_op."
715
+ else:
716
+ return autogen_context.opts["alembic_module_prefix"] or ""
717
+
718
+
719
+ def _user_defined_render(
720
+ type_: str, object_: Any, autogen_context: AutogenContext
721
+ ) -> Union[str, Literal[False]]:
722
+ if "render_item" in autogen_context.opts:
723
+ render = autogen_context.opts["render_item"]
724
+ if render:
725
+ rendered = render(type_, object_, autogen_context)
726
+ if rendered is not False:
727
+ return rendered
728
+ return False
729
+
730
+
731
+ def _render_column(
732
+ column: Column[Any], autogen_context: AutogenContext
733
+ ) -> str:
734
+ rendered = _user_defined_render("column", column, autogen_context)
735
+ if rendered is not False:
736
+ return rendered
737
+
738
+ args: List[str] = []
739
+ opts: List[Tuple[str, Any]] = []
740
+
741
+ if column.server_default:
742
+ rendered = _render_server_default( # type:ignore[assignment]
743
+ column.server_default, autogen_context
744
+ )
745
+ if rendered:
746
+ if _should_render_server_default_positionally(
747
+ column.server_default
748
+ ):
749
+ args.append(rendered)
750
+ else:
751
+ opts.append(("server_default", rendered))
752
+
753
+ if (
754
+ column.autoincrement is not None
755
+ and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT
756
+ ):
757
+ opts.append(("autoincrement", column.autoincrement))
758
+
759
+ if column.nullable is not None:
760
+ opts.append(("nullable", column.nullable))
761
+
762
+ if column.system:
763
+ opts.append(("system", column.system))
764
+
765
+ comment = column.comment
766
+ if comment:
767
+ opts.append(("comment", "%r" % comment))
768
+
769
+ # TODO: for non-ascii colname, assign a "key"
770
+ return "%(prefix)sColumn(%(name)r, %(type)s, %(args)s%(kwargs)s)" % {
771
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
772
+ "name": _ident(column.name),
773
+ "type": _repr_type(column.type, autogen_context),
774
+ "args": ", ".join([str(arg) for arg in args]) + ", " if args else "",
775
+ "kwargs": (
776
+ ", ".join(
777
+ ["%s=%s" % (kwname, val) for kwname, val in opts]
778
+ + [
779
+ "%s=%s"
780
+ % (key, _render_potential_expr(val, autogen_context))
781
+ for key, val in column.kwargs.items()
782
+ ]
783
+ )
784
+ ),
785
+ }
786
+
787
+
788
+ def _should_render_server_default_positionally(server_default: Any) -> bool:
789
+ return sqla_compat._server_default_is_computed(
790
+ server_default
791
+ ) or sqla_compat._server_default_is_identity(server_default)
792
+
793
+
794
+ def _render_server_default(
795
+ default: Optional[
796
+ Union[FetchedValue, str, TextClause, ColumnElement[Any]]
797
+ ],
798
+ autogen_context: AutogenContext,
799
+ repr_: bool = True,
800
+ ) -> Optional[str]:
801
+ rendered = _user_defined_render("server_default", default, autogen_context)
802
+ if rendered is not False:
803
+ return rendered
804
+
805
+ if sqla_compat._server_default_is_computed(default):
806
+ return _render_computed(cast("Computed", default), autogen_context)
807
+ elif sqla_compat._server_default_is_identity(default):
808
+ return _render_identity(cast("Identity", default), autogen_context)
809
+ elif isinstance(default, sa_schema.DefaultClause):
810
+ if isinstance(default.arg, str):
811
+ default = default.arg
812
+ else:
813
+ return _render_potential_expr(
814
+ default.arg, autogen_context, is_server_default=True
815
+ )
816
+ elif isinstance(default, sa_schema.FetchedValue):
817
+ return _render_fetched_value(autogen_context)
818
+
819
+ if isinstance(default, str) and repr_:
820
+ default = repr(re.sub(r"^'|'$", "", default))
821
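+ # e.g. a reflected string default of "'0'" has its outer quotes stripped
+ # and is re-repr'ed, rendering as '0' rather than doubly quoted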
+
822
+ return cast(str, default)
823
+
824
+
825
+ def _render_computed(
826
+ computed: Computed, autogen_context: AutogenContext
827
+ ) -> str:
828
+ text = _render_potential_expr(
829
+ computed.sqltext, autogen_context, wrap_in_element=False
830
+ )
831
+
832
+ kwargs = {}
833
+ if computed.persisted is not None:
834
+ kwargs["persisted"] = computed.persisted
835
+ return "%(prefix)sComputed(%(text)s, %(kwargs)s)" % {
836
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
837
+ "text": text,
838
+ "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())),
839
+ }
840
+
841
+
842
+ def _render_identity(
843
+ identity: Identity, autogen_context: AutogenContext
844
+ ) -> str:
845
+ kwargs = sqla_compat._get_identity_options_dict(
846
+ identity, dialect_kwargs=True
847
+ )
848
+
849
+ return "%(prefix)sIdentity(%(kwargs)s)" % {
850
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
851
+ "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())),
852
+ }
853
+
854
+
855
+ def _render_fetched_value(autogen_context: AutogenContext) -> str:
856
+ return "%(prefix)sFetchedValue()" % {
857
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
858
+ }
859
+
860
+
861
+ def _repr_type(
862
+ type_: TypeEngine,
863
+ autogen_context: AutogenContext,
864
+ _skip_variants: bool = False,
865
+ ) -> str:
866
+ rendered = _user_defined_render("type", type_, autogen_context)
867
+ if rendered is not False:
868
+ return rendered
869
+
870
+ if hasattr(autogen_context.migration_context, "impl"):
871
+ impl_rt = autogen_context.migration_context.impl.render_type(
872
+ type_, autogen_context
873
+ )
874
+ else:
875
+ impl_rt = None
876
+
877
+ mod = type(type_).__module__
878
+ imports = autogen_context.imports
879
+
880
+ if not _skip_variants and sqla_compat._type_has_variants(type_):
881
+ return _render_Variant_type(type_, autogen_context)
882
+ elif mod.startswith("sqlalchemy.dialects"):
883
+ match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod)
884
+ assert match is not None
885
+ dname = match.group(1)
886
+ if imports is not None:
887
+ imports.add("from sqlalchemy.dialects import %s" % dname)
888
+ if impl_rt:
889
+ return impl_rt
890
+ else:
891
+ return "%s.%r" % (dname, type_)
892
+ elif impl_rt:
893
+ return impl_rt
894
+ elif mod.startswith("sqlalchemy."):
895
+ if "_render_%s_type" % type_.__visit_name__ in globals():
896
+ fn = globals()["_render_%s_type" % type_.__visit_name__]
897
+ return fn(type_, autogen_context)
898
+ else:
899
+ prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
900
+ return "%s%r" % (prefix, type_)
901
+ else:
902
+ prefix = _user_autogenerate_prefix(autogen_context, type_)
903
+ return "%s%r" % (prefix, type_)
904
+
905
+
906
+ def _render_ARRAY_type(type_: ARRAY, autogen_context: AutogenContext) -> str:
907
+ return cast(
908
+ str,
909
+ _render_type_w_subtype(
910
+ type_, autogen_context, "item_type", r"(.+?\()"
911
+ ),
912
+ )
913
+
914
+
915
+ def _render_Variant_type(
916
+ type_: TypeEngine, autogen_context: AutogenContext
917
+ ) -> str:
918
+ base_type, variant_mapping = sqla_compat._get_variant_mapping(type_)
919
+ base = _repr_type(base_type, autogen_context, _skip_variants=True)
920
+ assert base is not None and base is not False # type: ignore[comparison-overlap] # noqa:E501
921
+ for dialect in sorted(variant_mapping):
922
+ typ = variant_mapping[dialect]
923
+ base += ".with_variant(%s, %r)" % (
924
+ _repr_type(typ, autogen_context, _skip_variants=True),
925
+ dialect,
926
+ )
927
+ return base
928
+
929
+
930
+ def _render_type_w_subtype(
931
+ type_: TypeEngine,
932
+ autogen_context: AutogenContext,
933
+ attrname: str,
934
+ regexp: str,
935
+ prefix: Optional[str] = None,
936
+ ) -> Union[Optional[str], Literal[False]]:
937
+ outer_repr = repr(type_)
938
+ inner_type = getattr(type_, attrname, None)
939
+ if inner_type is None:
940
+ return False
941
+
942
+ inner_repr = repr(inner_type)
943
+
944
+ inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr)
945
+ sub_type = _repr_type(getattr(type_, attrname), autogen_context)
946
+ outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr)
947
+
948
+ if prefix:
949
+ return "%s%s" % (prefix, outer_type)
950
+
951
+ mod = type(type_).__module__
952
+ if mod.startswith("sqlalchemy.dialects"):
953
+ match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod)
954
+ assert match is not None
955
+ dname = match.group(1)
956
+ return "%s.%s" % (dname, outer_type)
957
+ elif mod.startswith("sqlalchemy"):
958
+ prefix = _sqlalchemy_autogenerate_prefix(autogen_context)
959
+ return "%s%s" % (prefix, outer_type)
960
+ else:
961
+ return None
962
+
963
+
964
+ _constraint_renderers = util.Dispatcher()
965
+
966
+
967
+ def _render_constraint(
968
+ constraint: Constraint,
969
+ autogen_context: AutogenContext,
970
+ namespace_metadata: Optional[MetaData],
971
+ ) -> Optional[str]:
972
+ try:
973
+ renderer = _constraint_renderers.dispatch(constraint)
974
+ except ValueError:
975
+ util.warn("No renderer is established for object %r" % constraint)
976
+ return "[Unknown Python object %r]" % constraint
977
+ else:
978
+ return renderer(constraint, autogen_context, namespace_metadata)
979
+
980
+
981
+ @_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint)
982
+ def _render_primary_key(
983
+ constraint: PrimaryKeyConstraint,
984
+ autogen_context: AutogenContext,
985
+ namespace_metadata: Optional[MetaData],
986
+ ) -> Optional[str]:
987
+ rendered = _user_defined_render("primary_key", constraint, autogen_context)
988
+ if rendered is not False:
989
+ return rendered
990
+
991
+ if not constraint.columns:
992
+ return None
993
+
994
+ opts = []
995
+ if constraint.name:
996
+ opts.append(
997
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
998
+ )
999
+ return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % {
1000
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
1001
+ "args": ", ".join(
1002
+ [repr(c.name) for c in constraint.columns]
1003
+ + ["%s=%s" % (kwname, val) for kwname, val in opts]
1004
+ ),
1005
+ }
1006
+
1007
+
1008
+ def _fk_colspec(
1009
+ fk: ForeignKey,
1010
+ metadata_schema: Optional[str],
1011
+ namespace_metadata: Optional[MetaData],
1012
+ ) -> str:
1013
+ """Implement a 'safe' version of ForeignKey._get_colspec() that
1014
+ won't fail if the remote table can't be resolved.
1015
+
1016
+ """
1017
+ colspec = fk._get_colspec()
1018
+ tokens = colspec.split(".")
1019
+ tname, colname = tokens[-2:]
1020
+
1021
+ if metadata_schema is not None and len(tokens) == 2:
1022
+ table_fullname = "%s.%s" % (metadata_schema, tname)
1023
+ else:
1024
+ table_fullname = ".".join(tokens[0:-1])
1025
+
1026
+ if (
1027
+ not fk.link_to_name
1028
+ and fk.parent is not None
1029
+ and fk.parent.table is not None
1030
+ ):
1031
+ # try to resolve the remote table in order to adjust for column.key.
1032
+ # the FK constraint needs to be rendered in terms of the column
1033
+ # name.
1034
+
1035
+ if (
1036
+ namespace_metadata is not None
1037
+ and table_fullname in namespace_metadata.tables
1038
+ ):
1039
+ col = namespace_metadata.tables[table_fullname].c.get(colname)
1040
+ if col is not None:
1041
+ colname = _ident(col.name) # type: ignore[assignment]
1042
+
1043
+ colspec = "%s.%s" % (table_fullname, colname)
1044
+
1045
+ return colspec
1046
+
1047
+
1048
+ def _populate_render_fk_opts(
1049
+ constraint: ForeignKeyConstraint, opts: List[Tuple[str, str]]
1050
+ ) -> None:
1051
+ if constraint.onupdate:
1052
+ opts.append(("onupdate", repr(constraint.onupdate)))
1053
+ if constraint.ondelete:
1054
+ opts.append(("ondelete", repr(constraint.ondelete)))
1055
+ if constraint.initially:
1056
+ opts.append(("initially", repr(constraint.initially)))
1057
+ if constraint.deferrable:
1058
+ opts.append(("deferrable", repr(constraint.deferrable)))
1059
+ if constraint.use_alter:
1060
+ opts.append(("use_alter", repr(constraint.use_alter)))
1061
+ if constraint.match:
1062
+ opts.append(("match", repr(constraint.match)))
1063
+
1064
+
1065
+ @_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint)
1066
+ def _render_foreign_key(
1067
+ constraint: ForeignKeyConstraint,
1068
+ autogen_context: AutogenContext,
1069
+ namespace_metadata: Optional[MetaData],
1070
+ ) -> Optional[str]:
1071
+ rendered = _user_defined_render("foreign_key", constraint, autogen_context)
1072
+ if rendered is not False:
1073
+ return rendered
1074
+
1075
+ opts = []
1076
+ if constraint.name:
1077
+ opts.append(
1078
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
1079
+ )
1080
+
1081
+ _populate_render_fk_opts(constraint, opts)
1082
+
1083
+ apply_metadata_schema = (
1084
+ namespace_metadata.schema if namespace_metadata is not None else None
1085
+ )
1086
+ return (
1087
+ "%(prefix)sForeignKeyConstraint([%(cols)s], "
1088
+ "[%(refcols)s], %(args)s)"
1089
+ % {
1090
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
1091
+ "cols": ", ".join(
1092
+ repr(_ident(f.parent.name)) for f in constraint.elements
1093
+ ),
1094
+ "refcols": ", ".join(
1095
+ repr(_fk_colspec(f, apply_metadata_schema, namespace_metadata))
1096
+ for f in constraint.elements
1097
+ ),
1098
+ "args": ", ".join(
1099
+ ["%s=%s" % (kwname, val) for kwname, val in opts]
1100
+ ),
1101
+ }
1102
+ )
1103
+
1104
+
1105
+ @_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint)
1106
+ def _render_unique_constraint(
1107
+ constraint: UniqueConstraint,
1108
+ autogen_context: AutogenContext,
1109
+ namespace_metadata: Optional[MetaData],
1110
+ ) -> str:
1111
+ rendered = _user_defined_render("unique", constraint, autogen_context)
1112
+ if rendered is not False:
1113
+ return rendered
1114
+
1115
+ return _uq_constraint(constraint, autogen_context, False)
1116
+
1117
+
1118
+ @_constraint_renderers.dispatch_for(sa_schema.CheckConstraint)
1119
+ def _render_check_constraint(
1120
+ constraint: CheckConstraint,
1121
+ autogen_context: AutogenContext,
1122
+ namespace_metadata: Optional[MetaData],
1123
+ ) -> Optional[str]:
1124
+ rendered = _user_defined_render("check", constraint, autogen_context)
1125
+ if rendered is not False:
1126
+ return rendered
1127
+
1128
+ # detect the constraint being part of
1129
+ # a parent type which is probably in the Table already.
1130
+ # ideally SQLAlchemy would give us more of a first class
1131
+ # way to detect this.
1132
+ if (
1133
+ constraint._create_rule
1134
+ and hasattr(constraint._create_rule, "target")
1135
+ and isinstance(
1136
+ constraint._create_rule.target,
1137
+ sqltypes.TypeEngine,
1138
+ )
1139
+ ):
1140
+ return None
1141
+ opts = []
1142
+ if constraint.name:
1143
+ opts.append(
1144
+ ("name", repr(_render_gen_name(autogen_context, constraint.name)))
1145
+ )
1146
+ return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % {
1147
+ "prefix": _sqlalchemy_autogenerate_prefix(autogen_context),
1148
+ "opts": (
1149
+ ", " + (", ".join("%s=%s" % (k, v) for k, v in opts))
1150
+ if opts
1151
+ else ""
1152
+ ),
1153
+ "sqltext": _render_potential_expr(
1154
+ constraint.sqltext, autogen_context, wrap_in_element=False
1155
+ ),
1156
+ }
1157
+
1158
+
1159
+ @renderers.dispatch_for(ops.ExecuteSQLOp)
1160
+ def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str:
1161
+ if not isinstance(op.sqltext, str):
1162
+ raise NotImplementedError(
1163
+ "Autogenerate rendering of SQL Expression language constructs "
1164
+ "not supported here; please use a plain SQL string"
1165
+ )
1166
+ return "{prefix}execute({sqltext!r})".format(
1167
+ prefix=_alembic_autogenerate_prefix(autogen_context),
1168
+ sqltext=op.sqltext,
1169
+ )
1170
+
1171
+
1172
+ renderers = default_renderers.branch()
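Editor's note: the functions above complete autogenerate's constraint renderers, which dispatch on constraint type and emit migration source text. A minimal sketch of the input/output relationship (illustrative names only; this snippet is not part of the committed file):

    import sqlalchemy as sa

    metadata = sa.MetaData()
    widget = sa.Table(
        "widget",
        metadata,
        sa.Column("id", sa.Integer),
        sa.PrimaryKeyConstraint("id", name="pk_widget"),
    )
    # _render_constraint() dispatches on the constraint class; for the
    # primary key above, _render_primary_key() would emit roughly:
    #     sa.PrimaryKeyConstraint('id', name='pk_widget')
    # while an unrecognized constraint type falls back to the warning
    # path in _render_constraint().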
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/autogenerate/rewriter.py ADDED
@@ -0,0 +1,240 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Any
4
+ from typing import Callable
5
+ from typing import Iterator
6
+ from typing import List
7
+ from typing import Tuple
8
+ from typing import Type
9
+ from typing import TYPE_CHECKING
10
+ from typing import Union
11
+
12
+ from .. import util
13
+ from ..operations import ops
14
+
15
+ if TYPE_CHECKING:
16
+ from ..operations.ops import AddColumnOp
17
+ from ..operations.ops import AlterColumnOp
18
+ from ..operations.ops import CreateTableOp
19
+ from ..operations.ops import DowngradeOps
20
+ from ..operations.ops import MigrateOperation
21
+ from ..operations.ops import MigrationScript
22
+ from ..operations.ops import ModifyTableOps
23
+ from ..operations.ops import OpContainer
24
+ from ..operations.ops import UpgradeOps
25
+ from ..runtime.migration import MigrationContext
26
+ from ..script.revision import _GetRevArg
27
+
28
+ ProcessRevisionDirectiveFn = Callable[
29
+ ["MigrationContext", "_GetRevArg", List["MigrationScript"]], None
30
+ ]
31
+
32
+
33
+ class Rewriter:
34
+ """A helper object that allows easy 'rewriting' of ops streams.
35
+
36
+ The :class:`.Rewriter` object is intended to be passed along
37
+ to the
38
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`
39
+ parameter in an ``env.py`` script. Once constructed, any number
40
+ of "rewrites" functions can be associated with it, which will be given
41
+ the opportunity to modify the structure without having to have explicit
42
+ knowledge of the overall structure.
43
+
44
+ The function is passed the :class:`.MigrationContext` object and
45
+ ``revision`` tuple that are normally passed to
46
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`,
47
+ and the third argument is an individual directive of the type
48
+ noted in the decorator. The function has the choice of returning
49
+ a single op directive, which normally can be the directive that
50
+ was actually passed, or a new directive to replace it, or a list
51
+ of zero or more directives to replace it.
52
+
53
+ .. seealso::
54
+
55
+ :ref:`autogen_rewriter` - usage example
56
+
57
+ """
58
+
59
+ _traverse = util.Dispatcher()
60
+
61
+ _chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] = ()
62
+
63
+ def __init__(self) -> None:
64
+ self.dispatch = util.Dispatcher()
65
+
66
+ def chain(
67
+ self,
68
+ other: Union[
69
+ ProcessRevisionDirectiveFn,
70
+ Rewriter,
71
+ ],
72
+ ) -> Rewriter:
73
+ """Produce a "chain" of this :class:`.Rewriter` to another.
74
+
75
+ This allows two or more rewriters to operate serially on a stream,
76
+ e.g.::
77
+
78
+ writer1 = autogenerate.Rewriter()
79
+ writer2 = autogenerate.Rewriter()
80
+
81
+
82
+ @writer1.rewrites(ops.AddColumnOp)
83
+ def add_column_nullable(context, revision, op):
84
+ op.column.nullable = True
85
+ return op
86
+
87
+
88
+ @writer2.rewrites(ops.AddColumnOp)
89
+ def add_column_idx(context, revision, op):
90
+ idx_op = ops.CreateIndexOp(
91
+ "ixc", op.table_name, [op.column.name]
92
+ )
93
+ return [op, idx_op]
94
+
95
+ writer = writer1.chain(writer2)
96
+
97
+ :param other: a :class:`.Rewriter` instance
98
+ :return: a new :class:`.Rewriter` that will run the operations
99
+ of this writer, then the "other" writer, in succession.
100
+
101
+ """
102
+ wr = self.__class__.__new__(self.__class__)
103
+ wr.__dict__.update(self.__dict__)
104
+ wr._chained += (other,)
105
+ return wr
106
+
107
+ def rewrites(
108
+ self,
109
+ operator: Union[
110
+ Type[AddColumnOp],
111
+ Type[MigrateOperation],
112
+ Type[AlterColumnOp],
113
+ Type[CreateTableOp],
114
+ Type[ModifyTableOps],
115
+ ],
116
+ ) -> Callable[..., Any]:
117
+ """Register a function as rewriter for a given type.
118
+
119
+ The function should receive three arguments, which are
120
+ the :class:`.MigrationContext`, a ``revision`` tuple, and
121
+ an op directive of the type indicated. E.g.::
122
+
123
+ @writer1.rewrites(ops.AddColumnOp)
124
+ def add_column_nullable(context, revision, op):
125
+ op.column.nullable = True
126
+ return op
127
+
128
+ """
129
+ return self.dispatch.dispatch_for(operator)
130
+
131
+ def _rewrite(
132
+ self,
133
+ context: MigrationContext,
134
+ revision: _GetRevArg,
135
+ directive: MigrateOperation,
136
+ ) -> Iterator[MigrateOperation]:
137
+ try:
138
+ _rewriter = self.dispatch.dispatch(directive)
139
+ except ValueError:
140
+ _rewriter = None
141
+ yield directive
142
+ else:
143
+ if self in directive._mutations:
144
+ yield directive
145
+ else:
146
+ for r_directive in util.to_list(
147
+ _rewriter(context, revision, directive), []
148
+ ):
149
+ r_directive._mutations = r_directive._mutations.union(
150
+ [self]
151
+ )
152
+ yield r_directive
153
+
154
+ def __call__(
155
+ self,
156
+ context: MigrationContext,
157
+ revision: _GetRevArg,
158
+ directives: List[MigrationScript],
159
+ ) -> None:
160
+ self.process_revision_directives(context, revision, directives)
161
+ for process_revision_directives in self._chained:
162
+ process_revision_directives(context, revision, directives)
163
+
164
+ @_traverse.dispatch_for(ops.MigrationScript)
165
+ def _traverse_script(
166
+ self,
167
+ context: MigrationContext,
168
+ revision: _GetRevArg,
169
+ directive: MigrationScript,
170
+ ) -> None:
171
+ upgrade_ops_list: List[UpgradeOps] = []
172
+ for upgrade_ops in directive.upgrade_ops_list:
173
+ ret = self._traverse_for(context, revision, upgrade_ops)
174
+ if len(ret) != 1:
175
+ raise ValueError(
176
+ "Can only return single object for UpgradeOps traverse"
177
+ )
178
+ upgrade_ops_list.append(ret[0])
179
+
180
+ directive.upgrade_ops = upgrade_ops_list
181
+
182
+ downgrade_ops_list: List[DowngradeOps] = []
183
+ for downgrade_ops in directive.downgrade_ops_list:
184
+ ret = self._traverse_for(context, revision, downgrade_ops)
185
+ if len(ret) != 1:
186
+ raise ValueError(
187
+ "Can only return single object for DowngradeOps traverse"
188
+ )
189
+ downgrade_ops_list.append(ret[0])
190
+ directive.downgrade_ops = downgrade_ops_list
191
+
192
+ @_traverse.dispatch_for(ops.OpContainer)
193
+ def _traverse_op_container(
194
+ self,
195
+ context: MigrationContext,
196
+ revision: _GetRevArg,
197
+ directive: OpContainer,
198
+ ) -> None:
199
+ self._traverse_list(context, revision, directive.ops)
200
+
201
+ @_traverse.dispatch_for(ops.MigrateOperation)
202
+ def _traverse_any_directive(
203
+ self,
204
+ context: MigrationContext,
205
+ revision: _GetRevArg,
206
+ directive: MigrateOperation,
207
+ ) -> None:
208
+ pass
209
+
210
+ def _traverse_for(
211
+ self,
212
+ context: MigrationContext,
213
+ revision: _GetRevArg,
214
+ directive: MigrateOperation,
215
+ ) -> Any:
216
+ directives = list(self._rewrite(context, revision, directive))
217
+ for directive in directives:
218
+ traverser = self._traverse.dispatch(directive)
219
+ traverser(self, context, revision, directive)
220
+ return directives
221
+
222
+ def _traverse_list(
223
+ self,
224
+ context: MigrationContext,
225
+ revision: _GetRevArg,
226
+ directives: Any,
227
+ ) -> None:
228
+ dest = []
229
+ for directive in directives:
230
+ dest.extend(self._traverse_for(context, revision, directive))
231
+
232
+ directives[:] = dest
233
+
234
+ def process_revision_directives(
235
+ self,
236
+ context: MigrationContext,
237
+ revision: _GetRevArg,
238
+ directives: List[MigrationScript],
239
+ ) -> None:
240
+ self._traverse_list(context, revision, directives)
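Editor's note: a sketch of the env.py wiring the Rewriter docstring describes, using the public alembic API (the hook body is illustrative, not part of this diff):

    from alembic.autogenerate import rewriter
    from alembic.operations import ops

    writer = rewriter.Rewriter()

    @writer.rewrites(ops.AddColumnOp)
    def add_column_nullable(context, revision, op):
        # force every autogenerated "add column" to be nullable
        op.column.nullable = True
        return op

    # inside env.py, pass the writer as the directive hook:
    #     context.configure(
    #         connection=connection,
    #         target_metadata=target_metadata,
    #         process_revision_directives=writer,
    #     )

Because a Rewriter is itself callable with the (context, revision, directives) signature via __call__ above, it can be passed directly wherever a process_revision_directives function is expected.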
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/command.py ADDED
@@ -0,0 +1,835 @@
1
+ # mypy: allow-untyped-defs, allow-untyped-calls
2
+
3
+ from __future__ import annotations
4
+
5
+ import os
6
+ import pathlib
7
+ from typing import List
8
+ from typing import Optional
9
+ from typing import TYPE_CHECKING
10
+ from typing import Union
11
+
12
+ from . import autogenerate as autogen
13
+ from . import util
14
+ from .runtime.environment import EnvironmentContext
15
+ from .script import ScriptDirectory
16
+ from .util import compat
17
+
18
+ if TYPE_CHECKING:
19
+ from alembic.config import Config
20
+ from alembic.script.base import Script
21
+ from alembic.script.revision import _RevIdType
22
+ from .runtime.environment import ProcessRevisionDirectiveFn
23
+
24
+
25
+ def list_templates(config: Config) -> None:
26
+ """List available templates.
27
+
28
+ :param config: a :class:`.Config` object.
29
+
30
+ """
31
+
32
+ config.print_stdout("Available templates:\n")
33
+ for tempname in config._get_template_path().iterdir():
34
+ with (tempname / "README").open() as readme:
35
+ synopsis = next(readme).rstrip()
36
+ config.print_stdout("%s - %s", tempname.name, synopsis)
37
+
38
+ config.print_stdout("\nTemplates are used via the 'init' command, e.g.:")
39
+ config.print_stdout("\n alembic init --template generic ./scripts")
40
+
41
+
42
+ def init(
43
+ config: Config,
44
+ directory: str,
45
+ template: str = "generic",
46
+ package: bool = False,
47
+ ) -> None:
48
+ """Initialize a new scripts directory.
49
+
50
+ :param config: a :class:`.Config` object.
51
+
52
+ :param directory: string path of the target directory.
53
+
54
+ :param template: string name of the migration environment template to
55
+ use.
56
+
57
+ :param package: when True, write ``__init__.py`` files into the
58
+ environment location as well as the versions/ location.
59
+
60
+ """
61
+
62
+ directory_path = pathlib.Path(directory)
63
+ if directory_path.exists() and list(directory_path.iterdir()):
64
+ raise util.CommandError(
65
+ "Directory %s already exists and is not empty" % directory_path
66
+ )
67
+
68
+ template_path = config._get_template_path() / template
69
+
70
+ if not template_path.exists():
71
+ raise util.CommandError(f"No such template {template_path}")
72
+
73
+ # left as os.access() to suit unit test mocking
74
+ if not os.access(directory_path, os.F_OK):
75
+ with util.status(
76
+ f"Creating directory {directory_path.absolute()}",
77
+ **config.messaging_opts,
78
+ ):
79
+ os.makedirs(directory_path)
80
+
81
+ versions = directory_path / "versions"
82
+ with util.status(
83
+ f"Creating directory {versions.absolute()}",
84
+ **config.messaging_opts,
85
+ ):
86
+ os.makedirs(versions)
87
+
88
+ if not directory_path.is_absolute():
89
+ # for a non-absolute path, record the script location in the .ini /
90
+ # pyproject config as relative to the %(here)s token, which is where
91
+ # the config file itself lives
92
+
93
+ if config._config_file_path is not None:
94
+ rel_dir = compat.path_relative_to(
95
+ directory_path.absolute(),
96
+ config._config_file_path.absolute().parent,
97
+ walk_up=True,
98
+ )
99
+ ini_script_location_directory = ("%(here)s" / rel_dir).as_posix()
100
+ if config._toml_file_path is not None:
101
+ rel_dir = compat.path_relative_to(
102
+ directory_path.absolute(),
103
+ config._toml_file_path.absolute().parent,
104
+ walk_up=True,
105
+ )
106
+ toml_script_location_directory = ("%(here)s" / rel_dir).as_posix()
107
+
108
+ else:
109
+ ini_script_location_directory = directory_path.as_posix()
110
+ toml_script_location_directory = directory_path.as_posix()
111
+
112
+ script = ScriptDirectory(directory_path)
113
+
114
+ has_toml = False
115
+
116
+ config_file: pathlib.Path | None = None
117
+
118
+ for file_path in template_path.iterdir():
119
+ file_ = file_path.name
120
+ if file_ == "alembic.ini.mako":
121
+ assert config.config_file_name is not None
122
+ config_file = pathlib.Path(config.config_file_name).absolute()
123
+ if config_file.exists():
124
+ util.msg(
125
+ f"File {config_file} already exists, skipping",
126
+ **config.messaging_opts,
127
+ )
128
+ else:
129
+ script._generate_template(
130
+ file_path,
131
+ config_file,
132
+ script_location=ini_script_location_directory,
133
+ )
134
+ elif file_ == "pyproject.toml.mako":
135
+ has_toml = True
136
+ assert config._toml_file_path is not None
137
+ toml_path = config._toml_file_path.absolute()
138
+
139
+ if toml_path.exists():
140
+ # left as open() to suit unit test mocking
141
+ with open(toml_path, "rb") as f:
142
+ toml_data = compat.tomllib.load(f)
143
+ if "tool" in toml_data and "alembic" in toml_data["tool"]:
144
+
145
+ util.msg(
146
+ f"File {toml_path} already exists "
147
+ "and already has a [tool.alembic] section, "
148
+ "skipping",
149
+ )
150
+ continue
151
+ script._append_template(
152
+ file_path,
153
+ toml_path,
154
+ script_location=toml_script_location_directory,
155
+ )
156
+ else:
157
+ script._generate_template(
158
+ file_path,
159
+ toml_path,
160
+ script_location=toml_script_location_directory,
161
+ )
162
+
163
+ elif file_path.is_file():
164
+ output_file = directory_path / file_
165
+ script._copy_file(file_path, output_file)
166
+
167
+ if package:
168
+ for path in [
169
+ directory_path.absolute() / "__init__.py",
170
+ versions.absolute() / "__init__.py",
171
+ ]:
172
+ with util.status(f"Adding {path!s}", **config.messaging_opts):
173
+ # left as open() to suit unit test mocking
174
+ with open(path, "w"):
175
+ pass
176
+
177
+ assert config_file is not None
178
+
179
+ if has_toml:
180
+ util.msg(
181
+ f"Please edit configuration settings in {toml_path} and "
182
+ "configuration/connection/logging "
183
+ f"settings in {config_file} before proceeding.",
184
+ **config.messaging_opts,
185
+ )
186
+ else:
187
+ util.msg(
188
+ "Please edit configuration/connection/logging "
189
+ f"settings in {config_file} before proceeding.",
190
+ **config.messaging_opts,
191
+ )
192
+
193
+
194
+ def revision(
195
+ config: Config,
196
+ message: Optional[str] = None,
197
+ autogenerate: bool = False,
198
+ sql: bool = False,
199
+ head: str = "head",
200
+ splice: bool = False,
201
+ branch_label: Optional[_RevIdType] = None,
202
+ version_path: Union[str, os.PathLike[str], None] = None,
203
+ rev_id: Optional[str] = None,
204
+ depends_on: Optional[str] = None,
205
+ process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None,
206
+ ) -> Union[Optional[Script], List[Optional[Script]]]:
207
+ """Create a new revision file.
208
+
209
+ :param config: a :class:`.Config` object.
210
+
211
+ :param message: string message to apply to the revision; this is the
212
+ ``-m`` option to ``alembic revision``.
213
+
214
+ :param autogenerate: whether or not to autogenerate the script from
215
+ the database; this is the ``--autogenerate`` option to
216
+ ``alembic revision``.
217
+
218
+ :param sql: whether to dump the script out as a SQL string; when specified,
219
+ the script is dumped to stdout. This is the ``--sql`` option to
220
+ ``alembic revision``.
221
+
222
+ :param head: head revision to build the new revision upon as a parent;
223
+ this is the ``--head`` option to ``alembic revision``.
224
+
225
+ :param splice: whether or not the new revision should be made into a
226
+ new head of its own; is required when the given ``head`` is not itself
227
+ a head. This is the ``--splice`` option to ``alembic revision``.
228
+
229
+ :param branch_label: string label to apply to the branch; this is the
230
+ ``--branch-label`` option to ``alembic revision``.
231
+
232
+ :param version_path: string symbol identifying a specific version path
233
+ from the configuration; this is the ``--version-path`` option to
234
+ ``alembic revision``.
235
+
236
+ :param rev_id: optional revision identifier to use instead of having
237
+ one generated; this is the ``--rev-id`` option to ``alembic revision``.
238
+
239
+ :param depends_on: optional list of "depends on" identifiers; this is the
240
+ ``--depends-on`` option to ``alembic revision``.
241
+
242
+ :param process_revision_directives: this is a callable that takes the
243
+ same form as the callable described at
244
+ :paramref:`.EnvironmentContext.configure.process_revision_directives`;
245
+ will be applied to the structure generated by the revision process
246
+ where it can be altered programmatically. Note that unlike all
247
+ the other parameters, this option is only available via programmatic
248
+ use of :func:`.command.revision`.
249
+
250
+ """
251
+
252
+ script_directory = ScriptDirectory.from_config(config)
253
+
254
+ command_args = dict(
255
+ message=message,
256
+ autogenerate=autogenerate,
257
+ sql=sql,
258
+ head=head,
259
+ splice=splice,
260
+ branch_label=branch_label,
261
+ version_path=version_path,
262
+ rev_id=rev_id,
263
+ depends_on=depends_on,
264
+ )
265
+ revision_context = autogen.RevisionContext(
266
+ config,
267
+ script_directory,
268
+ command_args,
269
+ process_revision_directives=process_revision_directives,
270
+ )
271
+
272
+ environment = util.asbool(
273
+ config.get_alembic_option("revision_environment")
274
+ )
275
+
276
+ if autogenerate:
277
+ environment = True
278
+
279
+ if sql:
280
+ raise util.CommandError(
281
+ "Using --sql with --autogenerate does not make any sense"
282
+ )
283
+
284
+ def retrieve_migrations(rev, context):
285
+ revision_context.run_autogenerate(rev, context)
286
+ return []
287
+
288
+ elif environment:
289
+
290
+ def retrieve_migrations(rev, context):
291
+ revision_context.run_no_autogenerate(rev, context)
292
+ return []
293
+
294
+ elif sql:
295
+ raise util.CommandError(
296
+ "Using --sql with the revision command when "
297
+ "revision_environment is not configured does not make any sense"
298
+ )
299
+
300
+ if environment:
301
+ with EnvironmentContext(
302
+ config,
303
+ script_directory,
304
+ fn=retrieve_migrations,
305
+ as_sql=sql,
306
+ template_args=revision_context.template_args,
307
+ revision_context=revision_context,
308
+ ):
309
+ script_directory.run_env()
310
+
311
+ # the revision_context now has MigrationScript structure(s) present.
312
+ # these could theoretically be further processed / rewritten *here*,
313
+ # in addition to the hooks present within each run_migrations() call,
314
+ # or at the end of env.py run_migrations_online().
315
+
316
+ scripts = [script for script in revision_context.generate_scripts()]
317
+ if len(scripts) == 1:
318
+ return scripts[0]
319
+ else:
320
+ return scripts
321
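Editor's note: per the docstring, process_revision_directives is reachable only through programmatic use of this function. A minimal sketch (the config path and message are placeholders):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    script = command.revision(
        cfg, message="add widget table", autogenerate=True
    )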
+
322
+
323
+ def check(config: "Config") -> None:
324
+ """Check if revision command with autogenerate has pending upgrade ops.
325
+
326
+ :param config: a :class:`.Config` object.
327
+
328
+ .. versionadded:: 1.9.0
329
+
330
+ """
331
+
332
+ script_directory = ScriptDirectory.from_config(config)
333
+
334
+ command_args = dict(
335
+ message=None,
336
+ autogenerate=True,
337
+ sql=False,
338
+ head="head",
339
+ splice=False,
340
+ branch_label=None,
341
+ version_path=None,
342
+ rev_id=None,
343
+ depends_on=None,
344
+ )
345
+ revision_context = autogen.RevisionContext(
346
+ config,
347
+ script_directory,
348
+ command_args,
349
+ )
350
+
351
+ def retrieve_migrations(rev, context):
352
+ revision_context.run_autogenerate(rev, context)
353
+ return []
354
+
355
+ with EnvironmentContext(
356
+ config,
357
+ script_directory,
358
+ fn=retrieve_migrations,
359
+ as_sql=False,
360
+ template_args=revision_context.template_args,
361
+ revision_context=revision_context,
362
+ ):
363
+ script_directory.run_env()
364
+
365
+ # the revision_context now has MigrationScript structure(s) present.
366
+
367
+ migration_script = revision_context.generated_revisions[-1]
368
+ diffs = []
369
+ for upgrade_ops in migration_script.upgrade_ops_list:
370
+ diffs.extend(upgrade_ops.as_diffs())
371
+
372
+ if diffs:
373
+ raise util.AutogenerateDiffsDetected(
374
+ f"New upgrade operations detected: {diffs}",
375
+ revision_context=revision_context,
376
+ diffs=diffs,
377
+ )
378
+ else:
379
+ config.print_stdout("No new upgrade operations detected.")
380
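Editor's note: because check() raises util.AutogenerateDiffsDetected when pending operations exist, it can serve as a CI guard. A hypothetical sketch (the config path is a placeholder):

    from alembic import command
    from alembic.config import Config
    from alembic.util import AutogenerateDiffsDetected

    cfg = Config("alembic.ini")
    try:
        command.check(cfg)
    except AutogenerateDiffsDetected as err:
        raise SystemExit(f"pending model changes detected: {err}")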
+
381
+
382
+ def merge(
383
+ config: Config,
384
+ revisions: _RevIdType,
385
+ message: Optional[str] = None,
386
+ branch_label: Optional[_RevIdType] = None,
387
+ rev_id: Optional[str] = None,
388
+ ) -> Optional[Script]:
389
+ """Merge two revisions together. Creates a new migration file.
390
+
391
+ :param config: a :class:`.Config` instance
392
+
393
+ :param revisions: The revisions to merge.
394
+
395
+ :param message: string message to apply to the revision.
396
+
397
+ :param branch_label: string label name to apply to the new revision.
398
+
399
+ :param rev_id: hardcoded revision identifier instead of generating a new
400
+ one.
401
+
402
+ .. seealso::
403
+
404
+ :ref:`branches`
405
+
406
+ """
407
+
408
+ script = ScriptDirectory.from_config(config)
409
+ template_args = {
410
+ "config": config # Let templates use config for
411
+ # e.g. multiple databases
412
+ }
413
+
414
+ environment = util.asbool(
415
+ config.get_alembic_option("revision_environment")
416
+ )
417
+
418
+ if environment:
419
+
420
+ def nothing(rev, context):
421
+ return []
422
+
423
+ with EnvironmentContext(
424
+ config,
425
+ script,
426
+ fn=nothing,
427
+ as_sql=False,
428
+ template_args=template_args,
429
+ ):
430
+ script.run_env()
431
+
432
+ return script.generate_revision(
433
+ rev_id or util.rev_id(),
434
+ message,
435
+ refresh=True,
436
+ head=revisions,
437
+ branch_labels=branch_label,
438
+ **template_args, # type:ignore[arg-type]
439
+ )
440
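Editor's note: a hypothetical merge of two branch heads into a single revision (the revision ids are placeholders):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.merge(
        cfg, ["1975ea83b712", "27c6a30d7c24"], message="merge heads"
    )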
+
441
+
442
+ def upgrade(
443
+ config: Config,
444
+ revision: str,
445
+ sql: bool = False,
446
+ tag: Optional[str] = None,
447
+ ) -> None:
448
+ """Upgrade to a later version.
449
+
450
+ :param config: a :class:`.Config` instance.
451
+
452
+ :param revision: string revision target or range for --sql mode. May be
453
+ ``"heads"`` to target the most recent revision(s).
454
+
455
+ :param sql: if True, use ``--sql`` mode.
456
+
457
+ :param tag: an arbitrary "tag" that can be intercepted by custom
458
+ ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
459
+ method.
460
+
461
+ """
462
+
463
+ script = ScriptDirectory.from_config(config)
464
+
465
+ starting_rev = None
466
+ if ":" in revision:
467
+ if not sql:
468
+ raise util.CommandError("Range revision not allowed")
469
+ starting_rev, revision = revision.split(":", 2)
470
+
471
+ def upgrade(rev, context):
472
+ return script._upgrade_revs(revision, rev)
473
+
474
+ with EnvironmentContext(
475
+ config,
476
+ script,
477
+ fn=upgrade,
478
+ as_sql=sql,
479
+ starting_rev=starting_rev,
480
+ destination_rev=revision,
481
+ tag=tag,
482
+ ):
483
+ script.run_env()
484
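Editor's note: as the range parsing above shows, the "<fromrev>:<torev>" form is accepted only together with sql=True, which renders the SQL offline rather than executing it. A sketch with a placeholder starting revision:

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.upgrade(cfg, "ae1027a6acf:head", sql=True)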
+
485
+
486
+ def downgrade(
487
+ config: Config,
488
+ revision: str,
489
+ sql: bool = False,
490
+ tag: Optional[str] = None,
491
+ ) -> None:
492
+ """Revert to a previous version.
493
+
494
+ :param config: a :class:`.Config` instance.
495
+
496
+ :param revision: string revision target or range for --sql mode. May
497
+ be ``"base"`` to target the first revision.
498
+
499
+ :param sql: if True, use ``--sql`` mode.
500
+
501
+ :param tag: an arbitrary "tag" that can be intercepted by custom
502
+ ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument`
503
+ method.
504
+
505
+ """
506
+
507
+ script = ScriptDirectory.from_config(config)
508
+ starting_rev = None
509
+ if ":" in revision:
510
+ if not sql:
511
+ raise util.CommandError("Range revision not allowed")
512
+ starting_rev, revision = revision.split(":", 2)
513
+ elif sql:
514
+ raise util.CommandError(
515
+ "downgrade with --sql requires <fromrev>:<torev>"
516
+ )
517
+
518
+ def downgrade(rev, context):
519
+ return script._downgrade_revs(revision, rev)
520
+
521
+ with EnvironmentContext(
522
+ config,
523
+ script,
524
+ fn=downgrade,
525
+ as_sql=sql,
526
+ starting_rev=starting_rev,
527
+ destination_rev=revision,
528
+ tag=tag,
529
+ ):
530
+ script.run_env()
531
+
532
+
533
+ def show(config: Config, rev: str) -> None:
534
+ """Show the revision(s) denoted by the given symbol.
535
+
536
+ :param config: a :class:`.Config` instance.
537
+
538
+ :param rev: string revision target. May be ``"current"`` to show the
539
+ revision(s) currently applied in the database.
540
+
541
+ """
542
+
543
+ script = ScriptDirectory.from_config(config)
544
+
545
+ if rev == "current":
546
+
547
+ def show_current(rev, context):
548
+ for sc in script.get_revisions(rev):
549
+ config.print_stdout(sc.log_entry)
550
+ return []
551
+
552
+ with EnvironmentContext(config, script, fn=show_current):
553
+ script.run_env()
554
+ else:
555
+ for sc in script.get_revisions(rev):
556
+ config.print_stdout(sc.log_entry)
557
+
558
+
559
+ def history(
560
+ config: Config,
561
+ rev_range: Optional[str] = None,
562
+ verbose: bool = False,
563
+ indicate_current: bool = False,
564
+ ) -> None:
565
+ """List changeset scripts in chronological order.
566
+
567
+ :param config: a :class:`.Config` instance.
568
+
569
+ :param rev_range: string revision range.
570
+
571
+ :param verbose: output in verbose mode.
572
+
573
+ :param indicate_current: indicate current revision.
574
+
575
+ """
576
+ base: Optional[str]
577
+ head: Optional[str]
578
+ script = ScriptDirectory.from_config(config)
579
+ if rev_range is not None:
580
+ if ":" not in rev_range:
581
+ raise util.CommandError(
582
+ "History range requires [start]:[end], [start]:, or :[end]"
583
+ )
584
+ base, head = rev_range.strip().split(":")
585
+ else:
586
+ base = head = None
587
+
588
+ environment = (
589
+ util.asbool(config.get_alembic_option("revision_environment"))
590
+ or indicate_current
591
+ )
592
+
593
+ def _display_history(config, script, base, head, currents=()):
594
+ for sc in script.walk_revisions(
595
+ base=base or "base", head=head or "heads"
596
+ ):
597
+ if indicate_current:
598
+ sc._db_current_indicator = sc.revision in currents
599
+
600
+ config.print_stdout(
601
+ sc.cmd_format(
602
+ verbose=verbose,
603
+ include_branches=True,
604
+ include_doc=True,
605
+ include_parents=True,
606
+ )
607
+ )
608
+
609
+ def _display_history_w_current(config, script, base, head):
610
+ def _display_current_history(rev, context):
611
+ if head == "current":
612
+ _display_history(config, script, base, rev, rev)
613
+ elif base == "current":
614
+ _display_history(config, script, rev, head, rev)
615
+ else:
616
+ _display_history(config, script, base, head, rev)
617
+ return []
618
+
619
+ with EnvironmentContext(config, script, fn=_display_current_history):
620
+ script.run_env()
621
+
622
+ if base == "current" or head == "current" or environment:
623
+ _display_history_w_current(config, script, base, head)
624
+ else:
625
+ _display_history(config, script, base, head)
626
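Editor's note: a hypothetical full-history listing; passing indicate_current=True takes the environment-running branch above so the database's current revision can be marked in the output:

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.history(
        cfg, rev_range="base:heads", verbose=True, indicate_current=True
    )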
+
627
+
628
+ def heads(
629
+ config: Config, verbose: bool = False, resolve_dependencies: bool = False
630
+ ) -> None:
631
+ """Show current available heads in the script directory.
632
+
633
+ :param config: a :class:`.Config` instance.
634
+
635
+ :param verbose: output in verbose mode.
636
+
637
+ :param resolve_dependencies: treat dependency version as down revisions.
638
+
639
+ """
640
+
641
+ script = ScriptDirectory.from_config(config)
642
+ if resolve_dependencies:
643
+ heads = script.get_revisions("heads")
644
+ else:
645
+ heads = script.get_revisions(script.get_heads())
646
+
647
+ for rev in heads:
648
+ config.print_stdout(
649
+ rev.cmd_format(
650
+ verbose, include_branches=True, tree_indicators=False
651
+ )
652
+ )
653
+
654
+
655
+ def branches(config: Config, verbose: bool = False) -> None:
656
+ """Show current branch points.
657
+
658
+ :param config: a :class:`.Config` instance.
659
+
660
+ :param verbose: output in verbose mode.
661
+
662
+ """
663
+ script = ScriptDirectory.from_config(config)
664
+ for sc in script.walk_revisions():
665
+ if sc.is_branch_point:
666
+ config.print_stdout(
667
+ "%s\n%s\n",
668
+ sc.cmd_format(verbose, include_branches=True),
669
+ "\n".join(
670
+ "%s -> %s"
671
+ % (
672
+ " " * len(str(sc.revision)),
673
+ rev_obj.cmd_format(
674
+ False, include_branches=True, include_doc=verbose
675
+ ),
676
+ )
677
+ for rev_obj in (
678
+ script.get_revision(rev) for rev in sc.nextrev
679
+ )
680
+ ),
681
+ )
682
+
683
+
684
+ def current(config: Config, verbose: bool = False) -> None:
685
+ """Display the current revision for a database.
686
+
687
+ :param config: a :class:`.Config` instance.
688
+
689
+ :param verbose: output in verbose mode.
690
+
691
+ """
692
+
693
+ script = ScriptDirectory.from_config(config)
694
+
695
+ def display_version(rev, context):
696
+ if verbose:
697
+ config.print_stdout(
698
+ "Current revision(s) for %s:",
699
+ util.obfuscate_url_pw(context.connection.engine.url),
700
+ )
701
+ for rev in script.get_all_current(rev):
702
+ config.print_stdout(rev.cmd_format(verbose))
703
+
704
+ return []
705
+
706
+ with EnvironmentContext(
707
+ config, script, fn=display_version, dont_mutate=True
708
+ ):
709
+ script.run_env()
710
+
711
+
712
+ def stamp(
713
+ config: Config,
714
+ revision: _RevIdType,
715
+ sql: bool = False,
716
+ tag: Optional[str] = None,
717
+ purge: bool = False,
718
+ ) -> None:
719
+ """'stamp' the revision table with the given revision; don't
720
+ run any migrations.
721
+
722
+ :param config: a :class:`.Config` instance.
723
+
724
+ :param revision: target revision or list of revisions. May be a list
725
+ to indicate stamping of multiple branch heads; may be ``"base"``
726
+ to remove all revisions from the table or ``"heads"`` to stamp the
727
+ most recent revision(s).
728
+
729
+ .. note:: this parameter is called "revisions" in the command line
730
+ interface.
731
+
732
+ :param sql: use ``--sql`` mode
733
+
734
+ :param tag: an arbitrary "tag" that can be intercepted by custom
735
+ ``env.py`` scripts via the :class:`.EnvironmentContext.get_tag_argument`
736
+ method.
737
+
738
+ :param purge: delete all entries in the version table before stamping.
739
+
740
+ """
741
+
742
+ script = ScriptDirectory.from_config(config)
743
+
744
+ if sql:
745
+ destination_revs = []
746
+ starting_rev = None
747
+ for _revision in util.to_list(revision):
748
+ if ":" in _revision:
749
+ srev, _revision = _revision.split(":", 2)
750
+
751
+ if starting_rev != srev:
752
+ if starting_rev is None:
753
+ starting_rev = srev
754
+ else:
755
+ raise util.CommandError(
756
+ "Stamp operation with --sql only supports a "
757
+ "single starting revision at a time"
758
+ )
759
+ destination_revs.append(_revision)
760
+ else:
761
+ destination_revs = util.to_list(revision)
762
+
763
+ def do_stamp(rev, context):
764
+ return script._stamp_revs(util.to_tuple(destination_revs), rev)
765
+
766
+ with EnvironmentContext(
767
+ config,
768
+ script,
769
+ fn=do_stamp,
770
+ as_sql=sql,
771
+ starting_rev=starting_rev if sql else None,
772
+ destination_rev=util.to_tuple(destination_revs),
773
+ tag=tag,
774
+ purge=purge,
775
+ ):
776
+ script.run_env()
777
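Editor's note: a hypothetical use of stamp() to reset the version table without running any migrations, clearing stale rows first via purge:

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")
    command.stamp(cfg, "base", purge=True)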
+
778
+
779
+ def edit(config: Config, rev: str) -> None:
780
+ """Edit revision script(s) using $EDITOR.
781
+
782
+ :param config: a :class:`.Config` instance.
783
+
784
+ :param rev: target revision.
785
+
786
+ """
787
+
788
+ script = ScriptDirectory.from_config(config)
789
+
790
+ if rev == "current":
791
+
792
+ def edit_current(rev, context):
793
+ if not rev:
794
+ raise util.CommandError("No current revisions")
795
+ for sc in script.get_revisions(rev):
796
+ util.open_in_editor(sc.path)
797
+ return []
798
+
799
+ with EnvironmentContext(config, script, fn=edit_current):
800
+ script.run_env()
801
+ else:
802
+ revs = script.get_revisions(rev)
803
+ if not revs:
804
+ raise util.CommandError(
805
+ "No revision files indicated by symbol '%s'" % rev
806
+ )
807
+ for sc in revs:
808
+ assert sc
809
+ util.open_in_editor(sc.path)
810
+
811
+
812
+ def ensure_version(config: Config, sql: bool = False) -> None:
813
+ """Create the alembic version table if it doesn't already exist.
814
+
815
+ :param config: a :class:`.Config` instance.
816
+
817
+ :param sql: use ``--sql`` mode.
818
+
819
+ .. versionadded:: 1.7.6
820
+
821
+ """
822
+
823
+ script = ScriptDirectory.from_config(config)
824
+
825
+ def do_ensure_version(rev, context):
826
+ context._ensure_version_table()
827
+ return []
828
+
829
+ with EnvironmentContext(
830
+ config,
831
+ script,
832
+ fn=do_ensure_version,
833
+ as_sql=sql,
834
+ ):
835
+ script.run_env()
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/config.py ADDED
@@ -0,0 +1,1020 @@
1
+ from __future__ import annotations
2
+
3
+ from argparse import ArgumentParser
4
+ from argparse import Namespace
5
+ from configparser import ConfigParser
6
+ import inspect
7
+ import os
8
+ from pathlib import Path
9
+ import re
10
+ import sys
11
+ from typing import Any
12
+ from typing import cast
13
+ from typing import Dict
14
+ from typing import Mapping
15
+ from typing import Optional
16
+ from typing import overload
17
+ from typing import Protocol
18
+ from typing import Sequence
19
+ from typing import TextIO
20
+ from typing import Union
21
+
22
+ from typing_extensions import TypedDict
23
+
24
+ from . import __version__
25
+ from . import command
26
+ from . import util
27
+ from .util import compat
28
+ from .util.pyfiles import _preserving_path_as_str
29
+
30
+
31
+ class Config:
32
+ r"""Represent an Alembic configuration.
33
+
34
+ Within an ``env.py`` script, this is available
35
+ via the :attr:`.EnvironmentContext.config` attribute,
36
+ which in turn is available at ``alembic.context``::
37
+
38
+ from alembic import context
39
+
40
+ some_param = context.config.get_main_option("my option")
41
+
42
+ When invoking Alembic programmatically, a new
43
+ :class:`.Config` can be created by passing
44
+ the name of an .ini file to the constructor::
45
+
46
+ from alembic.config import Config
47
+ alembic_cfg = Config("/path/to/yourapp/alembic.ini")
48
+
49
+ With a :class:`.Config` object, you can then
50
+ run Alembic commands programmatically using the directives
51
+ in :mod:`alembic.command`.
52
+
53
+ The :class:`.Config` object can also be constructed without
54
+ a filename. Values can be set programmatically, and
55
+ new sections will be created as needed::
56
+
57
+ from alembic.config import Config
58
+ alembic_cfg = Config()
59
+ alembic_cfg.set_main_option("script_location", "myapp:migrations")
60
+ alembic_cfg.set_main_option("sqlalchemy.url", "postgresql://foo/bar")
61
+ alembic_cfg.set_section_option("mysection", "foo", "bar")
62
+
63
+ .. warning::
64
+
65
+ When using programmatic configuration, make sure the
66
+ ``env.py`` file in use is compatible with the target configuration;
67
+ including that the call to Python ``logging.fileConfig()`` is
68
+ omitted if the programmatic configuration doesn't actually include
69
+ logging directives.
70
+
71
+ For passing non-string values to environments, such as connections and
72
+ engines, use the :attr:`.Config.attributes` dictionary::
73
+
74
+ with engine.begin() as connection:
75
+ alembic_cfg.attributes['connection'] = connection
76
+ command.upgrade(alembic_cfg, "head")
77
+
78
+ :param file\_: name of the .ini file to open if an ``alembic.ini`` is
79
+ to be used. This should refer to the ``alembic.ini`` file, either as
80
+ a filename or a full path to the file. This filename if passed must refer
81
+ to an **ini file in ConfigParser format** only.
82
+
83
+ :param toml\_file: name of the pyproject.toml file to open if a
84
+ ``pyproject.toml`` file is to be used. This should refer to the
85
+ ``pyproject.toml`` file, either as a filename or a full path to the file.
86
+ This file must be in toml format. Both :paramref:`.Config.file\_` and
87
+ :paramref:`.Config.toml\_file` may be passed simultaneously, or
88
+ exclusively.
89
+
90
+ .. versionadded:: 1.16.0
91
+
92
+ :param ini_section: name of the main Alembic section within the
93
+ .ini file
94
+ :param output_buffer: optional file-like input buffer which
95
+ will be passed to the :class:`.MigrationContext` - used to redirect
96
+ the output of "offline generation" when using Alembic programmatically.
97
+ :param stdout: buffer where the "print" output of commands will be sent.
98
+ Defaults to ``sys.stdout``.
99
+
100
+ :param config_args: A dictionary of keys and values that will be used
101
+ for substitution in the alembic config file, as well as the pyproject.toml
102
+ file, depending on which / both are used. The dictionary as given is
103
+ **copied** to two new, independent dictionaries, stored locally under the
104
+ attributes ``.config_args`` and ``.toml_args``. Both of these
105
+ dictionaries will also be populated with the replacement variable
106
+ ``%(here)s``, which refers to the location of the .ini and/or .toml file
107
+ as appropriate.
108
+
109
+ :param attributes: optional dictionary of arbitrary Python keys/values,
110
+ which will be populated into the :attr:`.Config.attributes` dictionary.
111
+
112
+ .. seealso::
113
+
114
+ :ref:`connection_sharing`
115
+
116
+ """
117
+
118
+ def __init__(
119
+ self,
120
+ file_: Union[str, os.PathLike[str], None] = None,
121
+ toml_file: Union[str, os.PathLike[str], None] = None,
122
+ ini_section: str = "alembic",
123
+ output_buffer: Optional[TextIO] = None,
124
+ stdout: TextIO = sys.stdout,
125
+ cmd_opts: Optional[Namespace] = None,
126
+ config_args: Mapping[str, Any] = util.immutabledict(),
127
+ attributes: Optional[Dict[str, Any]] = None,
128
+ ) -> None:
129
+ """Construct a new :class:`.Config`"""
130
+ self.config_file_name = (
131
+ _preserving_path_as_str(file_) if file_ else None
132
+ )
133
+ self.toml_file_name = (
134
+ _preserving_path_as_str(toml_file) if toml_file else None
135
+ )
136
+ self.config_ini_section = ini_section
137
+ self.output_buffer = output_buffer
138
+ self.stdout = stdout
139
+ self.cmd_opts = cmd_opts
140
+ self.config_args = dict(config_args)
141
+ self.toml_args = dict(config_args)
142
+ if attributes:
143
+ self.attributes.update(attributes)
144
+
145
+ cmd_opts: Optional[Namespace] = None
146
+ """The command-line options passed to the ``alembic`` script.
147
+
148
+ Within an ``env.py`` script this can be accessed via the
149
+ :attr:`.EnvironmentContext.config` attribute.
150
+
151
+ .. seealso::
152
+
153
+ :meth:`.EnvironmentContext.get_x_argument`
154
+
155
+ """
156
+
157
+ config_file_name: Optional[str] = None
158
+ """Filesystem path to the .ini file in use."""
159
+
160
+ toml_file_name: Optional[str] = None
161
+ """Filesystem path to the pyproject.toml file in use.
162
+
163
+ .. versionadded:: 1.16.0
164
+
165
+ """
166
+
167
+ @property
168
+ def _config_file_path(self) -> Optional[Path]:
169
+ if self.config_file_name is None:
170
+ return None
171
+ return Path(self.config_file_name)
172
+
173
+ @property
174
+ def _toml_file_path(self) -> Optional[Path]:
175
+ if self.toml_file_name is None:
176
+ return None
177
+ return Path(self.toml_file_name)
178
+
179
+ config_ini_section: str = None # type:ignore[assignment]
180
+ """Name of the config file section to read basic configuration
181
+ from. Defaults to ``alembic``, that is the ``[alembic]`` section
182
+ of the .ini file. This value is modified using the ``-n/--name``
183
+ option to the Alembic runner.
184
+
185
+ """
186
+
187
+ @util.memoized_property
188
+ def attributes(self) -> Dict[str, Any]:
189
+ """A Python dictionary for storage of additional state.
190
+
191
+
192
+ This is a utility dictionary which can include not just strings but
193
+ engines, connections, schema objects, or anything else.
194
+ Use this to pass objects into an env.py script, such as passing
195
+ a :class:`sqlalchemy.engine.base.Connection` when calling
196
+ commands from :mod:`alembic.command` programmatically.
197
+
198
+ .. seealso::
199
+
200
+ :ref:`connection_sharing`
201
+
202
+ :paramref:`.Config.attributes`
203
+
204
+ """
205
+ return {}
206
+
207
+ def print_stdout(self, text: str, *arg: Any) -> None:
208
+ """Render a message to standard out.
209
+
210
+ When :meth:`.Config.print_stdout` is called with additional args
211
+ those arguments will formatted against the provided text,
212
+ otherwise we simply output the provided text verbatim.
213
+
214
+ This is a no-op when the``quiet`` messaging option is enabled.
215
+
216
+ e.g.::
217
+
218
+ >>> config.print_stdout('Some text %s', 'arg')
219
+ Some Text arg
220
+
221
+ """
222
+
223
+ if arg:
224
+ output = str(text) % arg
225
+ else:
226
+ output = str(text)
227
+
228
+ util.write_outstream(self.stdout, output, "\n", **self.messaging_opts)
229
+
230
+ @util.memoized_property
231
+ def file_config(self) -> ConfigParser:
232
+ """Return the underlying ``ConfigParser`` object.
233
+
234
+ Dir*-ect access to the .ini file is available here,
235
+ though the :meth:`.Config.get_section` and
236
+ :meth:`.Config.get_main_option`
237
+ methods provide a possibly simpler interface.
238
+
239
+ """
240
+
241
+ if self._config_file_path:
242
+ here = self._config_file_path.absolute().parent
243
+ else:
244
+ here = Path()
245
+ self.config_args["here"] = here.as_posix()
246
+ file_config = ConfigParser(self.config_args)
247
+ if self._config_file_path:
248
+ compat.read_config_parser(file_config, [self._config_file_path])
249
+ else:
250
+ file_config.add_section(self.config_ini_section)
251
+ return file_config
252
+
253
+ @util.memoized_property
254
+ def toml_alembic_config(self) -> Mapping[str, Any]:
255
+ """Return a dictionary of the [tool.alembic] section from
256
+ pyproject.toml"""
257
+
258
+ if self._toml_file_path and self._toml_file_path.exists():
259
+
260
+ here = self._toml_file_path.absolute().parent
261
+ self.toml_args["here"] = here.as_posix()
262
+
263
+ with open(self._toml_file_path, "rb") as f:
264
+ toml_data = compat.tomllib.load(f)
265
+ data = toml_data.get("tool", {}).get("alembic", {})
266
+ if not isinstance(data, dict):
267
+ raise util.CommandError("Incorrect TOML format")
268
+ return data
269
+
270
+ else:
271
+ return {}
272
+
273
+ def get_template_directory(self) -> str:
274
+ """Return the directory where Alembic setup templates are found.
275
+
276
+ This method is used by the alembic ``init`` and ``list_templates``
277
+ commands.
278
+
279
+ """
280
+ import alembic
281
+
282
+ package_dir = Path(alembic.__file__).absolute().parent
283
+ return str(package_dir / "templates")
284
+
285
+ def _get_template_path(self) -> Path:
286
+ """Return the directory where Alembic setup templates are found.
287
+
288
+ This method is used by the alembic ``init`` and ``list_templates``
289
+ commands.
290
+
291
+ .. versionadded:: 1.16.0
292
+
293
+ """
294
+ return Path(self.get_template_directory())
295
+
296
+ @overload
297
+ def get_section(
298
+ self, name: str, default: None = ...
299
+ ) -> Optional[Dict[str, str]]: ...
300
+
301
+ # "default" here could also be a TypeVar
302
+ # _MT = TypeVar("_MT", bound=Mapping[str, str]),
303
+ # however mypy wasn't handling that correctly (pyright was)
304
+ @overload
305
+ def get_section(
306
+ self, name: str, default: Dict[str, str]
307
+ ) -> Dict[str, str]: ...
308
+
309
+ @overload
310
+ def get_section(
311
+ self, name: str, default: Mapping[str, str]
312
+ ) -> Union[Dict[str, str], Mapping[str, str]]: ...
313
+
314
+ def get_section(
315
+ self, name: str, default: Optional[Mapping[str, str]] = None
316
+ ) -> Optional[Mapping[str, str]]:
317
+ """Return all the configuration options from a given .ini file section
318
+ as a dictionary.
319
+
320
+ If the given section does not exist, the value of ``default``
321
+ is returned, which is expected to be a dictionary or other mapping.
322
+
323
+ """
324
+ if not self.file_config.has_section(name):
325
+ return default
326
+
327
+ return dict(self.file_config.items(name))
328
+
329
+ def set_main_option(self, name: str, value: str) -> None:
330
+ """Set an option programmatically within the 'main' section.
331
+
332
+ This overrides whatever was in the .ini file.
333
+
334
+ :param name: name of the value
335
+
336
+ :param value: the value. Note that this value is passed to
337
+ ``ConfigParser.set``, which supports variable interpolation using
338
+ pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of
339
+ an interpolation symbol must therefore be escaped, e.g. ``%%``.
340
+ The given value may refer to another value already in the file
341
+ using the interpolation format.
342
+
343
+ """
344
+ self.set_section_option(self.config_ini_section, name, value)
345
+
346
+ def remove_main_option(self, name: str) -> None:
347
+ self.file_config.remove_option(self.config_ini_section, name)
348
+
349
+ def set_section_option(self, section: str, name: str, value: str) -> None:
350
+ """Set an option programmatically within the given section.
351
+
352
+ The section is created if it doesn't exist already.
353
+ The value here will override whatever was in the .ini
354
+ file.
355
+
356
+ Does **NOT** consume from the pyproject.toml file.
357
+
358
+ .. seealso::
359
+
360
+ :meth:`.Config.get_alembic_option` - includes pyproject support
361
+
362
+ :param section: name of the section
363
+
364
+ :param name: name of the value
365
+
366
+ :param value: the value. Note that this value is passed to
367
+ ``ConfigParser.set``, which supports variable interpolation using
368
+ pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of
369
+ an interpolation symbol must therefore be escaped, e.g. ``%%``.
370
+ The given value may refer to another value already in the file
371
+ using the interpolation format.
372
+
373
+ """
374
+
375
+ if not self.file_config.has_section(section):
376
+ self.file_config.add_section(section)
377
+ self.file_config.set(section, name, value)
378
+
379
+ def get_section_option(
380
+ self, section: str, name: str, default: Optional[str] = None
381
+ ) -> Optional[str]:
382
+ """Return an option from the given section of the .ini file."""
383
+ if not self.file_config.has_section(section):
384
+ raise util.CommandError(
385
+ "No config file %r found, or file has no "
386
+ "'[%s]' section" % (self.config_file_name, section)
387
+ )
388
+ if self.file_config.has_option(section, name):
389
+ return self.file_config.get(section, name)
390
+ else:
391
+ return default
392
+
393
+ @overload
394
+ def get_main_option(self, name: str, default: str) -> str: ...
395
+
396
+ @overload
397
+ def get_main_option(
398
+ self, name: str, default: Optional[str] = None
399
+ ) -> Optional[str]: ...
400
+
401
+ def get_main_option(
402
+ self, name: str, default: Optional[str] = None
403
+ ) -> Optional[str]:
404
+ """Return an option from the 'main' section of the .ini file.
405
+
406
+ This defaults to being a key from the ``[alembic]``
407
+ section, unless the ``-n/--name`` flag were used to
408
+ indicate a different section.
409
+
410
+ Does **NOT** consume from the pyproject.toml file.
411
+
412
+ .. seealso::
413
+
414
+ :meth:`.Config.get_alembic_option` - includes pyproject support
415
+
416
+ """
417
+ return self.get_section_option(self.config_ini_section, name, default)
418
+
419
+ @overload
420
+ def get_alembic_option(self, name: str, default: str) -> str: ...
421
+
422
+ @overload
423
+ def get_alembic_option(
424
+ self, name: str, default: Optional[str] = None
425
+ ) -> Optional[str]: ...
426
+
427
+ def get_alembic_option(
428
+ self, name: str, default: Optional[str] = None
429
+ ) -> Union[None, str, list[str], dict[str, str], list[dict[str, str]]]:
430
+ """Return an option from the "[alembic]" or "[tool.alembic]" section
431
+ of the configparser-parsed .ini file (e.g. ``alembic.ini``) or
432
+ toml-parsed ``pyproject.toml`` file.
433
+
434
+ The value returned is expected to be None, string, list of strings,
435
+ or dictionary of strings. Within each type of string value, the
436
+ ``%(here)s`` token is substituted out with the absolute path of the
437
+ ``pyproject.toml`` file, as are other tokens which are extracted from
438
+ the :paramref:`.Config.config_args` dictionary.
439
+
440
+ Searches always prioritize the configparser namespace first, before
441
+ searching in the toml namespace.
442
+
443
+ If Alembic was run using the ``-n/--name`` flag to indicate an
444
+ alternate main section name, this is taken into account **only** for
445
+ the configparser-parsed .ini file. The section name in toml is always
446
+ ``[tool.alembic]``.
447
+
448
+
449
+ .. versionadded:: 1.16.0
450
+
451
+ """
452
+
453
+ if self.file_config.has_option(self.config_ini_section, name):
454
+ return self.file_config.get(self.config_ini_section, name)
455
+ else:
456
+ return self._get_toml_config_value(name, default=default)
457
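Editor's note: a sketch of the lookup order documented above; the [alembic] section of the .ini file wins, with [tool.alembic] in pyproject.toml as the fallback (file names are placeholders):

    from alembic.config import Config

    cfg = Config(file_="alembic.ini", toml_file="pyproject.toml")
    location = cfg.get_alembic_option(
        "script_location", default="migrations"
    )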
+
458
+ def get_alembic_boolean_option(self, name: str) -> bool:
459
+ if self.file_config.has_option(self.config_ini_section, name):
460
+ return (
461
+ self.file_config.get(self.config_ini_section, name) == "true"
462
+ )
463
+ else:
464
+ value = self.toml_alembic_config.get(name, False)
465
+ if not isinstance(value, bool):
466
+ raise util.CommandError(
467
+ f"boolean value expected for TOML parameter {name!r}"
468
+ )
469
+ return value
470
+
471
+ def _get_toml_config_value(
472
+ self, name: str, default: Optional[Any] = None
473
+ ) -> Union[None, str, list[str], dict[str, str], list[dict[str, str]]]:
474
+ USE_DEFAULT = object()
475
+ value: Union[None, str, list[str], dict[str, str]] = (
476
+ self.toml_alembic_config.get(name, USE_DEFAULT)
477
+ )
478
+ if value is USE_DEFAULT:
479
+ return default
480
+ if value is not None:
481
+ if isinstance(value, str):
482
+ value = value % (self.toml_args)
483
+ elif isinstance(value, list):
484
+ if value and isinstance(value[0], dict):
485
+ value = [
486
+ {k: v % (self.toml_args) for k, v in dv.items()}
487
+ for dv in value
488
+ ]
489
+ else:
490
+ value = cast(
491
+ "list[str]", [v % (self.toml_args) for v in value]
492
+ )
493
+ elif isinstance(value, dict):
494
+ value = cast(
495
+ "dict[str, str]",
496
+ {k: v % (self.toml_args) for k, v in value.items()},
497
+ )
498
+ else:
499
+ raise util.CommandError(
500
+ f"unsupported TOML value type for key: {name!r}"
501
+ )
502
+ return value
503
+
504
+ @util.memoized_property
505
+ def messaging_opts(self) -> MessagingOptions:
506
+ """The messaging options."""
507
+ return cast(
508
+ MessagingOptions,
509
+ util.immutabledict(
510
+ {"quiet": getattr(self.cmd_opts, "quiet", False)}
511
+ ),
512
+ )
513
+
514
+ def _get_file_separator_char(self, *names: str) -> Optional[str]:
515
+ for name in names:
516
+ separator = self.get_main_option(name)
517
+ if separator is not None:
518
+ break
519
+ else:
520
+ return None
521
+
522
+ split_on_path = {
523
+ "space": " ",
524
+ "newline": "\n",
525
+ "os": os.pathsep,
526
+ ":": ":",
527
+ ";": ";",
528
+ }
529
+
530
+ try:
531
+ sep = split_on_path[separator]
532
+ except KeyError as ke:
533
+ raise ValueError(
534
+ "'%s' is not a valid value for %s; "
535
+ "expected 'space', 'newline', 'os', ':', ';'"
536
+ % (separator, name)
537
+ ) from ke
538
+ else:
539
+ if name == "version_path_separator":
540
+ util.warn_deprecated(
541
+ "The version_path_separator configuration parameter "
542
+ "is deprecated; please use path_separator"
543
+ )
544
+ return sep
545
+
546
+ def get_version_locations_list(self) -> Optional[list[str]]:
547
+
548
+ version_locations_str = self.file_config.get(
549
+ self.config_ini_section, "version_locations", fallback=None
550
+ )
551
+
552
+ if version_locations_str:
553
+ split_char = self._get_file_separator_char(
554
+ "path_separator", "version_path_separator"
555
+ )
556
+
557
+ if split_char is None:
558
+
559
+ # legacy behaviour for backwards compatibility
560
+ util.warn_deprecated(
561
+ "No path_separator found in configuration; "
562
+ "falling back to legacy splitting on spaces/commas "
563
+ "for version_locations. Consider adding "
564
+ "path_separator=os to Alembic config."
565
+ )
566
+
567
+ _split_on_space_comma = re.compile(r", *|(?: +)")
568
+ return _split_on_space_comma.split(version_locations_str)
569
+ else:
570
+ return [
571
+ x.strip()
572
+ for x in version_locations_str.split(split_char)
573
+ if x
574
+ ]
575
+ else:
576
+ return cast(
577
+ "list[str]",
578
+ self._get_toml_config_value("version_locations", None),
579
+ )
580
+
581
+ def get_prepend_sys_paths_list(self) -> Optional[list[str]]:
582
+ prepend_sys_path_str = self.file_config.get(
583
+ self.config_ini_section, "prepend_sys_path", fallback=None
584
+ )
585
+
586
+ if prepend_sys_path_str:
587
+ split_char = self._get_file_separator_char("path_separator")
588
+
589
+ if split_char is None:
590
+
591
+ # legacy behaviour for backwards compatibility
592
+ util.warn_deprecated(
593
+ "No path_separator found in configuration; "
594
+ "falling back to legacy splitting on spaces, commas, "
595
+ "and colons for prepend_sys_path. Consider adding "
596
+ "path_separator=os to Alembic config."
597
+ )
598
+
599
+ _split_on_space_comma_colon = re.compile(r", *|(?: +)|\:")
600
+ return _split_on_space_comma_colon.split(prepend_sys_path_str)
601
+ else:
602
+ return [
603
+ x.strip()
604
+ for x in prepend_sys_path_str.split(split_char)
605
+ if x
606
+ ]
607
+ else:
608
+ return cast(
609
+ "list[str]",
610
+ self._get_toml_config_value("prepend_sys_path", None),
611
+ )
612
+
613
+ def get_hooks_list(self) -> list[PostWriteHookConfig]:
614
+
615
+ hooks: list[PostWriteHookConfig] = []
616
+
617
+ if not self.file_config.has_section("post_write_hooks"):
618
+ toml_hook_config = cast(
619
+ "list[dict[str, str]]",
620
+ self._get_toml_config_value("post_write_hooks", []),
621
+ )
622
+ for cfg in toml_hook_config:
623
+ opts = dict(cfg)
624
+ opts["_hook_name"] = opts.pop("name")
625
+ hooks.append(opts)
626
+
627
+ else:
628
+ _split_on_space_comma = re.compile(r", *|(?: +)")
629
+ ini_hook_config = self.get_section("post_write_hooks", {})
630
+ names = _split_on_space_comma.split(
631
+ ini_hook_config.get("hooks", "")
632
+ )
633
+
634
+ for name in names:
635
+ if not name:
636
+ continue
637
+ opts = {
638
+ key[len(name) + 1 :]: ini_hook_config[key]
639
+ for key in ini_hook_config
640
+ if key.startswith(name + ".")
641
+ }
642
+
643
+ opts["_hook_name"] = name
644
+ hooks.append(opts)
645
+
646
+ return hooks
647
+
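As an illustration of the lookup order implemented above — a minimal sketch, assuming an alembic.ini/pyproject.toml pair exist on disk with a ``[tool.alembic]`` table; the option values printed here are hypothetical:

    from alembic.config import Config

    # ini options take priority; the TOML [tool.alembic] table is the fallback
    cfg = Config(file_="alembic.ini", toml_file="pyproject.toml")
    print(cfg.get_alembic_option("script_location", "migrations"))

    # post_write_hooks may come from an ini [post_write_hooks] section or a
    # [[tool.alembic.post_write_hooks]] array of tables; each returned dict
    # carries its options plus the "_hook_name" key added by get_hooks_list()
    for hook in cfg.get_hooks_list():
        print(hook["_hook_name"])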
+
+PostWriteHookConfig = Mapping[str, str]
+
+
+class MessagingOptions(TypedDict, total=False):
+    quiet: bool
+
+
+class CommandFunction(Protocol):
+    """A function that may be registered in the CLI as an alembic command.
+    It must be a named function and it must accept a :class:`.Config` object
+    as the first argument.
+
+    .. versionadded:: 1.15.3
+
+    """
+
+    __name__: str
+
+    def __call__(self, config: Config, *args: Any, **kwargs: Any) -> Any: ...
+
+
+class CommandLine:
+    """Provides the command line interface to Alembic."""
+
+    def __init__(self, prog: Optional[str] = None) -> None:
+        self._generate_args(prog)
+
+    _KWARGS_OPTS = {
+        "template": (
+            "-t",
+            "--template",
+            dict(
+                default="generic",
+                type=str,
+                help="Setup template for use with 'init'",
+            ),
+        ),
+        "message": (
+            "-m",
+            "--message",
+            dict(type=str, help="Message string to use with 'revision'"),
+        ),
+        "sql": (
+            "--sql",
+            dict(
+                action="store_true",
+                help="Don't emit SQL to database - dump to "
+                "standard output/file instead. See docs on "
+                "offline mode.",
+            ),
+        ),
+        "tag": (
+            "--tag",
+            dict(
+                type=str,
+                help="Arbitrary 'tag' name - can be used by "
+                "custom env.py scripts.",
+            ),
+        ),
+        "head": (
+            "--head",
+            dict(
+                type=str,
+                help="Specify head revision or <branchname>@head "
+                "to base new revision on.",
+            ),
+        ),
+        "splice": (
+            "--splice",
+            dict(
+                action="store_true",
+                help="Allow a non-head revision as the 'head' to splice onto",
+            ),
+        ),
+        "depends_on": (
+            "--depends-on",
+            dict(
+                action="append",
+                help="Specify one or more revision identifiers "
+                "which this revision should depend on.",
+            ),
+        ),
+        "rev_id": (
+            "--rev-id",
+            dict(
+                type=str,
+                help="Specify a hardcoded revision id instead of "
+                "generating one",
+            ),
+        ),
+        "version_path": (
+            "--version-path",
+            dict(
+                type=str,
+                help="Specify specific path from config for version file",
+            ),
+        ),
+        "branch_label": (
+            "--branch-label",
+            dict(
+                type=str,
+                help="Specify a branch label to apply to the new revision",
+            ),
+        ),
+        "verbose": (
+            "-v",
+            "--verbose",
+            dict(action="store_true", help="Use more verbose output"),
+        ),
+        "resolve_dependencies": (
+            "--resolve-dependencies",
+            dict(
+                action="store_true",
+                help="Treat dependency versions as down revisions",
+            ),
+        ),
+        "autogenerate": (
+            "--autogenerate",
+            dict(
+                action="store_true",
+                help="Populate revision script with candidate "
+                "migration operations, based on comparison "
+                "of database to model.",
+            ),
+        ),
+        "rev_range": (
+            "-r",
+            "--rev-range",
+            dict(
+                action="store",
+                help="Specify a revision range; format is [start]:[end]",
+            ),
+        ),
+        "indicate_current": (
+            "-i",
+            "--indicate-current",
+            dict(
+                action="store_true",
+                help="Indicate the current revision",
+            ),
+        ),
+        "purge": (
+            "--purge",
+            dict(
+                action="store_true",
+                help="Unconditionally erase the version table before stamping",
+            ),
+        ),
+        "package": (
+            "--package",
+            dict(
+                action="store_true",
+                help="Write empty __init__.py files to the "
+                "environment and version locations",
+            ),
+        ),
+    }
+    _POSITIONAL_OPTS = {
+        "directory": dict(help="location of scripts directory"),
+        "revision": dict(
+            help="revision identifier",
+        ),
+        "revisions": dict(
+            nargs="+",
+            help="one or more revisions, or 'heads' for all heads",
+        ),
+    }
+    _POSITIONAL_TRANSLATIONS: dict[Any, dict[str, str]] = {
+        command.stamp: {"revision": "revisions"}
+    }
+
+    def _generate_args(self, prog: Optional[str]) -> None:
+        parser = ArgumentParser(prog=prog)
+
+        parser.add_argument(
+            "--version", action="version", version="%%(prog)s %s" % __version__
+        )
+        parser.add_argument(
+            "-c",
+            "--config",
+            action="append",
+            help="Alternate config file; defaults to value of "
+            'ALEMBIC_CONFIG environment variable, or "alembic.ini". '
+            "May also refer to pyproject.toml file. May be specified twice "
+            "to reference both files separately",
+        )
+        parser.add_argument(
+            "-n",
+            "--name",
+            type=str,
+            default="alembic",
+            help="Name of section in .ini file to use for Alembic config "
+            "(only applies to configparser config, not toml)",
+        )
+        parser.add_argument(
+            "-x",
+            action="append",
+            help="Additional arguments consumed by "
+            "custom env.py scripts, e.g. -x "
+            "setting1=somesetting -x setting2=somesetting",
+        )
+        parser.add_argument(
+            "--raiseerr",
+            action="store_true",
+            help="Raise a full stack trace on error",
+        )
+        parser.add_argument(
+            "-q",
+            "--quiet",
+            action="store_true",
+            help="Do not log to std output.",
+        )
+
+        self.subparsers = parser.add_subparsers()
+        alembic_commands = (
+            cast(CommandFunction, fn)
+            for fn in (getattr(command, name) for name in dir(command))
+            if (
+                inspect.isfunction(fn)
+                and fn.__name__[0] != "_"
+                and fn.__module__ == "alembic.command"
+            )
+        )
+
+        for fn in alembic_commands:
+            self.register_command(fn)
+
+        self.parser = parser
+
+    def register_command(self, fn: CommandFunction) -> None:
+        """Registers a function as a CLI subcommand. The subcommand name
+        matches the function name, the arguments are extracted from the
+        signature and the help text is read from the docstring.
+
+        .. versionadded:: 1.15.3
+
+        .. seealso::
+
+            :ref:`custom_commandline`
+        """
+
+        positional, kwarg, help_text = self._inspect_function(fn)
+
+        subparser = self.subparsers.add_parser(fn.__name__, help=help_text)
+        subparser.set_defaults(cmd=(fn, positional, kwarg))
+
+        for arg in kwarg:
+            if arg in self._KWARGS_OPTS:
+                kwarg_opt = self._KWARGS_OPTS[arg]
+                args, opts = kwarg_opt[0:-1], kwarg_opt[-1]
+                subparser.add_argument(*args, **opts)  # type:ignore
+
+        for arg in positional:
+            opts = self._POSITIONAL_OPTS.get(arg, {})
+            subparser.add_argument(arg, **opts)  # type:ignore
+
+    def _inspect_function(self, fn: CommandFunction) -> tuple[Any, Any, str]:
+        spec = compat.inspect_getfullargspec(fn)
+        if spec[3] is not None:
+            positional = spec[0][1 : -len(spec[3])]
+            kwarg = spec[0][-len(spec[3]) :]
+        else:
+            positional = spec[0][1:]
+            kwarg = []
+
+        if fn in self._POSITIONAL_TRANSLATIONS:
+            positional = [
+                self._POSITIONAL_TRANSLATIONS[fn].get(name, name)
+                for name in positional
+            ]
+
+        # parse first line(s) of helptext without a line break
+        help_ = fn.__doc__
+        if help_:
+            help_lines = []
+            for line in help_.split("\n"):
+                if not line.strip():
+                    break
+                else:
+                    help_lines.append(line.strip())
+        else:
+            help_lines = []
+
+        help_text = " ".join(help_lines)
+
+        return positional, kwarg, help_text
+
+    def run_cmd(self, config: Config, options: Namespace) -> None:
+        fn, positional, kwarg = options.cmd
+
+        try:
+            fn(
+                config,
+                *[getattr(options, k, None) for k in positional],
+                **{k: getattr(options, k, None) for k in kwarg},
+            )
+        except util.CommandError as e:
+            if options.raiseerr:
+                raise
+            else:
+                util.err(str(e), **config.messaging_opts)
+
+    def _inis_from_config(self, options: Namespace) -> tuple[str, str]:
+        names = options.config
+
+        alembic_config_env = os.environ.get("ALEMBIC_CONFIG")
+        if (
+            alembic_config_env
+            and os.path.basename(alembic_config_env) == "pyproject.toml"
+        ):
+            default_pyproject_toml = alembic_config_env
+            default_alembic_config = "alembic.ini"
+        elif alembic_config_env:
+            default_pyproject_toml = "pyproject.toml"
+            default_alembic_config = alembic_config_env
+        else:
+            default_alembic_config = "alembic.ini"
+            default_pyproject_toml = "pyproject.toml"
+
+        if not names:
+            return default_pyproject_toml, default_alembic_config
+
+        toml = ini = None
+
+        for name in names:
+            if os.path.basename(name) == "pyproject.toml":
+                if toml is not None:
+                    raise util.CommandError(
+                        "pyproject.toml indicated more than once"
+                    )
+                toml = name
+            else:
+                if ini is not None:
+                    raise util.CommandError(
+                        "only one ini file may be indicated"
+                    )
+                ini = name
+
+        return toml if toml else default_pyproject_toml, (
+            ini if ini else default_alembic_config
+        )
+
+    def main(self, argv: Optional[Sequence[str]] = None) -> None:
+        """Executes the command line with the provided arguments."""
+        options = self.parser.parse_args(argv)
+        if not hasattr(options, "cmd"):
+            # see http://bugs.python.org/issue9253, argparse
+            # behavior changed incompatibly in py3.3
+            self.parser.error("too few arguments")
+        else:
+            toml, ini = self._inis_from_config(options)
+            cfg = Config(
+                file_=ini,
+                toml_file=toml,
+                ini_section=options.name,
+                cmd_opts=options,
+            )
+            self.run_cmd(cfg, options)
+
+
+def main(
+    argv: Optional[Sequence[str]] = None,
+    prog: Optional[str] = None,
+    **kwargs: Any,
+) -> None:
+    """The console runner function for Alembic."""
+
+    CommandLine(prog=prog).main(argv=argv)
+
+
+if __name__ == "__main__":
+    main()
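The register_command() hook above makes the CLI extensible. A minimal sketch of registering a custom subcommand; the command name ``hello`` and its body are hypothetical, and the Config passed in is built from whatever ini/toml files main() resolves:

    from alembic.config import CommandLine, Config

    def hello(config: Config, revision: str) -> None:
        """Print a greeting for the given revision."""
        # "revision" has no default, so _inspect_function treats it as a
        # positional argument and wires it up via _POSITIONAL_OPTS
        print(f"hello, revision {revision}")

    cli = CommandLine(prog="mytool")
    cli.register_command(hello)
    cli.main(argv=["hello", "abc123"])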
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/context.py ADDED
@@ -0,0 +1,5 @@
+from .runtime.environment import EnvironmentContext
+
+# create proxy functions for
+# each method on the EnvironmentContext class.
+EnvironmentContext.create_module_class_proxy(globals(), locals())
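Because of this proxying, a migration environment's ``env.py`` imports the module and calls EnvironmentContext methods on it directly. A minimal sketch of the offline-mode pattern as it would appear inside an ``env.py`` invoked by the ``alembic`` command (the proxy is only populated during such an invocation); the SQLite URL is illustrative:

    from alembic import context

    context.configure(url="sqlite:///example.db", literal_binds=True)
    with context.begin_transaction():
        context.run_migrations()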
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/context.pyi ADDED
@@ -0,0 +1,856 @@
+# ### this file stubs are generated by tools/write_pyi.py - do not edit ###
+# ### imports are manually managed
+from __future__ import annotations
+
+from typing import Any
+from typing import Callable
+from typing import Collection
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Literal
+from typing import Mapping
+from typing import MutableMapping
+from typing import Optional
+from typing import overload
+from typing import Sequence
+from typing import TextIO
+from typing import Tuple
+from typing import TYPE_CHECKING
+from typing import Union
+
+from typing_extensions import ContextManager
+
+if TYPE_CHECKING:
+    from sqlalchemy.engine.base import Connection
+    from sqlalchemy.engine.url import URL
+    from sqlalchemy.sql import Executable
+    from sqlalchemy.sql.schema import Column
+    from sqlalchemy.sql.schema import FetchedValue
+    from sqlalchemy.sql.schema import MetaData
+    from sqlalchemy.sql.schema import SchemaItem
+    from sqlalchemy.sql.type_api import TypeEngine
+
+    from .autogenerate.api import AutogenContext
+    from .config import Config
+    from .operations.ops import MigrationScript
+    from .runtime.migration import _ProxyTransaction
+    from .runtime.migration import MigrationContext
+    from .runtime.migration import MigrationInfo
+    from .script import ScriptDirectory
+
+### end imports ###
+
+def begin_transaction() -> (
+    Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]
+):
+    """Return a context manager that will
+    enclose an operation within a "transaction",
+    as defined by the environment's offline
+    and transactional DDL settings.
+
+    e.g.::
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+    :meth:`.begin_transaction` is intended to
+    "do the right thing" regardless of
+    calling context:
+
+    * If :meth:`.is_transactional_ddl` is ``False``,
+      returns a "do nothing" context manager
+      which otherwise produces no transactional
+      state or directives.
+    * If :meth:`.is_offline_mode` is ``True``,
+      returns a context manager that will
+      invoke the :meth:`.DefaultImpl.emit_begin`
+      and :meth:`.DefaultImpl.emit_commit`
+      methods, which will produce the string
+      directives ``BEGIN`` and ``COMMIT`` on
+      the output stream, as rendered by the
+      target backend (e.g. SQL Server would
+      emit ``BEGIN TRANSACTION``).
+    * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin`
+      on the current online connection, which
+      returns a :class:`sqlalchemy.engine.Transaction`
+      object. This object demarcates a real
+      transaction and is itself a context manager,
+      which will roll back if an exception
+      is raised.
+
+    Note that a custom ``env.py`` script which
+    has more specific transactional needs can of course
+    manipulate the :class:`~sqlalchemy.engine.Connection`
+    directly to produce transactional state in "online"
+    mode.
+
+    """
+
+config: Config
+
+def configure(
+    connection: Optional[Connection] = None,
+    url: Union[str, URL, None] = None,
+    dialect_name: Optional[str] = None,
+    dialect_opts: Optional[Dict[str, Any]] = None,
+    transactional_ddl: Optional[bool] = None,
+    transaction_per_migration: bool = False,
+    output_buffer: Optional[TextIO] = None,
+    starting_rev: Optional[str] = None,
+    tag: Optional[str] = None,
+    template_args: Optional[Dict[str, Any]] = None,
+    render_as_batch: bool = False,
+    target_metadata: Union[MetaData, Sequence[MetaData], None] = None,
+    include_name: Optional[
+        Callable[
+            [
+                Optional[str],
+                Literal[
+                    "schema",
+                    "table",
+                    "column",
+                    "index",
+                    "unique_constraint",
+                    "foreign_key_constraint",
+                ],
+                MutableMapping[
+                    Literal[
+                        "schema_name",
+                        "table_name",
+                        "schema_qualified_table_name",
+                    ],
+                    Optional[str],
+                ],
+            ],
+            bool,
+        ]
+    ] = None,
+    include_object: Optional[
+        Callable[
+            [
+                SchemaItem,
+                Optional[str],
+                Literal[
+                    "schema",
+                    "table",
+                    "column",
+                    "index",
+                    "unique_constraint",
+                    "foreign_key_constraint",
+                ],
+                bool,
+                Optional[SchemaItem],
+            ],
+            bool,
+        ]
+    ] = None,
+    include_schemas: bool = False,
+    process_revision_directives: Optional[
+        Callable[
+            [
+                MigrationContext,
+                Union[str, Iterable[Optional[str]], Iterable[str]],
+                List[MigrationScript],
+            ],
+            None,
+        ]
+    ] = None,
+    compare_type: Union[
+        bool,
+        Callable[
+            [
+                MigrationContext,
+                Column[Any],
+                Column[Any],
+                TypeEngine[Any],
+                TypeEngine[Any],
+            ],
+            Optional[bool],
+        ],
+    ] = True,
+    compare_server_default: Union[
+        bool,
+        Callable[
+            [
+                MigrationContext,
+                Column[Any],
+                Column[Any],
+                Optional[str],
+                Optional[FetchedValue],
+                Optional[str],
+            ],
+            Optional[bool],
+        ],
+    ] = False,
+    render_item: Optional[
+        Callable[[str, Any, AutogenContext], Union[str, Literal[False]]]
+    ] = None,
+    literal_binds: bool = False,
+    upgrade_token: str = "upgrades",
+    downgrade_token: str = "downgrades",
+    alembic_module_prefix: str = "op.",
+    sqlalchemy_module_prefix: str = "sa.",
+    user_module_prefix: Optional[str] = None,
+    on_version_apply: Optional[
+        Callable[
+            [
+                MigrationContext,
+                MigrationInfo,
+                Collection[Any],
+                Mapping[str, Any],
+            ],
+            None,
+        ]
+    ] = None,
+    **kw: Any,
+) -> None:
+    """Configure a :class:`.MigrationContext` within this
+    :class:`.EnvironmentContext` which will provide database
+    connectivity and other configuration to a series of
+    migration scripts.
+
+    Many methods on :class:`.EnvironmentContext` require that
+    this method has been called in order to function, as they
+    ultimately need to have database access or at least access
+    to the dialect in use. Those which do are documented as such.
+
+    The important thing needed by :meth:`.configure` is a
+    means to determine what kind of database dialect is in use.
+    An actual connection to that database is needed only if
+    the :class:`.MigrationContext` is to be used in
+    "online" mode.
+
+    If the :meth:`.is_offline_mode` function returns ``True``,
+    then no connection is needed here. Otherwise, the
+    ``connection`` parameter should be present as an
+    instance of :class:`sqlalchemy.engine.Connection`.
+
+    This function is typically called from the ``env.py``
+    script within a migration environment. It can be called
+    multiple times for an invocation. The most recent
+    :class:`~sqlalchemy.engine.Connection`
+    for which it was called is the one that will be operated upon
+    by the next call to :meth:`.run_migrations`.
+
+    General parameters:
+
+    :param connection: a :class:`~sqlalchemy.engine.Connection`
+     to use for SQL execution in "online" mode. When present, is also
+     used to determine the type of dialect in use.
+    :param url: a string database url, or a
+     :class:`sqlalchemy.engine.url.URL` object.
+     The type of dialect to be used will be derived from this if
+     ``connection`` is not passed.
+    :param dialect_name: string name of a dialect, such as
+     "postgresql", "mssql", etc.
+     The type of dialect to be used will be derived from this if
+     ``connection`` and ``url`` are not passed.
+    :param dialect_opts: dictionary of options to be passed to dialect
+     constructor.
+    :param transactional_ddl: Force the usage of "transactional"
+     DDL on or off;
+     this otherwise defaults to whether or not the dialect in
+     use supports it.
+    :param transaction_per_migration: if True, nest each migration script
+     in a transaction rather than the full series of migrations to
+     run.
+    :param output_buffer: a file-like object that will be used
+     for textual output
+     when the ``--sql`` option is used to generate SQL scripts.
+     Defaults to
+     ``sys.stdout`` if not passed here and also not present on
+     the :class:`.Config`
+     object. The value here overrides that of the :class:`.Config`
+     object.
+    :param output_encoding: when using ``--sql`` to generate SQL
+     scripts, apply this encoding to the string output.
+    :param literal_binds: when using ``--sql`` to generate SQL
+     scripts, pass through the ``literal_binds`` flag to the compiler
+     so that any literal values that would ordinarily be bound
+     parameters are converted to plain strings.
+
+     .. warning:: Dialects can typically only handle simple datatypes
+        like strings and numbers for auto-literal generation. Datatypes
+        like dates, intervals, and others may still require manual
+        formatting, typically using :meth:`.Operations.inline_literal`.
+
+     .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy
+        versions prior to 0.8 where this feature is not supported.
+
+     .. seealso::
+
+        :meth:`.Operations.inline_literal`
+
+    :param starting_rev: Override the "starting revision" argument
+     when using ``--sql`` mode.
+    :param tag: a string tag for usage by custom ``env.py`` scripts.
+     Set via the ``--tag`` option, can be overridden here.
+    :param template_args: dictionary of template arguments which
+     will be added to the template argument environment when
+     running the "revision" command. Note that the script environment
+     is only run within the "revision" command if the --autogenerate
+     option is used, or if the option "revision_environment=true"
+     is present in the alembic.ini file.
+
+    :param version_table: The name of the Alembic version table.
+     The default is ``'alembic_version'``.
+    :param version_table_schema: Optional schema to place version
+     table within.
+    :param version_table_pk: boolean, whether the Alembic version table
+     should use a primary key constraint for the "value" column; this
+     only takes effect when the table is first created.
+     Defaults to True; setting to False should not be necessary and is
+     here for backwards compatibility reasons.
+    :param on_version_apply: a callable or collection of callables to be
+     run for each migration step.
+     The callables will be run in the order they are given, once for
+     each migration step, after the respective operation has been
+     applied but before its transaction is finalized.
+     Each callable accepts no positional arguments and the following
+     keyword arguments:
+
+     * ``ctx``: the :class:`.MigrationContext` running the migration,
+     * ``step``: a :class:`.MigrationInfo` representing the
+       step currently being applied,
+     * ``heads``: a collection of version strings representing the
+       current heads,
+     * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`.
+
+    Parameters specific to the autogenerate feature, when
+    ``alembic revision`` is run with the ``--autogenerate`` feature:
+
+    :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
+     object, or a sequence of :class:`~sqlalchemy.schema.MetaData`
+     objects, that will be consulted during autogeneration.
+     The tables present in each :class:`~sqlalchemy.schema.MetaData`
+     will be compared against
+     what is locally available on the target
+     :class:`~sqlalchemy.engine.Connection`
+     to produce candidate upgrade/downgrade operations.
+    :param compare_type: Indicates type comparison behavior during
+     an autogenerate
+     operation. Defaults to ``True`` turning on type comparison, which
+     has good accuracy on most backends. See :ref:`compare_types`
+     for an example as well as information on other type
+     comparison options. Set to ``False`` which disables type
+     comparison. A callable can also be passed to provide custom type
+     comparison, see :ref:`compare_types` for additional details.
+
+     .. versionchanged:: 1.12.0 The default value of
+        :paramref:`.EnvironmentContext.configure.compare_type` has been
+        changed to ``True``.
+
+     .. seealso::
+
+        :ref:`compare_types`
+
+        :paramref:`.EnvironmentContext.configure.compare_server_default`
+
+    :param compare_server_default: Indicates server default comparison
+     behavior during
+     an autogenerate operation. Defaults to ``False`` which disables
+     server default
+     comparison. Set to ``True`` to turn on server default comparison,
+     which has
+     varied accuracy depending on backend.
+
+     To customize server default comparison behavior, a callable may
+     be specified which can filter server default comparisons during an
+     autogenerate operation. The format of this callable is::
+
+        def my_compare_server_default(context, inspected_column,
+                metadata_column, inspected_default, metadata_default,
+                rendered_metadata_default):
+            # return True if the defaults are different,
+            # False if not, or None to allow the default implementation
+            # to compare these defaults
+            return None
+
+        context.configure(
+            # ...
+            compare_server_default = my_compare_server_default
+        )
+
+     ``inspected_column`` is a dictionary structure as returned by
+     :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+     ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+     the local model environment.
+
+     A return value of ``None`` indicates to allow default server default
+     comparison
+     to proceed. Note that some backends such as Postgresql actually
+     execute
+     the two defaults on the database side to compare for equivalence.
+
+     .. seealso::
+
+        :paramref:`.EnvironmentContext.configure.compare_type`
+
+    :param include_name: A callable function which is given
+     the chance to return ``True`` or ``False`` for any database reflected
+     object based on its name, including database schema names when
+     the :paramref:`.EnvironmentContext.configure.include_schemas` flag
+     is set to ``True``.
+
+     The function accepts the following positional arguments:
+
+     * ``name``: the name of the object, such as schema name or table name.
+       Will be ``None`` when indicating the default schema name of the
+       database connection.
+     * ``type``: a string describing the type of object; currently
+       ``"schema"``, ``"table"``, ``"column"``, ``"index"``,
+       ``"unique_constraint"``, or ``"foreign_key_constraint"``
+     * ``parent_names``: a dictionary of "parent" object names, that are
+       relative to the name being given. Keys in this dictionary may
+       include: ``"schema_name"``, ``"table_name"`` or
+       ``"schema_qualified_table_name"``.
+
+     E.g.::
+
+        def include_name(name, type_, parent_names):
+            if type_ == "schema":
+                return name in ["schema_one", "schema_two"]
+            else:
+                return True
+
+        context.configure(
+            # ...
+            include_schemas = True,
+            include_name = include_name
+        )
+
+     .. seealso::
+
+        :ref:`autogenerate_include_hooks`
+
+        :paramref:`.EnvironmentContext.configure.include_object`
+
+        :paramref:`.EnvironmentContext.configure.include_schemas`
+
+    :param include_object: A callable function which is given
+     the chance to return ``True`` or ``False`` for any object,
+     indicating if the given object should be considered in the
+     autogenerate sweep.
+
+     The function accepts the following positional arguments:
+
+     * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such
+       as a :class:`~sqlalchemy.schema.Table`,
+       :class:`~sqlalchemy.schema.Column`,
+       :class:`~sqlalchemy.schema.Index`
+       :class:`~sqlalchemy.schema.UniqueConstraint`,
+       or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object
+     * ``name``: the name of the object. This is typically available
+       via ``object.name``.
+     * ``type``: a string describing the type of object; currently
+       ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``,
+       or ``"foreign_key_constraint"``
+     * ``reflected``: ``True`` if the given object was produced based on
+       table reflection, ``False`` if it's from a local :class:`.MetaData`
+       object.
+     * ``compare_to``: the object being compared against, if available,
+       else ``None``.
+
+     E.g.::
+
+        def include_object(object, name, type_, reflected, compare_to):
+            if (type_ == "column" and
+                    not reflected and
+                    object.info.get("skip_autogenerate", False)):
+                return False
+            else:
+                return True
+
+        context.configure(
+            # ...
+            include_object = include_object
+        )
+
+     For the use case of omitting specific schemas from a target database
+     when :paramref:`.EnvironmentContext.configure.include_schemas` is
+     set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema`
+     attribute can be checked for each :class:`~sqlalchemy.schema.Table`
+     object passed to the hook, however it is much more efficient
+     to filter on schemas before reflection of objects takes place
+     using the :paramref:`.EnvironmentContext.configure.include_name`
+     hook.
+
+     .. seealso::
+
+        :ref:`autogenerate_include_hooks`
+
+        :paramref:`.EnvironmentContext.configure.include_name`
+
+        :paramref:`.EnvironmentContext.configure.include_schemas`
+
+    :param render_as_batch: if True, commands which alter elements
+     within a table will be placed under a ``with batch_alter_table():``
+     directive, so that batch migrations will take place.
+
+     .. seealso::
+
+        :ref:`batch_migrations`
+
+    :param include_schemas: If True, autogenerate will scan across
+     all schemas located by the SQLAlchemy
+     :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+     method, and include all differences in tables found across all
+     those schemas. When using this option, you may want to also
+     use the :paramref:`.EnvironmentContext.configure.include_name`
+     parameter to specify a callable which
+     can filter the tables/schemas that get included.
+
+     .. seealso::
+
+        :ref:`autogenerate_include_hooks`
+
+        :paramref:`.EnvironmentContext.configure.include_name`
+
+        :paramref:`.EnvironmentContext.configure.include_object`
+
+    :param render_item: Callable that can be used to override how
+     any schema item, i.e. column, constraint, type,
+     etc., is rendered for autogenerate. The callable receives a
+     string describing the type of object, the object, and
+     the autogen context. If it returns False, the
+     default rendering method will be used. If it returns None,
+     the item will not be rendered in the context of a Table
+     construct, that is, can be used to skip columns or constraints
+     within op.create_table()::
+
+        def my_render_column(type_, col, autogen_context):
+            if type_ == "column" and isinstance(col, MySpecialCol):
+                return repr(col)
+            else:
+                return False
+
+        context.configure(
+            # ...
+            render_item = my_render_column
+        )
+
+     Available values for the type string include: ``"column"``,
+     ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``,
+     ``"type"``, ``"server_default"``.
+
+     .. seealso::
+
+        :ref:`autogen_render_types`
+
+    :param upgrade_token: When autogenerate completes, the text of the
+     candidate upgrade operations will be present in this template
+     variable when ``script.py.mako`` is rendered. Defaults to
+     ``upgrades``.
+    :param downgrade_token: When autogenerate completes, the text of the
+     candidate downgrade operations will be present in this
+     template variable when ``script.py.mako`` is rendered. Defaults to
+     ``downgrades``.
+
+    :param alembic_module_prefix: When autogenerate refers to Alembic
+     :mod:`alembic.operations` constructs, this prefix will be used
+     (i.e. ``op.create_table``) Defaults to "``op.``".
+     Can be ``None`` to indicate no prefix.
+
+    :param sqlalchemy_module_prefix: When autogenerate refers to
+     SQLAlchemy
+     :class:`~sqlalchemy.schema.Column` or type classes, this prefix
+     will be used
+     (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``".
+     Can be ``None`` to indicate no prefix.
+     Note that when dialect-specific types are rendered, autogenerate
+     will render them using the dialect module name, i.e. ``mssql.BIT()``,
+     ``postgresql.UUID()``.
+
+    :param user_module_prefix: When autogenerate refers to a SQLAlchemy
+     type (e.g. :class:`.TypeEngine`) where the module name is not
+     under the ``sqlalchemy`` namespace, this prefix will be used
+     within autogenerate. If left at its default of
+     ``None``, the ``__module__`` attribute of the type is used to
+     render the import module. It's a good practice to set this
+     and to have all custom types be available from a fixed module space,
+     in order to future-proof migration files against reorganizations
+     in modules.
+
+     .. seealso::
+
+        :ref:`autogen_module_prefix`
+
+    :param process_revision_directives: a callable function that will
+     be passed a structure representing the end result of an autogenerate
+     or plain "revision" operation, which can be manipulated to affect
+     how the ``alembic revision`` command ultimately outputs new
+     revision scripts. The structure of the callable is::
+
+        def process_revision_directives(context, revision, directives):
+            pass
+
+     The ``directives`` parameter is a Python list containing
+     a single :class:`.MigrationScript` directive, which represents
+     the revision file to be generated. This list as well as its
+     contents may be freely modified to produce any set of commands.
+     The section :ref:`customizing_revision` shows an example of
+     doing this. The ``context`` parameter is the
+     :class:`.MigrationContext` in use,
+     and ``revision`` is a tuple of revision identifiers representing the
+     current revision of the database.
+
+     The callable is invoked at all times when the ``--autogenerate``
+     option is passed to ``alembic revision``. If ``--autogenerate``
+     is not passed, the callable is invoked only if the
+     ``revision_environment`` variable is set to True in the Alembic
+     configuration, in which case the given ``directives`` collection
+     will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
+     collections for ``.upgrade_ops`` and ``.downgrade_ops``. The
+     ``--autogenerate`` option itself can be inferred by inspecting
+     ``context.config.cmd_opts.autogenerate``.
+
+     The callable function may optionally be an instance of
+     a :class:`.Rewriter` object. This is a helper object that
+     assists in the production of autogenerate-stream rewriter functions.
+
+     .. seealso::
+
+        :ref:`customizing_revision`
+
+        :ref:`autogen_rewriter`
+
+        :paramref:`.command.revision.process_revision_directives`
+
+    Parameters specific to individual backends:
+
+    :param mssql_batch_separator: The "batch separator" which will
+     be placed between each statement when generating offline SQL Server
+     migrations. Defaults to ``GO``. Note this is in addition to the
+     customary semicolon ``;`` at the end of each statement; SQL Server
+     considers the "batch separator" to denote the end of an
+     individual statement execution, and cannot group certain
+     dependent operations in one step.
+    :param oracle_batch_separator: The "batch separator" which will
+     be placed between each statement when generating offline
+     Oracle migrations. Defaults to ``/``. Oracle doesn't add a
+     semicolon between statements like most other backends.
+
+    """
+
+def execute(
+    sql: Union[Executable, str],
+    execution_options: Optional[Dict[str, Any]] = None,
+) -> None:
+    """Execute the given SQL using the current change context.
+
+    The behavior of :meth:`.execute` is the same
+    as that of :meth:`.Operations.execute`. Please see that
+    function's documentation for full detail including
+    caveats and limitations.
+
+    This function requires that a :class:`.MigrationContext` has
+    first been made available via :meth:`.configure`.
+
+    """
+
+def get_bind() -> Connection:
+    """Return the current 'bind'.
+
+    In "online" mode, this is the
+    :class:`sqlalchemy.engine.Connection` currently being used
+    to emit SQL to the database.
+
+    This function requires that a :class:`.MigrationContext`
+    has first been made available via :meth:`.configure`.
+
+    """
+
+def get_context() -> MigrationContext:
+    """Return the current :class:`.MigrationContext` object.
+
+    If :meth:`.EnvironmentContext.configure` has not been
+    called yet, raises an exception.
+
+    """
+
+def get_head_revision() -> Union[str, Tuple[str, ...], None]:
+    """Return the hex identifier of the 'head' script revision.
+
+    If the script directory has multiple heads, this
+    method raises a :class:`.CommandError`;
+    :meth:`.EnvironmentContext.get_head_revisions` should be preferred.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    .. seealso:: :meth:`.EnvironmentContext.get_head_revisions`
+
+    """
+
+def get_head_revisions() -> Union[str, Tuple[str, ...], None]:
+    """Return the hex identifier of the 'heads' script revision(s).
+
+    This returns a tuple containing the version number of all
+    heads in the script directory.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    """
+
+def get_revision_argument() -> Union[str, Tuple[str, ...], None]:
+    """Get the 'destination' revision argument.
+
+    This is typically the argument passed to the
+    ``upgrade`` or ``downgrade`` command.
+
+    If it was specified as ``head``, the actual
+    version number is returned; if specified
+    as ``base``, ``None`` is returned.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    """
+
+def get_starting_revision_argument() -> Union[str, Tuple[str, ...], None]:
+    """Return the 'starting revision' argument,
+    if the revision was passed using ``start:end``.
+
+    This is only meaningful in "offline" mode.
+    Returns ``None`` if no value is available
+    or was configured.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    """
+
+def get_tag_argument() -> Optional[str]:
+    """Return the value passed for the ``--tag`` argument, if any.
+
+    The ``--tag`` argument is not used directly by Alembic,
+    but is available for custom ``env.py`` configurations that
+    wish to use it; particularly for offline generation scripts
+    that wish to generate tagged filenames.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    .. seealso::
+
+        :meth:`.EnvironmentContext.get_x_argument` - a newer and more
+        open ended system of extending ``env.py`` scripts via the command
+        line.
+
+    """
+
+@overload
+def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ...
+@overload
+def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ...
+@overload
+def get_x_argument(
+    as_dictionary: bool = ...,
+) -> Union[List[str], Dict[str, str]]:
+    """Return the value(s) passed for the ``-x`` argument, if any.
+
+    The ``-x`` argument is an open ended flag that allows any user-defined
+    value or values to be passed on the command line, then available
+    here for consumption by a custom ``env.py`` script.
+
+    The return value is a list, returned directly from the ``argparse``
+    structure. If ``as_dictionary=True`` is passed, the ``x`` arguments
+    are parsed using ``key=value`` format into a dictionary that is
+    then returned. If there is no ``=`` in the argument, value is an empty
+    string.
+
+    .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when
+       arguments are passed without the ``=`` symbol.
+
+    For example, to support passing a database URL on the command line,
+    the standard ``env.py`` script can be modified like this::
+
+        cmd_line_url = context.get_x_argument(
+            as_dictionary=True).get('dbname')
+        if cmd_line_url:
+            engine = create_engine(cmd_line_url)
+        else:
+            engine = engine_from_config(
+                config.get_section(config.config_ini_section),
+                prefix='sqlalchemy.',
+                poolclass=pool.NullPool)
+
+    This then takes effect by running the ``alembic`` script as::
+
+        alembic -x dbname=postgresql://user:pass@host/dbname upgrade head
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    .. seealso::
+
+        :meth:`.EnvironmentContext.get_tag_argument`
+
+        :attr:`.Config.cmd_opts`
+
+    """
+
+def is_offline_mode() -> bool:
+    """Return True if the current migrations environment
+    is running in "offline mode".
+
+    This is ``True`` or ``False`` depending
+    on the ``--sql`` flag passed.
+
+    This function does not require that the :class:`.MigrationContext`
+    has been configured.
+
+    """
+
+def is_transactional_ddl() -> bool:
+    """Return True if the context is configured to expect a
+    transactional DDL capable backend.
+
+    This defaults to the type of database in use, and
+    can be overridden by the ``transactional_ddl`` argument
+    to :meth:`.configure`
+
+    This function requires that a :class:`.MigrationContext`
+    has first been made available via :meth:`.configure`.
+
+    """
+
+def run_migrations(**kw: Any) -> None:
+    """Run migrations as determined by the current command line
+    configuration
+    as well as versioning information present (or not) in the current
+    database connection (if one is present).
+
+    The function accepts optional ``**kw`` arguments. If these are
+    passed, they are sent directly to the ``upgrade()`` and
+    ``downgrade()``
+    functions within each target revision file. By modifying the
+    ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
+    functions accept arguments, parameters can be passed here so that
+    contextual information, usually information to identify a particular
+    database in use, can be passed from a custom ``env.py`` script
+    to the migration functions.
+
+    This function requires that a :class:`.MigrationContext` has
+    first been made available via :meth:`.configure`.
+
+    """
+
+script: ScriptDirectory
+
+def static_output(text: str) -> None:
+    """Emit text directly to the "offline" SQL stream.
+
+    Typically this is for emitting comments that
+    start with --. The statement is not treated
+    as a SQL execution, no ; or batch separator
+    is added, etc.
+
+    """
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/__init__.py ADDED
@@ -0,0 +1,6 @@
+from . import mssql
+from . import mysql
+from . import oracle
+from . import postgresql
+from . import sqlite
+from .impl import DefaultImpl as DefaultImpl
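These imports are load-time side effects: each dialect module defines a DefaultImpl subclass keyed to its dialect name, so importing alembic.ddl makes every built-in implementation available to the migration runtime. A sketch of how a third-party dialect would typically plug in, assuming a dialect named "mydialect"; the class name and body here are hypothetical:

    from alembic.ddl.impl import DefaultImpl

    class MyDialectImpl(DefaultImpl):
        # the __dialect__ attribute registers this impl for the
        # matching SQLAlchemy dialect name at class-creation time
        __dialect__ = "mydialect"
        transactional_ddl = True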
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/_autogen.py ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ from typing import Any
7
+ from typing import ClassVar
8
+ from typing import Dict
9
+ from typing import Generic
10
+ from typing import NamedTuple
11
+ from typing import Optional
12
+ from typing import Sequence
13
+ from typing import Tuple
14
+ from typing import Type
15
+ from typing import TYPE_CHECKING
16
+ from typing import TypeVar
17
+ from typing import Union
18
+
19
+ from sqlalchemy.sql.schema import Constraint
20
+ from sqlalchemy.sql.schema import ForeignKeyConstraint
21
+ from sqlalchemy.sql.schema import Index
22
+ from sqlalchemy.sql.schema import UniqueConstraint
23
+ from typing_extensions import TypeGuard
24
+
25
+ from .. import util
26
+ from ..util import sqla_compat
27
+
28
+ if TYPE_CHECKING:
29
+ from typing import Literal
30
+
31
+ from alembic.autogenerate.api import AutogenContext
32
+ from alembic.ddl.impl import DefaultImpl
33
+
34
+ CompareConstraintType = Union[Constraint, Index]
35
+
36
+ _C = TypeVar("_C", bound=CompareConstraintType)
37
+
38
+ _clsreg: Dict[str, Type[_constraint_sig]] = {}
39
+
40
+
41
+ class ComparisonResult(NamedTuple):
42
+ status: Literal["equal", "different", "skip"]
43
+ message: str
44
+
45
+ @property
46
+ def is_equal(self) -> bool:
47
+ return self.status == "equal"
48
+
49
+ @property
50
+ def is_different(self) -> bool:
51
+ return self.status == "different"
52
+
53
+ @property
54
+ def is_skip(self) -> bool:
55
+ return self.status == "skip"
56
+
57
+ @classmethod
58
+ def Equal(cls) -> ComparisonResult:
59
+ """the constraints are equal."""
60
+ return cls("equal", "The two constraints are equal")
61
+
62
+ @classmethod
63
+ def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult:
64
+ """the constraints are different for the provided reason(s)."""
65
+ return cls("different", ", ".join(util.to_list(reason)))
66
+
67
+ @classmethod
68
+ def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult:
69
+ """the constraint cannot be compared for the provided reason(s).
70
+
71
+ The message is logged, but the constraints will be otherwise
72
+ considered equal, meaning that no migration command will be
73
+ generated.
74
+ """
75
+ return cls("skip", ", ".join(util.to_list(reason)))
76
+
77
+
78
+ class _constraint_sig(Generic[_C]):
79
+ const: _C
80
+
81
+    _sig: Tuple[Any, ...]
+    name: Optional[sqla_compat._ConstraintNameDefined]
+
+    impl: DefaultImpl
+
+    _is_index: ClassVar[bool] = False
+    _is_fk: ClassVar[bool] = False
+    _is_uq: ClassVar[bool] = False
+
+    _is_metadata: bool
+
+    def __init_subclass__(cls) -> None:
+        cls._register()
+
+    @classmethod
+    def _register(cls):
+        raise NotImplementedError()
+
+    def __init__(
+        self, is_metadata: bool, impl: DefaultImpl, const: _C
+    ) -> None:
+        raise NotImplementedError()
+
+    def compare_to_reflected(
+        self, other: _constraint_sig[Any]
+    ) -> ComparisonResult:
+        assert self.impl is other.impl
+        assert self._is_metadata
+        assert not other._is_metadata
+
+        return self._compare_to_reflected(other)
+
+    def _compare_to_reflected(
+        self, other: _constraint_sig[_C]
+    ) -> ComparisonResult:
+        raise NotImplementedError()
+
+    @classmethod
+    def from_constraint(
+        cls, is_metadata: bool, impl: DefaultImpl, constraint: _C
+    ) -> _constraint_sig[_C]:
+        # these could be cached by constraint/impl, however, if the
+        # constraint is modified in place, then the sig is wrong. the mysql
+        # impl currently does this, and if we fixed that we can't be sure
+        # someone else might do it too, so play it safe.
+        sig = _clsreg[constraint.__visit_name__](is_metadata, impl, constraint)
+        return sig
+
+    def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]:
+        return sqla_compat._get_constraint_final_name(
+            self.const, context.dialect
+        )
+
+    @util.memoized_property
+    def is_named(self):
+        return sqla_compat._constraint_is_named(self.const, self.impl.dialect)
+
+    @util.memoized_property
+    def unnamed(self) -> Tuple[Any, ...]:
+        return self._sig
+
+    @util.memoized_property
+    def unnamed_no_options(self) -> Tuple[Any, ...]:
+        raise NotImplementedError()
+
+    @util.memoized_property
+    def _full_sig(self) -> Tuple[Any, ...]:
+        return (self.name,) + self.unnamed
+
+    def __eq__(self, other) -> bool:
+        return self._full_sig == other._full_sig
+
+    def __ne__(self, other) -> bool:
+        return self._full_sig != other._full_sig
+
+    def __hash__(self) -> int:
+        return hash(self._full_sig)
+
+
+class _uq_constraint_sig(_constraint_sig[UniqueConstraint]):
+    _is_uq = True
+
+    @classmethod
+    def _register(cls) -> None:
+        _clsreg["unique_constraint"] = cls
+
+    is_unique = True
+
+    def __init__(
+        self,
+        is_metadata: bool,
+        impl: DefaultImpl,
+        const: UniqueConstraint,
+    ) -> None:
+        self.impl = impl
+        self.const = const
+        self.name = sqla_compat.constraint_name_or_none(const.name)
+        self._sig = tuple(sorted([col.name for col in const.columns]))
+        self._is_metadata = is_metadata
+
+    @property
+    def column_names(self) -> Tuple[str, ...]:
+        return tuple([col.name for col in self.const.columns])
+
+    def _compare_to_reflected(
+        self, other: _constraint_sig[_C]
+    ) -> ComparisonResult:
+        assert self._is_metadata
+        metadata_obj = self
+        conn_obj = other
+
+        assert is_uq_sig(conn_obj)
+        return self.impl.compare_unique_constraint(
+            metadata_obj.const, conn_obj.const
+        )
+
+
+class _ix_constraint_sig(_constraint_sig[Index]):
+    _is_index = True
+
+    name: sqla_compat._ConstraintName
+
+    @classmethod
+    def _register(cls) -> None:
+        _clsreg["index"] = cls
+
+    def __init__(
+        self, is_metadata: bool, impl: DefaultImpl, const: Index
+    ) -> None:
+        self.impl = impl
+        self.const = const
+        self.name = const.name
+        self.is_unique = bool(const.unique)
+        self._is_metadata = is_metadata
+
+    def _compare_to_reflected(
+        self, other: _constraint_sig[_C]
+    ) -> ComparisonResult:
+        assert self._is_metadata
+        metadata_obj = self
+        conn_obj = other
+
+        assert is_index_sig(conn_obj)
+        return self.impl.compare_indexes(metadata_obj.const, conn_obj.const)
+
+    @util.memoized_property
+    def has_expressions(self):
+        return sqla_compat.is_expression_index(self.const)
+
+    @util.memoized_property
+    def column_names(self) -> Tuple[str, ...]:
+        return tuple([col.name for col in self.const.columns])
+
+    @util.memoized_property
+    def column_names_optional(self) -> Tuple[Optional[str], ...]:
+        return tuple(
+            [getattr(col, "name", None) for col in self.const.expressions]
+        )
+
+    @util.memoized_property
+    def is_named(self):
+        return True
+
+    @util.memoized_property
+    def unnamed(self):
+        return (self.is_unique,) + self.column_names_optional
+
+
+class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]):
+    _is_fk = True
+
+    @classmethod
+    def _register(cls) -> None:
+        _clsreg["foreign_key_constraint"] = cls
+
+    def __init__(
+        self,
+        is_metadata: bool,
+        impl: DefaultImpl,
+        const: ForeignKeyConstraint,
+    ) -> None:
+        self._is_metadata = is_metadata
+
+        self.impl = impl
+        self.const = const
+
+        self.name = sqla_compat.constraint_name_or_none(const.name)
+
+        (
+            self.source_schema,
+            self.source_table,
+            self.source_columns,
+            self.target_schema,
+            self.target_table,
+            self.target_columns,
+            onupdate,
+            ondelete,
+            deferrable,
+            initially,
+        ) = sqla_compat._fk_spec(const)
+
+        self._sig: Tuple[Any, ...] = (
+            self.source_schema,
+            self.source_table,
+            tuple(self.source_columns),
+            self.target_schema,
+            self.target_table,
+            tuple(self.target_columns),
+        ) + (
+            (
+                (None if onupdate.lower() == "no action" else onupdate.lower())
+                if onupdate
+                else None
+            ),
+            (
+                (None if ondelete.lower() == "no action" else ondelete.lower())
+                if ondelete
+                else None
+            ),
+            # convert initially + deferrable into one three-state value
+            (
+                "initially_deferrable"
+                if initially and initially.lower() == "deferred"
+                else "deferrable" if deferrable else "not deferrable"
+            ),
+        )
+
+    @util.memoized_property
+    def unnamed_no_options(self):
+        return (
+            self.source_schema,
+            self.source_table,
+            tuple(self.source_columns),
+            self.target_schema,
+            self.target_table,
+            tuple(self.target_columns),
+        )
+
+
+def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]:
+    return sig._is_index
+
+
+def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]:
+    return sig._is_uq
+
+
+def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]:
+    return sig._is_fk
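
A minimal sketch of how these pieces fit together: `from_constraint` dispatches on the constraint's `__visit_name__` through the `_clsreg` registry that each subclass fills in via `__init_subclass__`, and the `is_*_sig` helpers are `TypeGuard` functions that narrow a generic signature to its index/unique/foreign-key variant. The sketch below uses alembic's private internals purely for illustration; the SQLite impl and the example table are arbitrary choices, not part of the file above.

    import io

    from sqlalchemy import Column, Index, Integer, MetaData, Table
    from sqlalchemy.dialects import sqlite

    from alembic.ddl import sqlite as _sqlite_impl  # noqa: F401 -- registers the sqlite impl
    from alembic.ddl._autogen import is_index_sig
    from alembic.ddl.impl import DefaultImpl

    dialect = sqlite.dialect()
    # as_sql=True with a StringIO buffer avoids needing a live connection
    impl = DefaultImpl.get_by_dialect(dialect)(
        dialect, None, True, None, io.StringIO(), {}
    )

    t = Table("t", MetaData(), Column("x", Integer))
    ix = Index("ix_t_x", t.c.x, unique=True)

    # from_constraint dispatches on Index.__visit_name__ == "index"
    sig = impl._create_metadata_constraint_sig(ix)
    if is_index_sig(sig):  # TypeGuard: sig is narrowed to _ix_constraint_sig
        print(sig.column_names, sig.is_unique)  # ('x',) True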
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/base.py ADDED
@@ -0,0 +1,364 @@
+# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+# mypy: no-warn-return-any, allow-any-generics
+
+from __future__ import annotations
+
+import functools
+from typing import Optional
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import exc
+from sqlalchemy import Integer
+from sqlalchemy import types as sqltypes
+from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.schema import Column
+from sqlalchemy.schema import DDLElement
+from sqlalchemy.sql.elements import quoted_name
+
+from ..util.sqla_compat import _columns_for_constraint  # noqa
+from ..util.sqla_compat import _find_columns  # noqa
+from ..util.sqla_compat import _fk_spec  # noqa
+from ..util.sqla_compat import _is_type_bound  # noqa
+from ..util.sqla_compat import _table_for_constraint  # noqa
+
+if TYPE_CHECKING:
+    from typing import Any
+
+    from sqlalchemy import Computed
+    from sqlalchemy import Identity
+    from sqlalchemy.sql.compiler import Compiled
+    from sqlalchemy.sql.compiler import DDLCompiler
+    from sqlalchemy.sql.elements import TextClause
+    from sqlalchemy.sql.functions import Function
+    from sqlalchemy.sql.schema import FetchedValue
+    from sqlalchemy.sql.type_api import TypeEngine
+
+    from .impl import DefaultImpl
+
+_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str]
+
+
+class AlterTable(DDLElement):
+    """Represent an ALTER TABLE statement.
+
+    Only the string name and optional schema name of the table
+    are required, not a full Table object.
+
+    """
+
+    def __init__(
+        self,
+        table_name: str,
+        schema: Optional[Union[quoted_name, str]] = None,
+    ) -> None:
+        self.table_name = table_name
+        self.schema = schema
+
+
+class RenameTable(AlterTable):
+    def __init__(
+        self,
+        old_table_name: str,
+        new_table_name: Union[quoted_name, str],
+        schema: Optional[Union[quoted_name, str]] = None,
+    ) -> None:
+        super().__init__(old_table_name, schema=schema)
+        self.new_table_name = new_table_name
+
+
+class AlterColumn(AlterTable):
+    def __init__(
+        self,
+        name: str,
+        column_name: str,
+        schema: Optional[str] = None,
+        existing_type: Optional[TypeEngine] = None,
+        existing_nullable: Optional[bool] = None,
+        existing_server_default: Optional[_ServerDefault] = None,
+        existing_comment: Optional[str] = None,
+    ) -> None:
+        super().__init__(name, schema=schema)
+        self.column_name = column_name
+        self.existing_type = (
+            sqltypes.to_instance(existing_type)
+            if existing_type is not None
+            else None
+        )
+        self.existing_nullable = existing_nullable
+        self.existing_server_default = existing_server_default
+        self.existing_comment = existing_comment
+
+
+class ColumnNullable(AlterColumn):
+    def __init__(
+        self, name: str, column_name: str, nullable: bool, **kw
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.nullable = nullable
+
+
+class ColumnType(AlterColumn):
+    def __init__(
+        self, name: str, column_name: str, type_: TypeEngine, **kw
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.type_ = sqltypes.to_instance(type_)
+
+
+class ColumnName(AlterColumn):
+    def __init__(
+        self, name: str, column_name: str, newname: str, **kw
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.newname = newname
+
+
+class ColumnDefault(AlterColumn):
+    def __init__(
+        self,
+        name: str,
+        column_name: str,
+        default: Optional[_ServerDefault],
+        **kw,
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.default = default
+
+
+class ComputedColumnDefault(AlterColumn):
+    def __init__(
+        self, name: str, column_name: str, default: Optional[Computed], **kw
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.default = default
+
+
+class IdentityColumnDefault(AlterColumn):
+    def __init__(
+        self,
+        name: str,
+        column_name: str,
+        default: Optional[Identity],
+        impl: DefaultImpl,
+        **kw,
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.default = default
+        self.impl = impl
+
+
+class AddColumn(AlterTable):
+    def __init__(
+        self,
+        name: str,
+        column: Column[Any],
+        schema: Optional[Union[quoted_name, str]] = None,
+        if_not_exists: Optional[bool] = None,
+    ) -> None:
+        super().__init__(name, schema=schema)
+        self.column = column
+        self.if_not_exists = if_not_exists
+
+
+class DropColumn(AlterTable):
+    def __init__(
+        self,
+        name: str,
+        column: Column[Any],
+        schema: Optional[str] = None,
+        if_exists: Optional[bool] = None,
+    ) -> None:
+        super().__init__(name, schema=schema)
+        self.column = column
+        self.if_exists = if_exists
+
+
+class ColumnComment(AlterColumn):
+    def __init__(
+        self, name: str, column_name: str, comment: Optional[str], **kw
+    ) -> None:
+        super().__init__(name, column_name, **kw)
+        self.comment = comment
+
+
+@compiles(RenameTable)
+def visit_rename_table(
+    element: RenameTable, compiler: DDLCompiler, **kw
+) -> str:
+    return "%s RENAME TO %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        format_table_name(compiler, element.new_table_name, element.schema),
+    )
+
+
+@compiles(AddColumn)
+def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str:
+    return "%s %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        add_column(
+            compiler, element.column, if_not_exists=element.if_not_exists, **kw
+        ),
+    )
+
+
+@compiles(DropColumn)
+def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str:
+    return "%s %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        drop_column(
+            compiler, element.column.name, if_exists=element.if_exists, **kw
+        ),
+    )
+
+
+@compiles(ColumnNullable)
+def visit_column_nullable(
+    element: ColumnNullable, compiler: DDLCompiler, **kw
+) -> str:
+    return "%s %s %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        alter_column(compiler, element.column_name),
+        "DROP NOT NULL" if element.nullable else "SET NOT NULL",
+    )
+
+
+@compiles(ColumnType)
+def visit_column_type(element: ColumnType, compiler: DDLCompiler, **kw) -> str:
+    return "%s %s %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        alter_column(compiler, element.column_name),
+        "TYPE %s" % format_type(compiler, element.type_),
+    )
+
+
+@compiles(ColumnName)
+def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str:
+    return "%s RENAME %s TO %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        format_column_name(compiler, element.column_name),
+        format_column_name(compiler, element.newname),
+    )
+
+
+@compiles(ColumnDefault)
+def visit_column_default(
+    element: ColumnDefault, compiler: DDLCompiler, **kw
+) -> str:
+    return "%s %s %s" % (
+        alter_table(compiler, element.table_name, element.schema),
+        alter_column(compiler, element.column_name),
+        (
+            "SET DEFAULT %s" % format_server_default(compiler, element.default)
+            if element.default is not None
+            else "DROP DEFAULT"
+        ),
+    )
+
+
+@compiles(ComputedColumnDefault)
+def visit_computed_column(
+    element: ComputedColumnDefault, compiler: DDLCompiler, **kw
+):
+    raise exc.CompileError(
+        'Adding or removing a "computed" construct, e.g. GENERATED '
+        "ALWAYS AS, to or from an existing column is not supported."
+    )
+
+
+@compiles(IdentityColumnDefault)
+def visit_identity_column(
+    element: IdentityColumnDefault, compiler: DDLCompiler, **kw
+):
+    raise exc.CompileError(
+        'Adding, removing or modifying an "identity" construct, '
+        "e.g. GENERATED AS IDENTITY, to or from an existing "
+        "column is not supported in this dialect."
+    )
+
+
+def quote_dotted(
+    name: Union[quoted_name, str], quote: functools.partial
+) -> Union[quoted_name, str]:
+    """quote the elements of a dotted name"""
+
+    if isinstance(name, quoted_name):
+        return quote(name)
+    result = ".".join([quote(x) for x in name.split(".")])
+    return result
+
+
+def format_table_name(
+    compiler: Compiled,
+    name: Union[quoted_name, str],
+    schema: Optional[Union[quoted_name, str]],
+) -> Union[quoted_name, str]:
+    quote = functools.partial(compiler.preparer.quote)
+    if schema:
+        return quote_dotted(schema, quote) + "." + quote(name)
+    else:
+        return quote(name)
+
+
+def format_column_name(
+    compiler: DDLCompiler, name: Optional[Union[quoted_name, str]]
+) -> Union[quoted_name, str]:
+    return compiler.preparer.quote(name)  # type: ignore[arg-type]
+
+
+def format_server_default(
+    compiler: DDLCompiler,
+    default: Optional[_ServerDefault],
+) -> str:
+    # this can be updated to use compiler.render_default_string
+    # for SQLAlchemy 2.0 and above; not in 1.4
+    default_str = compiler.get_column_default_string(
+        Column("x", Integer, server_default=default)
+    )
+    assert default_str is not None
+    return default_str
+
+
+def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str:
+    return compiler.dialect.type_compiler.process(type_)
+
+
+def alter_table(
+    compiler: DDLCompiler,
+    name: str,
+    schema: Optional[str],
+) -> str:
+    return "ALTER TABLE %s" % format_table_name(compiler, name, schema)
+
+
+def drop_column(
+    compiler: DDLCompiler, name: str, if_exists: Optional[bool] = None, **kw
+) -> str:
+    return "DROP COLUMN %s%s" % (
+        "IF EXISTS " if if_exists else "",
+        format_column_name(compiler, name),
+    )
+
+
+def alter_column(compiler: DDLCompiler, name: str) -> str:
+    return "ALTER COLUMN %s" % format_column_name(compiler, name)
+
+
+def add_column(
+    compiler: DDLCompiler,
+    column: Column[Any],
+    if_not_exists: Optional[bool] = None,
+    **kw,
+) -> str:
+    text = "ADD COLUMN %s%s" % (
+        "IF NOT EXISTS " if if_not_exists else "",
+        compiler.get_column_specification(column, **kw),
+    )
+
+    const = " ".join(
+        compiler.process(constraint) for constraint in column.constraints
+    )
+    if const:
+        text += " " + const
+
+    return text
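
These DDL elements are plain SQLAlchemy `DDLElement` subclasses compiled through the `@compiles` extension point, which is also how the dialect modules further below (mssql, mysql, ...) override them per backend. A minimal sketch of the same pattern; the dialect name "mydialect" is a placeholder for illustration, real registrations target actual dialect names such as "mssql":

    from sqlalchemy.ext.compiler import compiles

    from alembic.ddl.base import (
        ColumnName,
        alter_table,
        format_column_name,
    )

    @compiles(ColumnName, "mydialect")  # "mydialect" is a hypothetical name
    def _visit_column_name(element, compiler, **kw):
        # emit "RENAME COLUMN ... TO ..." instead of the default
        # "RENAME ... TO ..." produced by visit_column_name above
        return "%s RENAME COLUMN %s TO %s" % (
            alter_table(compiler, element.table_name, element.schema),
            format_column_name(compiler, element.column_name),
            format_column_name(compiler, element.newname),
        )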
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/impl.py ADDED
@@ -0,0 +1,902 @@
+# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+# mypy: no-warn-return-any, allow-any-generics
+
+from __future__ import annotations
+
+import logging
+import re
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Iterable
+from typing import List
+from typing import Mapping
+from typing import NamedTuple
+from typing import Optional
+from typing import Sequence
+from typing import Set
+from typing import Tuple
+from typing import Type
+from typing import TYPE_CHECKING
+from typing import Union
+
+from sqlalchemy import cast
+from sqlalchemy import Column
+from sqlalchemy import MetaData
+from sqlalchemy import PrimaryKeyConstraint
+from sqlalchemy import schema
+from sqlalchemy import String
+from sqlalchemy import Table
+from sqlalchemy import text
+
+from . import _autogen
+from . import base
+from ._autogen import _constraint_sig as _constraint_sig
+from ._autogen import ComparisonResult as ComparisonResult
+from .. import util
+from ..util import sqla_compat
+
+if TYPE_CHECKING:
+    from typing import Literal
+    from typing import TextIO
+
+    from sqlalchemy.engine import Connection
+    from sqlalchemy.engine import Dialect
+    from sqlalchemy.engine.cursor import CursorResult
+    from sqlalchemy.engine.reflection import Inspector
+    from sqlalchemy.sql import ClauseElement
+    from sqlalchemy.sql import Executable
+    from sqlalchemy.sql.elements import quoted_name
+    from sqlalchemy.sql.schema import Constraint
+    from sqlalchemy.sql.schema import ForeignKeyConstraint
+    from sqlalchemy.sql.schema import Index
+    from sqlalchemy.sql.schema import UniqueConstraint
+    from sqlalchemy.sql.selectable import TableClause
+    from sqlalchemy.sql.type_api import TypeEngine
+
+    from .base import _ServerDefault
+    from ..autogenerate.api import AutogenContext
+    from ..operations.batch import ApplyBatchImpl
+    from ..operations.batch import BatchOperationsImpl
+
+log = logging.getLogger(__name__)
+
+
+class ImplMeta(type):
+    def __init__(
+        cls,
+        classname: str,
+        bases: Tuple[Type[DefaultImpl]],
+        dict_: Dict[str, Any],
+    ):
+        newtype = type.__init__(cls, classname, bases, dict_)
+        if "__dialect__" in dict_:
+            _impls[dict_["__dialect__"]] = cls  # type: ignore[assignment]
+        return newtype
+
+
+_impls: Dict[str, Type[DefaultImpl]] = {}
+
+
+class DefaultImpl(metaclass=ImplMeta):
+    """Provide the entrypoint for major migration operations,
+    including database-specific behavioral variances.
+
+    While individual SQL/DDL constructs already provide
+    for database-specific implementations, variances here
+    allow for entirely different sequences of operations
+    to take place for a particular migration, such as
+    SQL Server's special 'IDENTITY INSERT' step for
+    bulk inserts.
+
+    """
+
+    __dialect__ = "default"
+
+    transactional_ddl = False
+    command_terminator = ";"
+    type_synonyms: Tuple[Set[str], ...] = ({"NUMERIC", "DECIMAL"},)
+    type_arg_extract: Sequence[str] = ()
+    # These attributes are deprecated in SQLAlchemy via #10247. They need to
+    # be ignored to support older versions that did not use dialect kwargs.
+    # They only apply to Oracle and are replaced by oracle_order,
+    # oracle_on_null
+    identity_attrs_ignore: Tuple[str, ...] = ("order", "on_null")
+
+    def __init__(
+        self,
+        dialect: Dialect,
+        connection: Optional[Connection],
+        as_sql: bool,
+        transactional_ddl: Optional[bool],
+        output_buffer: Optional[TextIO],
+        context_opts: Dict[str, Any],
+    ) -> None:
+        self.dialect = dialect
+        self.connection = connection
+        self.as_sql = as_sql
+        self.literal_binds = context_opts.get("literal_binds", False)
+
+        self.output_buffer = output_buffer
+        self.memo: dict = {}
+        self.context_opts = context_opts
+        if transactional_ddl is not None:
+            self.transactional_ddl = transactional_ddl
+
+        if self.literal_binds:
+            if not self.as_sql:
+                raise util.CommandError(
+                    "Can't use literal_binds setting without as_sql mode"
+                )
+
+    @classmethod
+    def get_by_dialect(cls, dialect: Dialect) -> Type[DefaultImpl]:
+        return _impls[dialect.name]
+
+    def static_output(self, text: str) -> None:
+        assert self.output_buffer is not None
+        self.output_buffer.write(text + "\n\n")
+        self.output_buffer.flush()
+
+    def version_table_impl(
+        self,
+        *,
+        version_table: str,
+        version_table_schema: Optional[str],
+        version_table_pk: bool,
+        **kw: Any,
+    ) -> Table:
+        """Generate a :class:`.Table` object which will be used as the
+        structure for the Alembic version table.
+
+        Third party dialects may override this hook to provide an alternate
+        structure for this :class:`.Table`; requirements are only that it
+        be named based on the ``version_table`` parameter and contains
+        at least a single string-holding column named ``version_num``.
+
+        .. versionadded:: 1.14
+
+        """
+        vt = Table(
+            version_table,
+            MetaData(),
+            Column("version_num", String(32), nullable=False),
+            schema=version_table_schema,
+        )
+        if version_table_pk:
+            vt.append_constraint(
+                PrimaryKeyConstraint(
+                    "version_num", name=f"{version_table}_pkc"
+                )
+            )
+
+        return vt
+
+    def requires_recreate_in_batch(
+        self, batch_op: BatchOperationsImpl
+    ) -> bool:
+        """Return True if the given :class:`.BatchOperationsImpl`
+        would need the table to be recreated and copied in order to
+        proceed.
+
+        Normally, only returns True on SQLite when operations other
+        than add_column are present.
+
+        """
+        return False
+
+    def prep_table_for_batch(
+        self, batch_impl: ApplyBatchImpl, table: Table
+    ) -> None:
+        """perform any operations needed on a table before a new
+        one is created to replace it in batch mode.
+
+        the PG dialect uses this to drop constraints on the table
+        before the new one uses those same names.
+
+        """
+
+    @property
+    def bind(self) -> Optional[Connection]:
+        return self.connection
+
+    def _exec(
+        self,
+        construct: Union[Executable, str],
+        execution_options: Optional[Mapping[str, Any]] = None,
+        multiparams: Optional[Sequence[Mapping[str, Any]]] = None,
+        params: Mapping[str, Any] = util.immutabledict(),
+    ) -> Optional[CursorResult]:
+        if isinstance(construct, str):
+            construct = text(construct)
+        if self.as_sql:
+            if multiparams is not None or params:
+                raise TypeError("SQL parameters not allowed with as_sql")
+
+            compile_kw: dict[str, Any]
+            if self.literal_binds and not isinstance(
+                construct, schema.DDLElement
+            ):
+                compile_kw = dict(compile_kwargs={"literal_binds": True})
+            else:
+                compile_kw = {}
+
+            if TYPE_CHECKING:
+                assert isinstance(construct, ClauseElement)
+            compiled = construct.compile(dialect=self.dialect, **compile_kw)
+            self.static_output(
+                str(compiled).replace("\t", "    ").strip()
+                + self.command_terminator
+            )
+            return None
+        else:
+            conn = self.connection
+            assert conn is not None
+            if execution_options:
+                conn = conn.execution_options(**execution_options)
+
+            if params and multiparams is not None:
+                raise TypeError(
+                    "Can't send params and multiparams at the same time"
+                )
+
+            if multiparams:
+                return conn.execute(construct, multiparams)
+            else:
+                return conn.execute(construct, params)
+
+    def execute(
+        self,
+        sql: Union[Executable, str],
+        execution_options: Optional[dict[str, Any]] = None,
+    ) -> None:
+        self._exec(sql, execution_options)
+
+    def alter_column(
+        self,
+        table_name: str,
+        column_name: str,
+        *,
+        nullable: Optional[bool] = None,
+        server_default: Optional[
+            Union[_ServerDefault, Literal[False]]
+        ] = False,
+        name: Optional[str] = None,
+        type_: Optional[TypeEngine] = None,
+        schema: Optional[str] = None,
+        autoincrement: Optional[bool] = None,
+        comment: Optional[Union[str, Literal[False]]] = False,
+        existing_comment: Optional[str] = None,
+        existing_type: Optional[TypeEngine] = None,
+        existing_server_default: Optional[_ServerDefault] = None,
+        existing_nullable: Optional[bool] = None,
+        existing_autoincrement: Optional[bool] = None,
+        **kw: Any,
+    ) -> None:
+        if autoincrement is not None or existing_autoincrement is not None:
+            util.warn(
+                "autoincrement and existing_autoincrement "
+                "only make sense for MySQL",
+                stacklevel=3,
+            )
+        if nullable is not None:
+            self._exec(
+                base.ColumnNullable(
+                    table_name,
+                    column_name,
+                    nullable,
+                    schema=schema,
+                    existing_type=existing_type,
+                    existing_server_default=existing_server_default,
+                    existing_nullable=existing_nullable,
+                    existing_comment=existing_comment,
+                )
+            )
+        if server_default is not False:
+            kw = {}
+            cls_: Type[
+                Union[
+                    base.ComputedColumnDefault,
+                    base.IdentityColumnDefault,
+                    base.ColumnDefault,
+                ]
+            ]
+            if sqla_compat._server_default_is_computed(
+                server_default, existing_server_default
+            ):
+                cls_ = base.ComputedColumnDefault
+            elif sqla_compat._server_default_is_identity(
+                server_default, existing_server_default
+            ):
+                cls_ = base.IdentityColumnDefault
+                kw["impl"] = self
+            else:
+                cls_ = base.ColumnDefault
+            self._exec(
+                cls_(
+                    table_name,
+                    column_name,
+                    server_default,  # type:ignore[arg-type]
+                    schema=schema,
+                    existing_type=existing_type,
+                    existing_server_default=existing_server_default,
+                    existing_nullable=existing_nullable,
+                    existing_comment=existing_comment,
+                    **kw,
+                )
+            )
+        if type_ is not None:
+            self._exec(
+                base.ColumnType(
+                    table_name,
+                    column_name,
+                    type_,
+                    schema=schema,
+                    existing_type=existing_type,
+                    existing_server_default=existing_server_default,
+                    existing_nullable=existing_nullable,
+                    existing_comment=existing_comment,
+                )
+            )
+
+        if comment is not False:
+            self._exec(
+                base.ColumnComment(
+                    table_name,
+                    column_name,
+                    comment,
+                    schema=schema,
+                    existing_type=existing_type,
+                    existing_server_default=existing_server_default,
+                    existing_nullable=existing_nullable,
+                    existing_comment=existing_comment,
+                )
+            )
+
+        # do the new name last ;)
+        if name is not None:
+            self._exec(
+                base.ColumnName(
+                    table_name,
+                    column_name,
+                    name,
+                    schema=schema,
+                    existing_type=existing_type,
+                    existing_server_default=existing_server_default,
+                    existing_nullable=existing_nullable,
+                )
+            )
+
+    def add_column(
+        self,
+        table_name: str,
+        column: Column[Any],
+        *,
+        schema: Optional[Union[str, quoted_name]] = None,
+        if_not_exists: Optional[bool] = None,
+    ) -> None:
+        self._exec(
+            base.AddColumn(
+                table_name,
+                column,
+                schema=schema,
+                if_not_exists=if_not_exists,
+            )
+        )
+
+    def drop_column(
+        self,
+        table_name: str,
+        column: Column[Any],
+        *,
+        schema: Optional[str] = None,
+        if_exists: Optional[bool] = None,
+        **kw,
+    ) -> None:
+        self._exec(
+            base.DropColumn(
+                table_name, column, schema=schema, if_exists=if_exists
+            )
+        )
+
+    def add_constraint(self, const: Any) -> None:
+        if const._create_rule is None or const._create_rule(self):
+            self._exec(schema.AddConstraint(const))
+
+    def drop_constraint(self, const: Constraint, **kw: Any) -> None:
+        self._exec(schema.DropConstraint(const, **kw))
+
+    def rename_table(
+        self,
+        old_table_name: str,
+        new_table_name: Union[str, quoted_name],
+        schema: Optional[Union[str, quoted_name]] = None,
+    ) -> None:
+        self._exec(
+            base.RenameTable(old_table_name, new_table_name, schema=schema)
+        )
+
+    def create_table(self, table: Table, **kw: Any) -> None:
+        table.dispatch.before_create(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
+        self._exec(schema.CreateTable(table, **kw))
+        table.dispatch.after_create(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
+        for index in table.indexes:
+            self._exec(schema.CreateIndex(index))
+
+        with_comment = (
+            self.dialect.supports_comments and not self.dialect.inline_comments
+        )
+        comment = table.comment
+        if comment and with_comment:
+            self.create_table_comment(table)
+
+        for column in table.columns:
+            comment = column.comment
+            if comment and with_comment:
+                self.create_column_comment(column)
+
+    def drop_table(self, table: Table, **kw: Any) -> None:
+        table.dispatch.before_drop(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
+        self._exec(schema.DropTable(table, **kw))
+        table.dispatch.after_drop(
+            table, self.connection, checkfirst=False, _ddl_runner=self
+        )
+
+    def create_index(self, index: Index, **kw: Any) -> None:
+        self._exec(schema.CreateIndex(index, **kw))
+
+    def create_table_comment(self, table: Table) -> None:
+        self._exec(schema.SetTableComment(table))
+
+    def drop_table_comment(self, table: Table) -> None:
+        self._exec(schema.DropTableComment(table))
+
+    def create_column_comment(self, column: Column[Any]) -> None:
+        self._exec(schema.SetColumnComment(column))
+
+    def drop_index(self, index: Index, **kw: Any) -> None:
+        self._exec(schema.DropIndex(index, **kw))
+
+    def bulk_insert(
+        self,
+        table: Union[TableClause, Table],
+        rows: List[dict],
+        multiinsert: bool = True,
+    ) -> None:
+        if not isinstance(rows, list):
+            raise TypeError("List expected")
+        elif rows and not isinstance(rows[0], dict):
+            raise TypeError("List of dictionaries expected")
+        if self.as_sql:
+            for row in rows:
+                self._exec(
+                    table.insert()
+                    .inline()
+                    .values(
+                        **{
+                            k: (
+                                sqla_compat._literal_bindparam(
+                                    k, v, type_=table.c[k].type
+                                )
+                                if not isinstance(
+                                    v, sqla_compat._literal_bindparam
+                                )
+                                else v
+                            )
+                            for k, v in row.items()
+                        }
+                    )
+                )
+        else:
+            if rows:
+                if multiinsert:
+                    self._exec(table.insert().inline(), multiparams=rows)
+                else:
+                    for row in rows:
+                        self._exec(table.insert().inline().values(**row))
+
+    def _tokenize_column_type(self, column: Column) -> Params:
+        definition: str
+        definition = self.dialect.type_compiler.process(column.type).lower()
+
+        # tokenize the SQLAlchemy-generated version of a type, so that
+        # the two can be compared.
+        #
+        # examples:
+        # NUMERIC(10, 5)
+        # TIMESTAMP WITH TIMEZONE
+        # INTEGER UNSIGNED
+        # INTEGER (10) UNSIGNED
+        # INTEGER(10) UNSIGNED
+        # varchar character set utf8
+        #
+
+        tokens: List[str] = re.findall(r"[\w\-_]+|\(.+?\)", definition)
+
+        term_tokens: List[str] = []
+        paren_term = None
+
+        for token in tokens:
+            if re.match(r"^\(.*\)$", token):
+                paren_term = token
+            else:
+                term_tokens.append(token)
+
+        params = Params(term_tokens[0], term_tokens[1:], [], {})
+
+        if paren_term:
+            term: str
+            for term in re.findall("[^(),]+", paren_term):
+                if "=" in term:
+                    key, val = term.split("=")
+                    params.kwargs[key.strip()] = val.strip()
+                else:
+                    params.args.append(term.strip())
+
+        return params
+
+    def _column_types_match(
+        self, inspector_params: Params, metadata_params: Params
+    ) -> bool:
+        if inspector_params.token0 == metadata_params.token0:
+            return True
+
+        synonyms = [{t.lower() for t in batch} for batch in self.type_synonyms]
+        inspector_all_terms = " ".join(
+            [inspector_params.token0] + inspector_params.tokens
+        )
+        metadata_all_terms = " ".join(
+            [metadata_params.token0] + metadata_params.tokens
+        )
+
+        for batch in synonyms:
+            if {inspector_all_terms, metadata_all_terms}.issubset(batch) or {
+                inspector_params.token0,
+                metadata_params.token0,
+            }.issubset(batch):
+                return True
+        return False
+
+    def _column_args_match(
+        self, inspected_params: Params, meta_params: Params
+    ) -> bool:
+        """We want to compare column parameters. However, we only want
+        to compare parameters that are set. If they both have `collation`,
+        we want to make sure they are the same. However, if only one
+        specifies it, don't flag it for being less specific
+        """
+
+        if (
+            len(meta_params.tokens) == len(inspected_params.tokens)
+            and meta_params.tokens != inspected_params.tokens
+        ):
+            return False
+
+        if (
+            len(meta_params.args) == len(inspected_params.args)
+            and meta_params.args != inspected_params.args
+        ):
+            return False
+
+        insp = " ".join(inspected_params.tokens).lower()
+        meta = " ".join(meta_params.tokens).lower()
+
+        for reg in self.type_arg_extract:
+            mi = re.search(reg, insp)
+            mm = re.search(reg, meta)
+
+            if mi and mm and mi.group(1) != mm.group(1):
+                return False
+
+        return True
+
+    def compare_type(
+        self, inspector_column: Column[Any], metadata_column: Column
+    ) -> bool:
+        """Returns True if there ARE differences between the types of the two
+        columns. Takes impl.type_synonyms into account between introspected
+        and metadata types
+        """
+        inspector_params = self._tokenize_column_type(inspector_column)
+        metadata_params = self._tokenize_column_type(metadata_column)
+
+        if not self._column_types_match(inspector_params, metadata_params):
+            return True
+        if not self._column_args_match(inspector_params, metadata_params):
+            return True
+        return False
+
+    def compare_server_default(
+        self,
+        inspector_column,
+        metadata_column,
+        rendered_metadata_default,
+        rendered_inspector_default,
+    ):
+        return rendered_inspector_default != rendered_metadata_default
+
+    def correct_for_autogen_constraints(
+        self,
+        conn_uniques: Set[UniqueConstraint],
+        conn_indexes: Set[Index],
+        metadata_unique_constraints: Set[UniqueConstraint],
+        metadata_indexes: Set[Index],
+    ) -> None:
+        pass
+
+    def cast_for_batch_migrate(self, existing, existing_transfer, new_type):
+        if existing.type._type_affinity is not new_type._type_affinity:
+            existing_transfer["expr"] = cast(
+                existing_transfer["expr"], new_type
+            )
+
+    def render_ddl_sql_expr(
+        self, expr: ClauseElement, is_server_default: bool = False, **kw: Any
+    ) -> str:
+        """Render a SQL expression that is typically a server default,
+        index expression, etc.
+
+        """
+
+        compile_kw = {"literal_binds": True, "include_table": False}
+
+        return str(
+            expr.compile(dialect=self.dialect, compile_kwargs=compile_kw)
+        )
+
+    def _compat_autogen_column_reflect(self, inspector: Inspector) -> Callable:
+        return self.autogen_column_reflect
+
+    def correct_for_autogen_foreignkeys(
+        self,
+        conn_fks: Set[ForeignKeyConstraint],
+        metadata_fks: Set[ForeignKeyConstraint],
+    ) -> None:
+        pass
+
+    def autogen_column_reflect(self, inspector, table, column_info):
+        """A hook that is attached to the 'column_reflect' event for when
+        a Table is reflected from the database during the autogenerate
+        process.
+
+        Dialects can elect to modify the information gathered here.
+
+        """
+
+    def start_migrations(self) -> None:
+        """A hook called when :meth:`.EnvironmentContext.run_migrations`
+        is called.
+
+        Implementations can set up per-migration-run state here.
+
+        """
+
+    def emit_begin(self) -> None:
+        """Emit the string ``BEGIN``, or the backend-specific
+        equivalent, on the current connection context.
+
+        This is used in offline mode and typically
+        via :meth:`.EnvironmentContext.begin_transaction`.
+
+        """
+        self.static_output("BEGIN" + self.command_terminator)
+
+    def emit_commit(self) -> None:
+        """Emit the string ``COMMIT``, or the backend-specific
+        equivalent, on the current connection context.
+
+        This is used in offline mode and typically
+        via :meth:`.EnvironmentContext.begin_transaction`.
+
+        """
+        self.static_output("COMMIT" + self.command_terminator)
+
+    def render_type(
+        self, type_obj: TypeEngine, autogen_context: AutogenContext
+    ) -> Union[str, Literal[False]]:
+        return False
+
+    def _compare_identity_default(self, metadata_identity, inspector_identity):
+        # ignored contains the attributes that were not considered
+        # because assumed to be at their default values in the db.
+        diff, ignored = _compare_identity_options(
+            metadata_identity,
+            inspector_identity,
+            schema.Identity(),
+            skip={"always"},
+        )
+
+        meta_always = getattr(metadata_identity, "always", None)
+        inspector_always = getattr(inspector_identity, "always", None)
+        # None and False are the same in this comparison
+        if bool(meta_always) != bool(inspector_always):
+            diff.add("always")
+
+        diff.difference_update(self.identity_attrs_ignore)
+
+        # returns 3 values:
+        return (
+            # different identity attributes
+            diff,
+            # ignored identity attributes
+            ignored,
+            # if the two identities should be considered different
+            bool(diff) or bool(metadata_identity) != bool(inspector_identity),
+        )
+
+    def _compare_index_unique(
+        self, metadata_index: Index, reflected_index: Index
+    ) -> Optional[str]:
+        conn_unique = bool(reflected_index.unique)
+        meta_unique = bool(metadata_index.unique)
+        if conn_unique != meta_unique:
+            return f"unique={conn_unique} to unique={meta_unique}"
+        else:
+            return None
+
+    def _create_metadata_constraint_sig(
+        self, constraint: _autogen._C, **opts: Any
+    ) -> _constraint_sig[_autogen._C]:
+        return _constraint_sig.from_constraint(True, self, constraint, **opts)
+
+    def _create_reflected_constraint_sig(
+        self, constraint: _autogen._C, **opts: Any
+    ) -> _constraint_sig[_autogen._C]:
+        return _constraint_sig.from_constraint(False, self, constraint, **opts)
+
+    def compare_indexes(
+        self,
+        metadata_index: Index,
+        reflected_index: Index,
+    ) -> ComparisonResult:
+        """Compare two indexes by comparing the signature generated by
+        ``create_index_sig``.
+
+        This method returns a ``ComparisonResult``.
+        """
+        msg: List[str] = []
+        unique_msg = self._compare_index_unique(
+            metadata_index, reflected_index
+        )
+        if unique_msg:
+            msg.append(unique_msg)
+        m_sig = self._create_metadata_constraint_sig(metadata_index)
+        r_sig = self._create_reflected_constraint_sig(reflected_index)
+
+        assert _autogen.is_index_sig(m_sig)
+        assert _autogen.is_index_sig(r_sig)
+
+        # The assumption is that the index has no expressions
+        for sig in m_sig, r_sig:
+            if sig.has_expressions:
+                log.warning(
+                    "Generating approximate signature for index %s. "
+                    "The dialect "
+                    "implementation should either skip expression indexes "
+                    "or provide a custom implementation.",
+                    sig.const,
+                )
+
+        if m_sig.column_names != r_sig.column_names:
+            msg.append(
+                f"expression {r_sig.column_names} to {m_sig.column_names}"
+            )
+
+        if msg:
+            return ComparisonResult.Different(msg)
+        else:
+            return ComparisonResult.Equal()
+
+    def compare_unique_constraint(
+        self,
+        metadata_constraint: UniqueConstraint,
+        reflected_constraint: UniqueConstraint,
+    ) -> ComparisonResult:
+        """Compare two unique constraints by comparing the two signatures.
+
+        The arguments are two tuples that contain the unique constraint and
+        the signatures generated by ``create_unique_constraint_sig``.
+
+        This method returns a ``ComparisonResult``.
+        """
+        metadata_tup = self._create_metadata_constraint_sig(
+            metadata_constraint
+        )
+        reflected_tup = self._create_reflected_constraint_sig(
+            reflected_constraint
+        )
+
+        meta_sig = metadata_tup.unnamed
+        conn_sig = reflected_tup.unnamed
+        if conn_sig != meta_sig:
+            return ComparisonResult.Different(
+                f"expression {conn_sig} to {meta_sig}"
+            )
+        else:
+            return ComparisonResult.Equal()
+
+    def _skip_functional_indexes(self, metadata_indexes, conn_indexes):
+        conn_indexes_by_name = {c.name: c for c in conn_indexes}
+
+        for idx in list(metadata_indexes):
+            if idx.name in conn_indexes_by_name:
+                continue
+            iex = sqla_compat.is_expression_index(idx)
+            if iex:
+                util.warn(
+                    "autogenerate skipping metadata-specified "
+                    "expression-based index "
+                    f"{idx.name!r}; dialect {self.__dialect__!r} under "
+                    f"SQLAlchemy {sqla_compat.sqlalchemy_version} can't "
+                    "reflect these indexes so they can't be compared"
+                )
+                metadata_indexes.discard(idx)
+
+    def adjust_reflected_dialect_options(
+        self, reflected_object: Dict[str, Any], kind: str
+    ) -> Dict[str, Any]:
+        return reflected_object.get("dialect_options", {})
+
+
+class Params(NamedTuple):
+    token0: str
+    tokens: List[str]
+    args: List[str]
+    kwargs: Dict[str, str]
+
+
+def _compare_identity_options(
+    metadata_io: Union[schema.Identity, schema.Sequence, None],
+    inspector_io: Union[schema.Identity, schema.Sequence, None],
+    default_io: Union[schema.Identity, schema.Sequence],
+    skip: Set[str],
+):
+    # this can be used for identity or sequence compare.
+    # default_io is an instance of IdentityOption with all attributes to the
+    # default value.
+    meta_d = sqla_compat._get_identity_options_dict(metadata_io)
+    insp_d = sqla_compat._get_identity_options_dict(inspector_io)
+
+    diff = set()
+    ignored_attr = set()
+
+    def check_dicts(
+        meta_dict: Mapping[str, Any],
+        insp_dict: Mapping[str, Any],
+        default_dict: Mapping[str, Any],
+        attrs: Iterable[str],
+    ):
+        for attr in set(attrs).difference(skip):
+            meta_value = meta_dict.get(attr)
+            insp_value = insp_dict.get(attr)
+            if insp_value != meta_value:
+                default_value = default_dict.get(attr)
+                if meta_value == default_value:
+                    ignored_attr.add(attr)
+                else:
+                    diff.add(attr)
+
+    check_dicts(
+        meta_d,
+        insp_d,
+        sqla_compat._get_identity_options_dict(default_io),
+        set(meta_d).union(insp_d),
+    )
+    if sqla_compat.identity_has_dialect_kwargs:
+        assert hasattr(default_io, "dialect_kwargs")
+        # use only the dialect kwargs in inspector_io since metadata_io
+        # can have options for many backends
+        check_dicts(
+            getattr(metadata_io, "dialect_kwargs", {}),
+            getattr(inspector_io, "dialect_kwargs", {}),
+            default_io.dialect_kwargs,
+            getattr(inspector_io, "dialect_kwargs", {}),
+        )
+
+    return diff, ignored_attr
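
Because `ImplMeta` records any subclass that defines `__dialect__` into the `_impls` registry at class-creation time, a third-party backend plugs in simply by defining a subclass; `DefaultImpl.get_by_dialect` then resolves it by the dialect's name. A minimal sketch for a hypothetical backend named "mydb" (the name and the BEGIN variant are illustrative assumptions, not a real dialect):

    from alembic.ddl.impl import DefaultImpl

    class MyDBImpl(DefaultImpl):
        # defining __dialect__ is all that is needed: ImplMeta adds this
        # class to the _impls registry when the class object is created
        __dialect__ = "mydb"
        transactional_ddl = True

        def emit_begin(self) -> None:
            # offline-mode BEGIN variant for the hypothetical backend
            self.static_output("BEGIN WORK" + self.command_terminator)

    # DefaultImpl.get_by_dialect(dialect) now returns MyDBImpl for any
    # SQLAlchemy dialect whose .name attribute is "mydb"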
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/mssql.py ADDED
@@ -0,0 +1,421 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ import re
7
+ from typing import Any
8
+ from typing import Dict
9
+ from typing import List
10
+ from typing import Optional
11
+ from typing import TYPE_CHECKING
12
+ from typing import Union
13
+
14
+ from sqlalchemy import types as sqltypes
15
+ from sqlalchemy.schema import Column
16
+ from sqlalchemy.schema import CreateIndex
17
+ from sqlalchemy.sql.base import Executable
18
+ from sqlalchemy.sql.elements import ClauseElement
19
+
20
+ from .base import AddColumn
21
+ from .base import alter_column
22
+ from .base import alter_table
23
+ from .base import ColumnDefault
24
+ from .base import ColumnName
25
+ from .base import ColumnNullable
26
+ from .base import ColumnType
27
+ from .base import format_column_name
28
+ from .base import format_server_default
29
+ from .base import format_table_name
30
+ from .base import format_type
31
+ from .base import RenameTable
32
+ from .impl import DefaultImpl
33
+ from .. import util
34
+ from ..util import sqla_compat
35
+ from ..util.sqla_compat import compiles
36
+
37
+ if TYPE_CHECKING:
38
+ from typing import Literal
39
+
40
+ from sqlalchemy.dialects.mssql.base import MSDDLCompiler
41
+ from sqlalchemy.dialects.mssql.base import MSSQLCompiler
42
+ from sqlalchemy.engine.cursor import CursorResult
43
+ from sqlalchemy.sql.schema import Index
44
+ from sqlalchemy.sql.schema import Table
45
+ from sqlalchemy.sql.selectable import TableClause
46
+ from sqlalchemy.sql.type_api import TypeEngine
47
+
48
+ from .base import _ServerDefault
49
+
50
+
51
+ class MSSQLImpl(DefaultImpl):
52
+ __dialect__ = "mssql"
53
+ transactional_ddl = True
54
+ batch_separator = "GO"
55
+
56
+ type_synonyms = DefaultImpl.type_synonyms + ({"VARCHAR", "NVARCHAR"},)
57
+ identity_attrs_ignore = DefaultImpl.identity_attrs_ignore + (
58
+ "minvalue",
59
+ "maxvalue",
60
+ "nominvalue",
61
+ "nomaxvalue",
62
+ "cycle",
63
+ "cache",
64
+ )
65
+
66
+ def __init__(self, *arg, **kw) -> None:
67
+ super().__init__(*arg, **kw)
68
+ self.batch_separator = self.context_opts.get(
69
+ "mssql_batch_separator", self.batch_separator
70
+ )
71
+
72
+ def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]:
73
+ result = super()._exec(construct, *args, **kw)
74
+ if self.as_sql and self.batch_separator:
75
+ self.static_output(self.batch_separator)
76
+ return result
77
+
78
+ def emit_begin(self) -> None:
79
+ self.static_output("BEGIN TRANSACTION" + self.command_terminator)
80
+
81
+ def emit_commit(self) -> None:
82
+ super().emit_commit()
83
+ if self.as_sql and self.batch_separator:
84
+ self.static_output(self.batch_separator)
85
+
86
+ def alter_column(
87
+ self,
88
+ table_name: str,
89
+ column_name: str,
90
+ *,
91
+ nullable: Optional[bool] = None,
92
+ server_default: Optional[
93
+ Union[_ServerDefault, Literal[False]]
94
+ ] = False,
95
+ name: Optional[str] = None,
96
+ type_: Optional[TypeEngine] = None,
97
+ schema: Optional[str] = None,
98
+ existing_type: Optional[TypeEngine] = None,
99
+ existing_server_default: Optional[_ServerDefault] = None,
100
+ existing_nullable: Optional[bool] = None,
101
+ **kw: Any,
102
+ ) -> None:
103
+ if nullable is not None:
104
+ if type_ is not None:
105
+ # the NULL/NOT NULL alter will handle
106
+ # the type alteration
107
+ existing_type = type_
108
+ type_ = None
109
+ elif existing_type is None:
110
+ raise util.CommandError(
111
+ "MS-SQL ALTER COLUMN operations "
112
+ "with NULL or NOT NULL require the "
113
+ "existing_type or a new type_ be passed."
114
+ )
115
+ elif existing_nullable is not None and type_ is not None:
116
+ nullable = existing_nullable
117
+
118
+ # the NULL/NOT NULL alter will handle
119
+ # the type alteration
120
+ existing_type = type_
121
+ type_ = None
122
+
123
+ elif type_ is not None:
124
+ util.warn(
125
+ "MS-SQL ALTER COLUMN operations that specify type_= "
126
+ "should also specify a nullable= or "
127
+ "existing_nullable= argument to avoid implicit conversion "
128
+ "of NOT NULL columns to NULL."
129
+ )
130
+
131
+ used_default = False
132
+ if sqla_compat._server_default_is_identity(
133
+ server_default, existing_server_default
134
+ ) or sqla_compat._server_default_is_computed(
135
+ server_default, existing_server_default
136
+ ):
137
+ used_default = True
138
+ kw["server_default"] = server_default
139
+ kw["existing_server_default"] = existing_server_default
140
+
141
+ super().alter_column(
142
+ table_name,
143
+ column_name,
144
+ nullable=nullable,
145
+ type_=type_,
146
+ schema=schema,
147
+ existing_type=existing_type,
148
+ existing_nullable=existing_nullable,
149
+ **kw,
150
+ )
151
+
152
+ if server_default is not False and used_default is False:
153
+ if existing_server_default is not False or server_default is None:
154
+ self._exec(
155
+ _ExecDropConstraint(
156
+ table_name,
157
+ column_name,
158
+ "sys.default_constraints",
159
+ schema,
160
+ )
161
+ )
162
+ if server_default is not None:
163
+ super().alter_column(
164
+ table_name,
165
+ column_name,
166
+ schema=schema,
167
+ server_default=server_default,
168
+ )
169
+
170
+ if name is not None:
171
+ super().alter_column(
172
+ table_name, column_name, schema=schema, name=name
173
+ )
174
+
175
+ def create_index(self, index: Index, **kw: Any) -> None:
176
+ # this likely defaults to None if not present, so get()
177
+ # should normally not return the default value. being
178
+ # defensive in any case
179
+ mssql_include = index.kwargs.get("mssql_include", None) or ()
180
+ assert index.table is not None
181
+ for col in mssql_include:
182
+ if col not in index.table.c:
183
+ index.table.append_column(Column(col, sqltypes.NullType))
184
+ self._exec(CreateIndex(index, **kw))
185
+
186
+ def bulk_insert( # type:ignore[override]
187
+ self, table: Union[TableClause, Table], rows: List[dict], **kw: Any
188
+ ) -> None:
189
+ if self.as_sql:
190
+ self._exec(
191
+ "SET IDENTITY_INSERT %s ON"
192
+ % self.dialect.identifier_preparer.format_table(table)
193
+ )
194
+ super().bulk_insert(table, rows, **kw)
195
+ self._exec(
196
+ "SET IDENTITY_INSERT %s OFF"
197
+ % self.dialect.identifier_preparer.format_table(table)
198
+ )
199
+ else:
200
+ super().bulk_insert(table, rows, **kw)
201
+
202
+ def drop_column(
203
+ self,
204
+ table_name: str,
205
+ column: Column[Any],
206
+ *,
207
+ schema: Optional[str] = None,
208
+ **kw,
209
+ ) -> None:
210
+ drop_default = kw.pop("mssql_drop_default", False)
211
+ if drop_default:
212
+ self._exec(
213
+ _ExecDropConstraint(
214
+ table_name, column, "sys.default_constraints", schema
215
+ )
216
+ )
217
+ drop_check = kw.pop("mssql_drop_check", False)
218
+ if drop_check:
219
+ self._exec(
220
+ _ExecDropConstraint(
221
+ table_name, column, "sys.check_constraints", schema
222
+ )
223
+ )
224
+ drop_fks = kw.pop("mssql_drop_foreign_key", False)
225
+ if drop_fks:
226
+ self._exec(_ExecDropFKConstraint(table_name, column, schema))
227
+ super().drop_column(table_name, column, schema=schema, **kw)
228
+
229
+ def compare_server_default(
230
+ self,
231
+ inspector_column,
232
+ metadata_column,
233
+ rendered_metadata_default,
234
+ rendered_inspector_default,
235
+ ):
236
+ if rendered_metadata_default is not None:
237
+ rendered_metadata_default = re.sub(
238
+ r"[\(\) \"\']", "", rendered_metadata_default
239
+ )
240
+
241
+ if rendered_inspector_default is not None:
242
+ # SQL Server collapses whitespace and adds arbitrary parenthesis
243
+ # within expressions. our only option is collapse all of it
244
+
245
+ rendered_inspector_default = re.sub(
246
+ r"[\(\) \"\']", "", rendered_inspector_default
247
+ )
248
+
249
+ return rendered_inspector_default != rendered_metadata_default
250
+
251
+ def _compare_identity_default(self, metadata_identity, inspector_identity):
252
+ diff, ignored, is_alter = super()._compare_identity_default(
253
+ metadata_identity, inspector_identity
254
+ )
255
+
256
+ if (
257
+ metadata_identity is None
258
+ and inspector_identity is not None
259
+ and not diff
260
+ and inspector_identity.column is not None
261
+ and inspector_identity.column.primary_key
262
+ ):
263
+ # mssql reflect primary keys with autoincrement as identity
264
+ # columns. if no different attributes are present ignore them
265
+ is_alter = False
266
+
267
+ return diff, ignored, is_alter
268
+
269
+ def adjust_reflected_dialect_options(
270
+ self, reflected_object: Dict[str, Any], kind: str
271
+ ) -> Dict[str, Any]:
272
+ options: Dict[str, Any]
273
+ options = reflected_object.get("dialect_options", {}).copy()
274
+ if not options.get("mssql_include"):
275
+ options.pop("mssql_include", None)
276
+ if not options.get("mssql_clustered"):
277
+ options.pop("mssql_clustered", None)
278
+ return options
279
+
280
+
281
+ class _ExecDropConstraint(Executable, ClauseElement):
282
+ inherit_cache = False
283
+
284
+ def __init__(
285
+ self,
286
+ tname: str,
287
+ colname: Union[Column[Any], str],
288
+ type_: str,
289
+ schema: Optional[str],
290
+ ) -> None:
291
+ self.tname = tname
292
+ self.colname = colname
293
+ self.type_ = type_
294
+ self.schema = schema
295
+
296
+
297
+ class _ExecDropFKConstraint(Executable, ClauseElement):
298
+ inherit_cache = False
299
+
300
+ def __init__(
301
+ self, tname: str, colname: Column[Any], schema: Optional[str]
302
+ ) -> None:
303
+ self.tname = tname
304
+ self.colname = colname
305
+ self.schema = schema
306
+
307
+
308
+ @compiles(_ExecDropConstraint, "mssql")
309
+ def _exec_drop_col_constraint(
310
+ element: _ExecDropConstraint, compiler: MSSQLCompiler, **kw
311
+ ) -> str:
312
+ schema, tname, colname, type_ = (
313
+ element.schema,
314
+ element.tname,
315
+ element.colname,
316
+ element.type_,
317
+ )
318
+ # from http://www.mssqltips.com/sqlservertip/1425/\
319
+ # working-with-default-constraints-in-sql-server/
320
+ return """declare @const_name varchar(256)
321
+ select @const_name = QUOTENAME([name]) from %(type)s
322
+ where parent_object_id = object_id('%(schema_dot)s%(tname)s')
323
+ and col_name(parent_object_id, parent_column_id) = '%(colname)s'
324
+ exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
325
+ "type": type_,
326
+ "tname": tname,
327
+ "colname": colname,
328
+ "tname_quoted": format_table_name(compiler, tname, schema),
329
+ "schema_dot": schema + "." if schema else "",
330
+ }
331
+
332
+
333
+ @compiles(_ExecDropFKConstraint, "mssql")
334
+ def _exec_drop_col_fk_constraint(
335
+ element: _ExecDropFKConstraint, compiler: MSSQLCompiler, **kw
336
+ ) -> str:
337
+ schema, tname, colname = element.schema, element.tname, element.colname
338
+
339
+ return """declare @const_name varchar(256)
340
+ select @const_name = QUOTENAME([name]) from
341
+ sys.foreign_keys fk join sys.foreign_key_columns fkc
342
+ on fk.object_id=fkc.constraint_object_id
343
+ where fkc.parent_object_id = object_id('%(schema_dot)s%(tname)s')
344
+ and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s'
345
+ exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % {
346
+ "tname": tname,
347
+ "colname": colname,
348
+ "tname_quoted": format_table_name(compiler, tname, schema),
349
+ "schema_dot": schema + "." if schema else "",
350
+ }
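
As a sketch (table "t" and column "c" are assumed names, passed as plain strings for illustration), compiling the construct above against the mssql dialect renders a T-SQL batch that looks up the foreign-key name at execution time:

    from sqlalchemy.dialects import mssql

    stmt = _ExecDropFKConstraint("t", "c", None)
    print(stmt.compile(dialect=mssql.dialect()))
    # declare @const_name varchar(256)
    # select @const_name = QUOTENAME([name]) from
    #     sys.foreign_keys fk join sys.foreign_key_columns fkc
    #     on fk.object_id=fkc.constraint_object_id
    #     where fkc.parent_object_id = object_id('t')
    #     and col_name(fkc.parent_object_id, fkc.parent_column_id) = 'c'
    # exec('alter table t drop constraint ' + @const_name)
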
351
+
352
+
353
+ @compiles(AddColumn, "mssql")
354
+ def visit_add_column(element: AddColumn, compiler: MSDDLCompiler, **kw) -> str:
355
+ return "%s %s" % (
356
+ alter_table(compiler, element.table_name, element.schema),
357
+ mssql_add_column(compiler, element.column, **kw),
358
+ )
359
+
360
+
361
+ def mssql_add_column(
362
+ compiler: MSDDLCompiler, column: Column[Any], **kw
363
+ ) -> str:
364
+ return "ADD %s" % compiler.get_column_specification(column, **kw)
365
+
366
+
367
+ @compiles(ColumnNullable, "mssql")
368
+ def visit_column_nullable(
369
+ element: ColumnNullable, compiler: MSDDLCompiler, **kw
370
+ ) -> str:
371
+ return "%s %s %s %s" % (
372
+ alter_table(compiler, element.table_name, element.schema),
373
+ alter_column(compiler, element.column_name),
374
+ format_type(compiler, element.existing_type), # type: ignore[arg-type]
375
+ "NULL" if element.nullable else "NOT NULL",
376
+ )
377
+
378
+
379
+ @compiles(ColumnDefault, "mssql")
380
+ def visit_column_default(
381
+ element: ColumnDefault, compiler: MSDDLCompiler, **kw
382
+ ) -> str:
383
+ # TODO: there can also be a named constraint
384
+ # with ADD CONSTRAINT here
385
+ return "%s ADD DEFAULT %s FOR %s" % (
386
+ alter_table(compiler, element.table_name, element.schema),
387
+ format_server_default(compiler, element.default),
388
+ format_column_name(compiler, element.column_name),
389
+ )
390
+
391
+
392
+ @compiles(ColumnName, "mssql")
393
+ def visit_rename_column(
394
+ element: ColumnName, compiler: MSDDLCompiler, **kw
395
+ ) -> str:
396
+ return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % (
397
+ format_table_name(compiler, element.table_name, element.schema),
398
+ format_column_name(compiler, element.column_name),
399
+ format_column_name(compiler, element.newname),
400
+ )
401
+
402
+
403
+ @compiles(ColumnType, "mssql")
404
+ def visit_column_type(
405
+ element: ColumnType, compiler: MSDDLCompiler, **kw
406
+ ) -> str:
407
+ return "%s %s %s" % (
408
+ alter_table(compiler, element.table_name, element.schema),
409
+ alter_column(compiler, element.column_name),
410
+ format_type(compiler, element.type_),
411
+ )
412
+
413
+
414
+ @compiles(RenameTable, "mssql")
415
+ def visit_rename_table(
416
+ element: RenameTable, compiler: MSDDLCompiler, **kw
417
+ ) -> str:
418
+ return "EXEC sp_rename '%s', %s" % (
419
+ format_table_name(compiler, element.table_name, element.schema),
420
+ format_table_name(compiler, element.new_table_name, None),
421
+ )
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/mysql.py ADDED
@@ -0,0 +1,495 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ import re
7
+ from typing import Any
8
+ from typing import Optional
9
+ from typing import TYPE_CHECKING
10
+ from typing import Union
11
+
12
+ from sqlalchemy import schema
13
+ from sqlalchemy import types as sqltypes
14
+
15
+ from .base import alter_table
16
+ from .base import AlterColumn
17
+ from .base import ColumnDefault
18
+ from .base import ColumnName
19
+ from .base import ColumnNullable
20
+ from .base import ColumnType
21
+ from .base import format_column_name
22
+ from .base import format_server_default
23
+ from .impl import DefaultImpl
24
+ from .. import util
25
+ from ..util import sqla_compat
26
+ from ..util.sqla_compat import _is_type_bound
27
+ from ..util.sqla_compat import compiles
28
+
29
+ if TYPE_CHECKING:
30
+ from typing import Literal
31
+
32
+ from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler
33
+ from sqlalchemy.sql.ddl import DropConstraint
34
+ from sqlalchemy.sql.schema import Constraint
35
+ from sqlalchemy.sql.type_api import TypeEngine
36
+
37
+ from .base import _ServerDefault
38
+
39
+
40
+ class MySQLImpl(DefaultImpl):
41
+ __dialect__ = "mysql"
42
+
43
+ transactional_ddl = False
44
+ type_synonyms = DefaultImpl.type_synonyms + (
45
+ {"BOOL", "TINYINT"},
46
+ {"JSON", "LONGTEXT"},
47
+ )
48
+ type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"]
49
+
50
+ def alter_column(
51
+ self,
52
+ table_name: str,
53
+ column_name: str,
54
+ *,
55
+ nullable: Optional[bool] = None,
56
+ server_default: Optional[
57
+ Union[_ServerDefault, Literal[False]]
58
+ ] = False,
59
+ name: Optional[str] = None,
60
+ type_: Optional[TypeEngine] = None,
61
+ schema: Optional[str] = None,
62
+ existing_type: Optional[TypeEngine] = None,
63
+ existing_server_default: Optional[_ServerDefault] = None,
64
+ existing_nullable: Optional[bool] = None,
65
+ autoincrement: Optional[bool] = None,
66
+ existing_autoincrement: Optional[bool] = None,
67
+ comment: Optional[Union[str, Literal[False]]] = False,
68
+ existing_comment: Optional[str] = None,
69
+ **kw: Any,
70
+ ) -> None:
71
+ if sqla_compat._server_default_is_identity(
72
+ server_default, existing_server_default
73
+ ) or sqla_compat._server_default_is_computed(
74
+ server_default, existing_server_default
75
+ ):
76
+ # modifying computed or identity columns is not supported
77
+ # the default will raise
78
+ super().alter_column(
79
+ table_name,
80
+ column_name,
81
+ nullable=nullable,
82
+ type_=type_,
83
+ schema=schema,
84
+ existing_type=existing_type,
85
+ existing_nullable=existing_nullable,
86
+ server_default=server_default,
87
+ existing_server_default=existing_server_default,
88
+ **kw,
89
+ )
90
+ if name is not None or self._is_mysql_allowed_functional_default(
91
+ type_ if type_ is not None else existing_type, server_default
92
+ ):
93
+ self._exec(
94
+ MySQLChangeColumn(
95
+ table_name,
96
+ column_name,
97
+ schema=schema,
98
+ newname=name if name is not None else column_name,
99
+ nullable=(
100
+ nullable
101
+ if nullable is not None
102
+ else (
103
+ existing_nullable
104
+ if existing_nullable is not None
105
+ else True
106
+ )
107
+ ),
108
+ type_=type_ if type_ is not None else existing_type,
109
+ default=(
110
+ server_default
111
+ if server_default is not False
112
+ else existing_server_default
113
+ ),
114
+ autoincrement=(
115
+ autoincrement
116
+ if autoincrement is not None
117
+ else existing_autoincrement
118
+ ),
119
+ comment=(
120
+ comment if comment is not False else existing_comment
121
+ ),
122
+ )
123
+ )
124
+ elif (
125
+ nullable is not None
126
+ or type_ is not None
127
+ or autoincrement is not None
128
+ or comment is not False
129
+ ):
130
+ self._exec(
131
+ MySQLModifyColumn(
132
+ table_name,
133
+ column_name,
134
+ schema=schema,
135
+ newname=name if name is not None else column_name,
136
+ nullable=(
137
+ nullable
138
+ if nullable is not None
139
+ else (
140
+ existing_nullable
141
+ if existing_nullable is not None
142
+ else True
143
+ )
144
+ ),
145
+ type_=type_ if type_ is not None else existing_type,
146
+ default=(
147
+ server_default
148
+ if server_default is not False
149
+ else existing_server_default
150
+ ),
151
+ autoincrement=(
152
+ autoincrement
153
+ if autoincrement is not None
154
+ else existing_autoincrement
155
+ ),
156
+ comment=(
157
+ comment if comment is not False else existing_comment
158
+ ),
159
+ )
160
+ )
161
+ elif server_default is not False:
162
+ self._exec(
163
+ MySQLAlterDefault(
164
+ table_name, column_name, server_default, schema=schema
165
+ )
166
+ )
167
+
168
+ def drop_constraint(
169
+ self,
170
+ const: Constraint,
171
+ **kw: Any,
172
+ ) -> None:
173
+ if isinstance(const, schema.CheckConstraint) and _is_type_bound(const):
174
+ return
175
+
176
+ super().drop_constraint(const)
177
+
178
+ def _is_mysql_allowed_functional_default(
179
+ self,
180
+ type_: Optional[TypeEngine],
181
+ server_default: Optional[Union[_ServerDefault, Literal[False]]],
182
+ ) -> bool:
183
+ return (
184
+ type_ is not None
185
+ and type_._type_affinity is sqltypes.DateTime
186
+ and server_default is not None
187
+ )
188
+
189
+ def compare_server_default(
190
+ self,
191
+ inspector_column,
192
+ metadata_column,
193
+ rendered_metadata_default,
194
+ rendered_inspector_default,
195
+ ):
196
+ # partially a workaround for SQLAlchemy issue #3023; if the
197
+ # column were created without "NOT NULL", MySQL may have added
198
+ # an implicit default of '0' which we need to skip
199
+ # TODO: this case may no longer be covered
200
+ if (
201
+ metadata_column.type._type_affinity is sqltypes.Integer
202
+ and inspector_column.primary_key
203
+ and not inspector_column.autoincrement
204
+ and not rendered_metadata_default
205
+ and rendered_inspector_default == "'0'"
206
+ ):
207
+ return False
208
+ elif (
209
+ rendered_inspector_default
210
+ and inspector_column.type._type_affinity is sqltypes.Integer
211
+ ):
212
+ rendered_inspector_default = (
213
+ re.sub(r"^'|'$", "", rendered_inspector_default)
214
+ if rendered_inspector_default is not None
215
+ else None
216
+ )
217
+ return rendered_inspector_default != rendered_metadata_default
218
+ elif (
219
+ rendered_metadata_default
220
+ and metadata_column.type._type_affinity is sqltypes.String
221
+ ):
222
+ metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default)
223
+ return rendered_inspector_default != f"'{metadata_default}'"
224
+ elif rendered_inspector_default and rendered_metadata_default:
225
+ # adjust for "function()" vs. "FUNCTION", which can occur particularly
226
+ # for the CURRENT_TIMESTAMP function on newer MariaDB versions
227
+
228
+ # SQLAlchemy MySQL dialect bundles ON UPDATE into the server
229
+ # default; adjust for this possibly being present.
230
+ onupdate_ins = re.match(
231
+ r"(.*) (on update.*?)(?:\(\))?$",
232
+ rendered_inspector_default.lower(),
233
+ )
234
+ onupdate_met = re.match(
235
+ r"(.*) (on update.*?)(?:\(\))?$",
236
+ rendered_metadata_default.lower(),
237
+ )
238
+
239
+ if onupdate_ins:
240
+ if not onupdate_met:
241
+ return True
242
+ elif onupdate_ins.group(2) != onupdate_met.group(2):
243
+ return True
244
+
245
+ rendered_inspector_default = onupdate_ins.group(1)
246
+ rendered_metadata_default = onupdate_met.group(1)
247
+
248
+ return re.sub(
249
+ r"(.*?)(?:\(\))?$", r"\1", rendered_inspector_default.lower()
250
+ ) != re.sub(
251
+ r"(.*?)(?:\(\))?$", r"\1", rendered_metadata_default.lower()
252
+ )
253
+ else:
254
+ return rendered_inspector_default != rendered_metadata_default
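
A sketch of the ON UPDATE handling above, using an assumed MariaDB-style default: the regex splits off the trailing "on update" clause so that each half can be compared with any trailing "()" ignored.

    import re

    m = re.match(
        r"(.*) (on update.*?)(?:\(\))?$",
        "current_timestamp() on update current_timestamp()",
    )
    assert m is not None
    assert m.group(1) == "current_timestamp()"
    assert m.group(2) == "on update current_timestamp"
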
255
+
256
+ def correct_for_autogen_constraints(
257
+ self,
258
+ conn_unique_constraints,
259
+ conn_indexes,
260
+ metadata_unique_constraints,
261
+ metadata_indexes,
262
+ ):
263
+ # TODO: if SQLA 1.0, make use of "duplicates_index"
264
+ # metadata
265
+ removed = set()
266
+ for idx in list(conn_indexes):
267
+ if idx.unique:
268
+ continue
269
+ # MySQL puts implicit indexes on FK columns, even if
270
+ # composite and even if MyISAM, so can't check this too easily.
271
+ # the name of the index may be the column name or it may
272
+ # be the name of the FK constraint.
273
+ for col in idx.columns:
274
+ if idx.name == col.name:
275
+ conn_indexes.remove(idx)
276
+ removed.add(idx.name)
277
+ break
278
+ for fk in col.foreign_keys:
279
+ if fk.name == idx.name:
280
+ conn_indexes.remove(idx)
281
+ removed.add(idx.name)
282
+ break
283
+ if idx.name in removed:
284
+ break
285
+
286
+ # then remove indexes from the "metadata_indexes"
287
+ # that we've removed from reflected, otherwise they come out
288
+ # as adds (see #202)
289
+ for idx in list(metadata_indexes):
290
+ if idx.name in removed:
291
+ metadata_indexes.remove(idx)
292
+
293
+ def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks):
294
+ conn_fk_by_sig = {
295
+ self._create_reflected_constraint_sig(fk).unnamed_no_options: fk
296
+ for fk in conn_fks
297
+ }
298
+ metadata_fk_by_sig = {
299
+ self._create_metadata_constraint_sig(fk).unnamed_no_options: fk
300
+ for fk in metadata_fks
301
+ }
302
+
303
+ for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig):
304
+ mdfk = metadata_fk_by_sig[sig]
305
+ cnfk = conn_fk_by_sig[sig]
306
+ # MySQL considers RESTRICT to be the default and doesn't
307
+ # report on it. if the model has explicit RESTRICT and
308
+ # the conn FK has None, set it to RESTRICT
309
+ if (
310
+ mdfk.ondelete is not None
311
+ and mdfk.ondelete.lower() == "restrict"
312
+ and cnfk.ondelete is None
313
+ ):
314
+ cnfk.ondelete = "RESTRICT"
315
+ if (
316
+ mdfk.onupdate is not None
317
+ and mdfk.onupdate.lower() == "restrict"
318
+ and cnfk.onupdate is None
319
+ ):
320
+ cnfk.onupdate = "RESTRICT"
321
+
322
+
323
+ class MariaDBImpl(MySQLImpl):
324
+ __dialect__ = "mariadb"
325
+
326
+
327
+ class MySQLAlterDefault(AlterColumn):
328
+ def __init__(
329
+ self,
330
+ name: str,
331
+ column_name: str,
332
+ default: Optional[_ServerDefault],
333
+ schema: Optional[str] = None,
334
+ ) -> None:
335
+ super(AlterColumn, self).__init__(name, schema=schema)
336
+ self.column_name = column_name
337
+ self.default = default
338
+
339
+
340
+ class MySQLChangeColumn(AlterColumn):
341
+ def __init__(
342
+ self,
343
+ name: str,
344
+ column_name: str,
345
+ schema: Optional[str] = None,
346
+ newname: Optional[str] = None,
347
+ type_: Optional[TypeEngine] = None,
348
+ nullable: Optional[bool] = None,
349
+ default: Optional[Union[_ServerDefault, Literal[False]]] = False,
350
+ autoincrement: Optional[bool] = None,
351
+ comment: Optional[Union[str, Literal[False]]] = False,
352
+ ) -> None:
353
+ super(AlterColumn, self).__init__(name, schema=schema)
354
+ self.column_name = column_name
355
+ self.nullable = nullable
356
+ self.newname = newname
357
+ self.default = default
358
+ self.autoincrement = autoincrement
359
+ self.comment = comment
360
+ if type_ is None:
361
+ raise util.CommandError(
362
+ "All MySQL CHANGE/MODIFY COLUMN operations "
363
+ "require the existing type."
364
+ )
365
+
366
+ self.type_ = sqltypes.to_instance(type_)
367
+
368
+
369
+ class MySQLModifyColumn(MySQLChangeColumn):
370
+ pass
371
+
372
+
373
+ @compiles(ColumnNullable, "mysql", "mariadb")
374
+ @compiles(ColumnName, "mysql", "mariadb")
375
+ @compiles(ColumnDefault, "mysql", "mariadb")
376
+ @compiles(ColumnType, "mysql", "mariadb")
377
+ def _mysql_doesnt_support_individual(element, compiler, **kw):
378
+ raise NotImplementedError(
379
+ "Individual alter column constructs not supported by MySQL"
380
+ )
381
+
382
+
383
+ @compiles(MySQLAlterDefault, "mysql", "mariadb")
384
+ def _mysql_alter_default(
385
+ element: MySQLAlterDefault, compiler: MySQLDDLCompiler, **kw
386
+ ) -> str:
387
+ return "%s ALTER COLUMN %s %s" % (
388
+ alter_table(compiler, element.table_name, element.schema),
389
+ format_column_name(compiler, element.column_name),
390
+ (
391
+ "SET DEFAULT %s" % format_server_default(compiler, element.default)
392
+ if element.default is not None
393
+ else "DROP DEFAULT"
394
+ ),
395
+ )
396
+
397
+
398
+ @compiles(MySQLModifyColumn, "mysql", "mariadb")
399
+ def _mysql_modify_column(
400
+ element: MySQLModifyColumn, compiler: MySQLDDLCompiler, **kw
401
+ ) -> str:
402
+ return "%s MODIFY %s %s" % (
403
+ alter_table(compiler, element.table_name, element.schema),
404
+ format_column_name(compiler, element.column_name),
405
+ _mysql_colspec(
406
+ compiler,
407
+ nullable=element.nullable,
408
+ server_default=element.default,
409
+ type_=element.type_,
410
+ autoincrement=element.autoincrement,
411
+ comment=element.comment,
412
+ ),
413
+ )
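
For example (assumed table and column names; a sketch, not a statement emitted by this diff), the MODIFY construct compiles to a single statement carrying the full column specification:

    from sqlalchemy import Integer
    from sqlalchemy.dialects import mysql

    stmt = MySQLModifyColumn(
        "t", "x", newname="x", type_=Integer(), nullable=False
    )
    print(stmt.compile(dialect=mysql.dialect()))
    # ALTER TABLE t MODIFY x INTEGER NOT NULL
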
414
+
415
+
416
+ @compiles(MySQLChangeColumn, "mysql", "mariadb")
417
+ def _mysql_change_column(
418
+ element: MySQLChangeColumn, compiler: MySQLDDLCompiler, **kw
419
+ ) -> str:
420
+ return "%s CHANGE %s %s %s" % (
421
+ alter_table(compiler, element.table_name, element.schema),
422
+ format_column_name(compiler, element.column_name),
423
+ format_column_name(compiler, element.newname),
424
+ _mysql_colspec(
425
+ compiler,
426
+ nullable=element.nullable,
427
+ server_default=element.default,
428
+ type_=element.type_,
429
+ autoincrement=element.autoincrement,
430
+ comment=element.comment,
431
+ ),
432
+ )
433
+
434
+
435
+ def _mysql_colspec(
436
+ compiler: MySQLDDLCompiler,
437
+ nullable: Optional[bool],
438
+ server_default: Optional[Union[_ServerDefault, Literal[False]]],
439
+ type_: TypeEngine,
440
+ autoincrement: Optional[bool],
441
+ comment: Optional[Union[str, Literal[False]]],
442
+ ) -> str:
443
+ spec = "%s %s" % (
444
+ compiler.dialect.type_compiler.process(type_),
445
+ "NULL" if nullable else "NOT NULL",
446
+ )
447
+ if autoincrement:
448
+ spec += " AUTO_INCREMENT"
449
+ if server_default is not False and server_default is not None:
450
+ spec += " DEFAULT %s" % format_server_default(compiler, server_default)
451
+ if comment:
452
+ spec += " COMMENT %s" % compiler.sql_compiler.render_literal_value(
453
+ comment, sqltypes.String()
454
+ )
455
+
456
+ return spec
457
+
458
+
459
+ @compiles(schema.DropConstraint, "mysql", "mariadb")
460
+ def _mysql_drop_constraint(
461
+ element: DropConstraint, compiler: MySQLDDLCompiler, **kw
462
+ ) -> str:
463
+ """Redefine SQLAlchemy's drop constraint to
464
+ raise errors for invalid constraint type."""
465
+
466
+ constraint = element.element
467
+ if isinstance(
468
+ constraint,
469
+ (
470
+ schema.ForeignKeyConstraint,
471
+ schema.PrimaryKeyConstraint,
472
+ schema.UniqueConstraint,
473
+ ),
474
+ ):
475
+ assert not kw
476
+ return compiler.visit_drop_constraint(element)
477
+ elif isinstance(constraint, schema.CheckConstraint):
478
+ # note that SQLAlchemy as of 1.2 does not yet support
479
+ # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully
480
+ # here.
481
+ if compiler.dialect.is_mariadb: # type: ignore[attr-defined]
482
+ return "ALTER TABLE %s DROP CONSTRAINT %s" % (
483
+ compiler.preparer.format_table(constraint.table),
484
+ compiler.preparer.format_constraint(constraint),
485
+ )
486
+ else:
487
+ return "ALTER TABLE %s DROP CHECK %s" % (
488
+ compiler.preparer.format_table(constraint.table),
489
+ compiler.preparer.format_constraint(constraint),
490
+ )
491
+ else:
492
+ raise NotImplementedError(
493
+ "No generic 'DROP CONSTRAINT' in MySQL - "
494
+ "please specify constraint type"
495
+ )
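
Illustratively (table "t" and constraint name "ck_positive" are assumed), the CHECK branch above produces different SQL per backend:

    # MariaDB: ALTER TABLE t DROP CONSTRAINT ck_positive
    # MySQL:   ALTER TABLE t DROP CHECK ck_positive
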
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/oracle.py ADDED
@@ -0,0 +1,202 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ import re
7
+ from typing import Any
8
+ from typing import Optional
9
+ from typing import TYPE_CHECKING
10
+
11
+ from sqlalchemy.sql import sqltypes
12
+
13
+ from .base import AddColumn
14
+ from .base import alter_table
15
+ from .base import ColumnComment
16
+ from .base import ColumnDefault
17
+ from .base import ColumnName
18
+ from .base import ColumnNullable
19
+ from .base import ColumnType
20
+ from .base import format_column_name
21
+ from .base import format_server_default
22
+ from .base import format_table_name
23
+ from .base import format_type
24
+ from .base import IdentityColumnDefault
25
+ from .base import RenameTable
26
+ from .impl import DefaultImpl
27
+ from ..util.sqla_compat import compiles
28
+
29
+ if TYPE_CHECKING:
30
+ from sqlalchemy.dialects.oracle.base import OracleDDLCompiler
31
+ from sqlalchemy.engine.cursor import CursorResult
32
+ from sqlalchemy.sql.schema import Column
33
+
34
+
35
+ class OracleImpl(DefaultImpl):
36
+ __dialect__ = "oracle"
37
+ transactional_ddl = False
38
+ batch_separator = "/"
39
+ command_terminator = ""
40
+ type_synonyms = DefaultImpl.type_synonyms + (
41
+ {"VARCHAR", "VARCHAR2"},
42
+ {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"},
43
+ {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"},
44
+ )
45
+ identity_attrs_ignore = ()
46
+
47
+ def __init__(self, *arg, **kw) -> None:
48
+ super().__init__(*arg, **kw)
49
+ self.batch_separator = self.context_opts.get(
50
+ "oracle_batch_separator", self.batch_separator
51
+ )
52
+
53
+ def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]:
54
+ result = super()._exec(construct, *args, **kw)
55
+ if self.as_sql and self.batch_separator:
56
+ self.static_output(self.batch_separator)
57
+ return result
58
+
59
+ def compare_server_default(
60
+ self,
61
+ inspector_column,
62
+ metadata_column,
63
+ rendered_metadata_default,
64
+ rendered_inspector_default,
65
+ ):
66
+ if rendered_metadata_default is not None:
67
+ rendered_metadata_default = re.sub(
68
+ r"^\((.+)\)$", r"\1", rendered_metadata_default
69
+ )
70
+
71
+ rendered_metadata_default = re.sub(
72
+ r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default
73
+ )
74
+
75
+ if rendered_inspector_default is not None:
76
+ rendered_inspector_default = re.sub(
77
+ r"^\((.+)\)$", r"\1", rendered_inspector_default
78
+ )
79
+
80
+ rendered_inspector_default = re.sub(
81
+ r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default
82
+ )
83
+
84
+ rendered_inspector_default = rendered_inspector_default.strip()
85
+ return rendered_inspector_default != rendered_metadata_default
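
A condensed sketch of the same normalization (the helper name _norm is invented for illustration): one outer pair of parentheses and one layer of quoting are stripped from each side before comparing.

    import re

    def _norm(s: str) -> str:
        s = re.sub(r"^\((.+)\)$", r"\1", s)      # strip one outer paren pair
        s = re.sub(r"^\"?'(.+)'\"?$", r"\1", s)  # strip quoting
        return s.strip()

    # Oracle may reflect "('hello')" where the model declares "'hello'"
    assert _norm("('hello')") == _norm("'hello'") == "hello"
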
86
+
87
+ def emit_begin(self) -> None:
88
+ self._exec("SET TRANSACTION READ WRITE")
89
+
90
+ def emit_commit(self) -> None:
91
+ self._exec("COMMIT")
92
+
93
+
94
+ @compiles(AddColumn, "oracle")
95
+ def visit_add_column(
96
+ element: AddColumn, compiler: OracleDDLCompiler, **kw
97
+ ) -> str:
98
+ return "%s %s" % (
99
+ alter_table(compiler, element.table_name, element.schema),
100
+ add_column(compiler, element.column, **kw),
101
+ )
102
+
103
+
104
+ @compiles(ColumnNullable, "oracle")
105
+ def visit_column_nullable(
106
+ element: ColumnNullable, compiler: OracleDDLCompiler, **kw
107
+ ) -> str:
108
+ return "%s %s %s" % (
109
+ alter_table(compiler, element.table_name, element.schema),
110
+ alter_column(compiler, element.column_name),
111
+ "NULL" if element.nullable else "NOT NULL",
112
+ )
113
+
114
+
115
+ @compiles(ColumnType, "oracle")
116
+ def visit_column_type(
117
+ element: ColumnType, compiler: OracleDDLCompiler, **kw
118
+ ) -> str:
119
+ return "%s %s %s" % (
120
+ alter_table(compiler, element.table_name, element.schema),
121
+ alter_column(compiler, element.column_name),
122
+ "%s" % format_type(compiler, element.type_),
123
+ )
124
+
125
+
126
+ @compiles(ColumnName, "oracle")
127
+ def visit_column_name(
128
+ element: ColumnName, compiler: OracleDDLCompiler, **kw
129
+ ) -> str:
130
+ return "%s RENAME COLUMN %s TO %s" % (
131
+ alter_table(compiler, element.table_name, element.schema),
132
+ format_column_name(compiler, element.column_name),
133
+ format_column_name(compiler, element.newname),
134
+ )
135
+
136
+
137
+ @compiles(ColumnDefault, "oracle")
138
+ def visit_column_default(
139
+ element: ColumnDefault, compiler: OracleDDLCompiler, **kw
140
+ ) -> str:
141
+ return "%s %s %s" % (
142
+ alter_table(compiler, element.table_name, element.schema),
143
+ alter_column(compiler, element.column_name),
144
+ (
145
+ "DEFAULT %s" % format_server_default(compiler, element.default)
146
+ if element.default is not None
147
+ else "DEFAULT NULL"
148
+ ),
149
+ )
150
+
151
+
152
+ @compiles(ColumnComment, "oracle")
153
+ def visit_column_comment(
154
+ element: ColumnComment, compiler: OracleDDLCompiler, **kw
155
+ ) -> str:
156
+ ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}"
157
+
158
+ comment = compiler.sql_compiler.render_literal_value(
159
+ (element.comment if element.comment is not None else ""),
160
+ sqltypes.String(),
161
+ )
162
+
163
+ return ddl.format(
164
+ table_name=element.table_name,
165
+ column_name=element.column_name,
166
+ comment=comment,
167
+ )
168
+
169
+
170
+ @compiles(RenameTable, "oracle")
171
+ def visit_rename_table(
172
+ element: RenameTable, compiler: OracleDDLCompiler, **kw
173
+ ) -> str:
174
+ return "%s RENAME TO %s" % (
175
+ alter_table(compiler, element.table_name, element.schema),
176
+ format_table_name(compiler, element.new_table_name, None),
177
+ )
178
+
179
+
180
+ def alter_column(compiler: OracleDDLCompiler, name: str) -> str:
181
+ return "MODIFY %s" % format_column_name(compiler, name)
182
+
183
+
184
+ def add_column(compiler: OracleDDLCompiler, column: Column[Any], **kw) -> str:
185
+ return "ADD %s" % compiler.get_column_specification(column, **kw)
186
+
187
+
188
+ @compiles(IdentityColumnDefault, "oracle")
189
+ def visit_identity_column(
190
+ element: IdentityColumnDefault, compiler: OracleDDLCompiler, **kw
191
+ ):
192
+ text = "%s %s " % (
193
+ alter_table(compiler, element.table_name, element.schema),
194
+ alter_column(compiler, element.column_name),
195
+ )
196
+ if element.default is None:
197
+ # drop identity
198
+ text += "DROP IDENTITY"
199
+ return text
200
+ else:
201
+ text += compiler.visit_identity_column(element.default)
202
+ return text
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/postgresql.py ADDED
@@ -0,0 +1,854 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ import logging
7
+ import re
8
+ from typing import Any
9
+ from typing import cast
10
+ from typing import Dict
11
+ from typing import List
12
+ from typing import Optional
13
+ from typing import Sequence
14
+ from typing import Tuple
15
+ from typing import TYPE_CHECKING
16
+ from typing import Union
17
+
18
+ from sqlalchemy import Column
19
+ from sqlalchemy import Float
20
+ from sqlalchemy import Identity
21
+ from sqlalchemy import literal_column
22
+ from sqlalchemy import Numeric
23
+ from sqlalchemy import select
24
+ from sqlalchemy import text
25
+ from sqlalchemy import types as sqltypes
26
+ from sqlalchemy.dialects.postgresql import BIGINT
27
+ from sqlalchemy.dialects.postgresql import ExcludeConstraint
28
+ from sqlalchemy.dialects.postgresql import INTEGER
29
+ from sqlalchemy.schema import CreateIndex
30
+ from sqlalchemy.sql.elements import ColumnClause
31
+ from sqlalchemy.sql.elements import TextClause
32
+ from sqlalchemy.sql.functions import FunctionElement
33
+ from sqlalchemy.types import NULLTYPE
34
+
35
+ from .base import alter_column
36
+ from .base import alter_table
37
+ from .base import AlterColumn
38
+ from .base import ColumnComment
39
+ from .base import format_column_name
40
+ from .base import format_table_name
41
+ from .base import format_type
42
+ from .base import IdentityColumnDefault
43
+ from .base import RenameTable
44
+ from .impl import ComparisonResult
45
+ from .impl import DefaultImpl
46
+ from .. import util
47
+ from ..autogenerate import render
48
+ from ..operations import ops
49
+ from ..operations import schemaobj
50
+ from ..operations.base import BatchOperations
51
+ from ..operations.base import Operations
52
+ from ..util import sqla_compat
53
+ from ..util.sqla_compat import compiles
54
+
55
+
56
+ if TYPE_CHECKING:
57
+ from typing import Literal
58
+
59
+ from sqlalchemy import Index
60
+ from sqlalchemy import UniqueConstraint
61
+ from sqlalchemy.dialects.postgresql.array import ARRAY
62
+ from sqlalchemy.dialects.postgresql.base import PGDDLCompiler
63
+ from sqlalchemy.dialects.postgresql.hstore import HSTORE
64
+ from sqlalchemy.dialects.postgresql.json import JSON
65
+ from sqlalchemy.dialects.postgresql.json import JSONB
66
+ from sqlalchemy.sql.elements import ClauseElement
67
+ from sqlalchemy.sql.elements import ColumnElement
68
+ from sqlalchemy.sql.elements import quoted_name
69
+ from sqlalchemy.sql.schema import MetaData
70
+ from sqlalchemy.sql.schema import Table
71
+ from sqlalchemy.sql.type_api import TypeEngine
72
+
73
+ from .base import _ServerDefault
74
+ from ..autogenerate.api import AutogenContext
75
+ from ..autogenerate.render import _f_name
76
+ from ..runtime.migration import MigrationContext
77
+
78
+
79
+ log = logging.getLogger(__name__)
80
+
81
+
82
+ class PostgresqlImpl(DefaultImpl):
83
+ __dialect__ = "postgresql"
84
+ transactional_ddl = True
85
+ type_synonyms = DefaultImpl.type_synonyms + (
86
+ {"FLOAT", "DOUBLE PRECISION"},
87
+ )
88
+
89
+ def create_index(self, index: Index, **kw: Any) -> None:
90
+ # this likely defaults to None if not present, so get()
91
+ # should normally not return the default value. being
92
+ # defensive in any case
93
+ postgresql_include = index.kwargs.get("postgresql_include", None) or ()
94
+ for col in postgresql_include:
95
+ if col not in index.table.c: # type: ignore[union-attr]
96
+ index.table.append_column( # type: ignore[union-attr]
97
+ Column(col, sqltypes.NullType)
98
+ )
99
+ self._exec(CreateIndex(index, **kw))
100
+
101
+ def prep_table_for_batch(self, batch_impl, table):
102
+ for constraint in table.constraints:
103
+ if (
104
+ constraint.name is not None
105
+ and constraint.name in batch_impl.named_constraints
106
+ ):
107
+ self.drop_constraint(constraint)
108
+
109
+ def compare_server_default(
110
+ self,
111
+ inspector_column,
112
+ metadata_column,
113
+ rendered_metadata_default,
114
+ rendered_inspector_default,
115
+ ):
116
+ # don't do defaults for SERIAL columns
117
+ if (
118
+ metadata_column.primary_key
119
+ and metadata_column is metadata_column.table._autoincrement_column
120
+ ):
121
+ return False
122
+
123
+ conn_col_default = rendered_inspector_default
124
+
125
+ defaults_equal = conn_col_default == rendered_metadata_default
126
+ if defaults_equal:
127
+ return False
128
+
129
+ if None in (
130
+ conn_col_default,
131
+ rendered_metadata_default,
132
+ metadata_column.server_default,
133
+ ):
134
+ return not defaults_equal
135
+
136
+ metadata_default = metadata_column.server_default.arg
137
+
138
+ if isinstance(metadata_default, str):
139
+ if not isinstance(inspector_column.type, (Numeric, Float)):
140
+ metadata_default = re.sub(r"^'|'$", "", metadata_default)
141
+ metadata_default = f"'{metadata_default}'"
142
+
143
+ metadata_default = literal_column(metadata_default)
144
+
145
+ # run a real compare against the server
146
+ conn = self.connection
147
+ assert conn is not None
148
+ return not conn.scalar(
149
+ select(literal_column(conn_col_default) == metadata_default)
150
+ )
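
The closing branch above delegates the equivalence test to PostgreSQL itself; as a sketch with assumed defaults, the emitted query is just a boolean SELECT:

    from sqlalchemy import literal_column, select

    stmt = select(literal_column("now()") == literal_column("CURRENT_TIMESTAMP"))
    # renders roughly as: SELECT now() = CURRENT_TIMESTAMP AS anon_1
    # a true result means the defaults are equivalent, so no diff is reported
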
151
+
152
+ def alter_column(
153
+ self,
154
+ table_name: str,
155
+ column_name: str,
156
+ *,
157
+ nullable: Optional[bool] = None,
158
+ server_default: Optional[
159
+ Union[_ServerDefault, Literal[False]]
160
+ ] = False,
161
+ name: Optional[str] = None,
162
+ type_: Optional[TypeEngine] = None,
163
+ schema: Optional[str] = None,
164
+ autoincrement: Optional[bool] = None,
165
+ existing_type: Optional[TypeEngine] = None,
166
+ existing_server_default: Optional[_ServerDefault] = None,
167
+ existing_nullable: Optional[bool] = None,
168
+ existing_autoincrement: Optional[bool] = None,
169
+ **kw: Any,
170
+ ) -> None:
171
+ using = kw.pop("postgresql_using", None)
172
+
173
+ if using is not None and type_ is None:
174
+ raise util.CommandError(
175
+ "postgresql_using must be used with the type_ parameter"
176
+ )
177
+
178
+ if type_ is not None:
179
+ self._exec(
180
+ PostgresqlColumnType(
181
+ table_name,
182
+ column_name,
183
+ type_,
184
+ schema=schema,
185
+ using=using,
186
+ existing_type=existing_type,
187
+ existing_server_default=existing_server_default,
188
+ existing_nullable=existing_nullable,
189
+ )
190
+ )
191
+
192
+ super().alter_column(
193
+ table_name,
194
+ column_name,
195
+ nullable=nullable,
196
+ server_default=server_default,
197
+ name=name,
198
+ schema=schema,
199
+ autoincrement=autoincrement,
200
+ existing_type=existing_type,
201
+ existing_server_default=existing_server_default,
202
+ existing_nullable=existing_nullable,
203
+ existing_autoincrement=existing_autoincrement,
204
+ **kw,
205
+ )
206
+
207
+ def autogen_column_reflect(self, inspector, table, column_info):
208
+ if column_info.get("default") and isinstance(
209
+ column_info["type"], (INTEGER, BIGINT)
210
+ ):
211
+ seq_match = re.match(
212
+ r"nextval\('(.+?)'::regclass\)", column_info["default"]
213
+ )
214
+ if seq_match:
215
+ info = sqla_compat._exec_on_inspector(
216
+ inspector,
217
+ text(
218
+ "select c.relname, a.attname "
219
+ "from pg_class as c join "
220
+ "pg_depend d on d.objid=c.oid and "
221
+ "d.classid='pg_class'::regclass and "
222
+ "d.refclassid='pg_class'::regclass "
223
+ "join pg_class t on t.oid=d.refobjid "
224
+ "join pg_attribute a on a.attrelid=t.oid and "
225
+ "a.attnum=d.refobjsubid "
226
+ "where c.relkind='S' and "
227
+ "c.oid=cast(:seqname as regclass)"
228
+ ),
229
+ seqname=seq_match.group(1),
230
+ ).first()
231
+ if info:
232
+ seqname, colname = info
233
+ if colname == column_info["name"]:
234
+ log.info(
235
+ "Detected sequence named '%s' as "
236
+ "owned by integer column '%s(%s)', "
237
+ "assuming SERIAL and omitting",
238
+ seqname,
239
+ table.name,
240
+ colname,
241
+ )
242
+ # sequence, and the owner is this column,
243
+ # it's a SERIAL - whack it!
244
+ del column_info["default"]
245
+
246
+ def correct_for_autogen_constraints(
247
+ self,
248
+ conn_unique_constraints,
249
+ conn_indexes,
250
+ metadata_unique_constraints,
251
+ metadata_indexes,
252
+ ):
253
+ doubled_constraints = {
254
+ index
255
+ for index in conn_indexes
256
+ if index.info.get("duplicates_constraint")
257
+ }
258
+
259
+ for ix in doubled_constraints:
260
+ conn_indexes.remove(ix)
261
+
262
+ if not sqla_compat.sqla_2:
263
+ self._skip_functional_indexes(metadata_indexes, conn_indexes)
264
+
265
+ # pg behavior regarding modifiers
266
+ # | # | compiled sql | returned sql | regexp. group is removed |
267
+ # | - | ---------------- | -----------------| ------------------------ |
268
+ # | 1 | nulls first | nulls first | - |
269
+ # | 2 | nulls last | | (?<! desc)( nulls last)$ |
270
+ # | 3 | asc | | ( asc)$ |
271
+ # | 4 | asc nulls first | nulls first | ( asc) nulls first$ |
272
+ # | 5 | asc nulls last | | ( asc nulls last)$ |
273
+ # | 6 | desc | desc | - |
274
+ # | 7 | desc nulls first | desc | desc( nulls first)$ |
275
+ # | 8 | desc nulls last | desc nulls last | - |
276
+ _default_modifiers_re = ( # order of case 2 and 5 matters
277
+ re.compile("( asc nulls last)$"), # case 5
278
+ re.compile("(?<! desc)( nulls last)$"), # case 2
279
+ re.compile("( asc)$"), # case 3
280
+ re.compile("( asc) nulls first$"), # case 4
281
+ re.compile(" desc( nulls first)$"), # case 7
282
+ )
283
+
284
+ def _cleanup_index_expr(self, index: Index, expr: str) -> str:
285
+ expr = expr.lower().replace('"', "").replace("'", "")
286
+ if index.table is not None:
287
+ # should not be needed, since include_table=False is in compile
288
+ expr = expr.replace(f"{index.table.name.lower()}.", "")
289
+
290
+ if "::" in expr:
291
+ # strip :: cast. types can have spaces in them
292
+ expr = re.sub(r"(::[\w ]+\w)", "", expr)
293
+
294
+ while expr and expr[0] == "(" and expr[-1] == ")":
295
+ expr = expr[1:-1]
296
+
297
+ # NOTE: when parsing the connection expression this cleanup could
298
+ # be skipped
299
+ for rs in self._default_modifiers_re:
300
+ if match := rs.search(expr):
301
+ start, end = match.span(1)
302
+ expr = expr[:start] + expr[end:]
303
+ break
304
+
305
+ while expr and expr[0] == "(" and expr[-1] == ")":
306
+ expr = expr[1:-1]
307
+
308
+ # strip casts
309
+ cast_re = re.compile(r"cast\s*\(")
310
+ if cast_re.match(expr):
311
+ expr = cast_re.sub("", expr)
312
+ # remove the as type
313
+ expr = re.sub(r"as\s+[^)]+\)", "", expr)
314
+ # remove spaces
315
+ expr = expr.replace(" ", "")
316
+ return expr
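
A worked example of the cleanup above (the reflected expression text is assumed):

    # '(lower(name::text)) asc'   reflected index expression, lowercased
    # -> '(lower(name)) asc'      after the '::text' cast is stripped
    # -> '(lower(name))'          after the default-modifier regex eats ' asc'
    # -> 'lower(name)'            after the outer parentheses are unwrapped
    # the metadata expression reduces to the same string, so they compare equal
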
317
+
318
+ def _dialect_options(
319
+ self, item: Union[Index, UniqueConstraint]
320
+ ) -> Tuple[Any, ...]:
321
+ # only the positive case is returned by sqlalchemy reflection so
322
+ # None and False are treated the same
323
+ if item.dialect_kwargs.get("postgresql_nulls_not_distinct"):
324
+ return ("nulls_not_distinct",)
325
+ return ()
326
+
327
+ def compare_indexes(
328
+ self,
329
+ metadata_index: Index,
330
+ reflected_index: Index,
331
+ ) -> ComparisonResult:
332
+ msg = []
333
+ unique_msg = self._compare_index_unique(
334
+ metadata_index, reflected_index
335
+ )
336
+ if unique_msg:
337
+ msg.append(unique_msg)
338
+ m_exprs = metadata_index.expressions
339
+ r_exprs = reflected_index.expressions
340
+ if len(m_exprs) != len(r_exprs):
341
+ msg.append(f"expression number {len(r_exprs)} to {len(m_exprs)}")
342
+ if msg:
343
+ # no point going further, return early
344
+ return ComparisonResult.Different(msg)
345
+ skip = []
346
+ for pos, (m_e, r_e) in enumerate(zip(m_exprs, r_exprs), 1):
347
+ m_compile = self._compile_element(m_e)
348
+ m_text = self._cleanup_index_expr(metadata_index, m_compile)
349
+ # print(f"META ORIG: {m_compile!r} CLEANUP: {m_text!r}")
350
+ r_compile = self._compile_element(r_e)
351
+ r_text = self._cleanup_index_expr(metadata_index, r_compile)
352
+ # print(f"CONN ORIG: {r_compile!r} CLEANUP: {r_text!r}")
353
+ if m_text == r_text:
354
+ continue # these expressions are equal
355
+ elif m_compile.strip().endswith("_ops") and (
356
+ " " in m_compile or ")" in m_compile # is an expression
357
+ ):
358
+ skip.append(
359
+ f"expression #{pos} {m_compile!r} detected "
360
+ "as including operator clause."
361
+ )
362
+ util.warn(
363
+ f"Expression #{pos} {m_compile!r} in index "
364
+ f"{reflected_index.name!r} detected to include "
365
+ "an operator clause. Expression compare cannot proceed. "
366
+ "Please move the operator clause to the "
367
+ "``postgresql_ops`` dict to enable proper compare "
368
+ "of the index expressions: "
369
+ "https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#operator-classes", # noqa: E501
370
+ )
371
+ else:
372
+ msg.append(f"expression #{pos} {r_compile!r} to {m_compile!r}")
373
+
374
+ m_options = self._dialect_options(metadata_index)
375
+ r_options = self._dialect_options(reflected_index)
376
+ if m_options != r_options:
377
+ msg.append(f"options {r_options} to {m_options}")
378
+
379
+ if msg:
380
+ return ComparisonResult.Different(msg)
381
+ elif skip:
382
+ # if there are other changes detected don't skip the index
383
+ return ComparisonResult.Skip(skip)
384
+ else:
385
+ return ComparisonResult.Equal()
386
+
387
+ def compare_unique_constraint(
388
+ self,
389
+ metadata_constraint: UniqueConstraint,
390
+ reflected_constraint: UniqueConstraint,
391
+ ) -> ComparisonResult:
392
+ metadata_tup = self._create_metadata_constraint_sig(
393
+ metadata_constraint
394
+ )
395
+ reflected_tup = self._create_reflected_constraint_sig(
396
+ reflected_constraint
397
+ )
398
+
399
+ meta_sig = metadata_tup.unnamed
400
+ conn_sig = reflected_tup.unnamed
401
+ if conn_sig != meta_sig:
402
+ return ComparisonResult.Different(
403
+ f"expression {conn_sig} to {meta_sig}"
404
+ )
405
+
406
+ metadata_do = self._dialect_options(metadata_tup.const)
407
+ conn_do = self._dialect_options(reflected_tup.const)
408
+ if metadata_do != conn_do:
409
+ return ComparisonResult.Different(
410
+ f"expression {conn_do} to {metadata_do}"
411
+ )
412
+
413
+ return ComparisonResult.Equal()
414
+
415
+ def adjust_reflected_dialect_options(
416
+ self, reflected_options: Dict[str, Any], kind: str
417
+ ) -> Dict[str, Any]:
418
+ options: Dict[str, Any]
419
+ options = reflected_options.get("dialect_options", {}).copy()
420
+ if not options.get("postgresql_include"):
421
+ options.pop("postgresql_include", None)
422
+ return options
423
+
424
+ def _compile_element(self, element: Union[ClauseElement, str]) -> str:
425
+ if isinstance(element, str):
426
+ return element
427
+ return element.compile(
428
+ dialect=self.dialect,
429
+ compile_kwargs={"literal_binds": True, "include_table": False},
430
+ ).string
431
+
432
+ def render_ddl_sql_expr(
433
+ self,
434
+ expr: ClauseElement,
435
+ is_server_default: bool = False,
436
+ is_index: bool = False,
437
+ **kw: Any,
438
+ ) -> str:
439
+ """Render a SQL expression that is typically a server default,
440
+ index expression, etc.
441
+
442
+ """
443
+
444
+ # apply self_group to index expressions;
445
+ # see https://github.com/sqlalchemy/sqlalchemy/blob/
446
+ # 82fa95cfce070fab401d020c6e6e4a6a96cc2578/
447
+ # lib/sqlalchemy/dialects/postgresql/base.py#L2261
448
+ if is_index and not isinstance(expr, ColumnClause):
449
+ expr = expr.self_group()
450
+
451
+ return super().render_ddl_sql_expr(
452
+ expr, is_server_default=is_server_default, is_index=is_index, **kw
453
+ )
454
+
455
+ def render_type(
456
+ self, type_: TypeEngine, autogen_context: AutogenContext
457
+ ) -> Union[str, Literal[False]]:
458
+ mod = type(type_).__module__
459
+ if not mod.startswith("sqlalchemy.dialects.postgresql"):
460
+ return False
461
+
462
+ if hasattr(self, "_render_%s_type" % type_.__visit_name__):
463
+ meth = getattr(self, "_render_%s_type" % type_.__visit_name__)
464
+ return meth(type_, autogen_context)
465
+
466
+ return False
467
+
468
+ def _render_HSTORE_type(
469
+ self, type_: HSTORE, autogen_context: AutogenContext
470
+ ) -> str:
471
+ return cast(
472
+ str,
473
+ render._render_type_w_subtype(
474
+ type_, autogen_context, "text_type", r"(.+?\(.*text_type=)"
475
+ ),
476
+ )
477
+
478
+ def _render_ARRAY_type(
479
+ self, type_: ARRAY, autogen_context: AutogenContext
480
+ ) -> str:
481
+ return cast(
482
+ str,
483
+ render._render_type_w_subtype(
484
+ type_, autogen_context, "item_type", r"(.+?\()"
485
+ ),
486
+ )
487
+
488
+ def _render_JSON_type(
489
+ self, type_: JSON, autogen_context: AutogenContext
490
+ ) -> str:
491
+ return cast(
492
+ str,
493
+ render._render_type_w_subtype(
494
+ type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
495
+ ),
496
+ )
497
+
498
+ def _render_JSONB_type(
499
+ self, type_: JSONB, autogen_context: AutogenContext
500
+ ) -> str:
501
+ return cast(
502
+ str,
503
+ render._render_type_w_subtype(
504
+ type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)"
505
+ ),
506
+ )
507
+
508
+
509
+ class PostgresqlColumnType(AlterColumn):
510
+ def __init__(
511
+ self, name: str, column_name: str, type_: TypeEngine, **kw
512
+ ) -> None:
513
+ using = kw.pop("using", None)
514
+ super().__init__(name, column_name, **kw)
515
+ self.type_ = sqltypes.to_instance(type_)
516
+ self.using = using
517
+
518
+
519
+ @compiles(RenameTable, "postgresql")
520
+ def visit_rename_table(
521
+ element: RenameTable, compiler: PGDDLCompiler, **kw
522
+ ) -> str:
523
+ return "%s RENAME TO %s" % (
524
+ alter_table(compiler, element.table_name, element.schema),
525
+ format_table_name(compiler, element.new_table_name, None),
526
+ )
527
+
528
+
529
+ @compiles(PostgresqlColumnType, "postgresql")
530
+ def visit_column_type(
531
+ element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw
532
+ ) -> str:
533
+ return "%s %s %s %s" % (
534
+ alter_table(compiler, element.table_name, element.schema),
535
+ alter_column(compiler, element.column_name),
536
+ "TYPE %s" % format_type(compiler, element.type_),
537
+ "USING %s" % element.using if element.using else "",
538
+ )
539
+
540
+
541
+ @compiles(ColumnComment, "postgresql")
542
+ def visit_column_comment(
543
+ element: ColumnComment, compiler: PGDDLCompiler, **kw
544
+ ) -> str:
545
+ ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}"
546
+ comment = (
547
+ compiler.sql_compiler.render_literal_value(
548
+ element.comment, sqltypes.String()
549
+ )
550
+ if element.comment is not None
551
+ else "NULL"
552
+ )
553
+
554
+ return ddl.format(
555
+ table_name=format_table_name(
556
+ compiler, element.table_name, element.schema
557
+ ),
558
+ column_name=format_column_name(compiler, element.column_name),
559
+ comment=comment,
560
+ )
561
+
562
+
563
+ @compiles(IdentityColumnDefault, "postgresql")
564
+ def visit_identity_column(
565
+ element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw
566
+ ):
567
+ text = "%s %s " % (
568
+ alter_table(compiler, element.table_name, element.schema),
569
+ alter_column(compiler, element.column_name),
570
+ )
571
+ if element.default is None:
572
+ # drop identity
573
+ text += "DROP IDENTITY"
574
+ return text
575
+ elif element.existing_server_default is None:
576
+ # add identity options
577
+ text += "ADD "
578
+ text += compiler.visit_identity_column(element.default)
579
+ return text
580
+ else:
581
+ # alter identity
582
+ diff, _, _ = element.impl._compare_identity_default(
583
+ element.default, element.existing_server_default
584
+ )
585
+ identity = element.default
586
+ for attr in sorted(diff):
587
+ if attr == "always":
588
+ text += "SET GENERATED %s " % (
589
+ "ALWAYS" if identity.always else "BY DEFAULT"
590
+ )
591
+ else:
592
+ text += "SET %s " % compiler.get_identity_options(
593
+ Identity(**{attr: getattr(identity, attr)})
594
+ )
595
+ return text
596
+
597
+
598
+ @Operations.register_operation("create_exclude_constraint")
599
+ @BatchOperations.register_operation(
600
+ "create_exclude_constraint", "batch_create_exclude_constraint"
601
+ )
602
+ @ops.AddConstraintOp.register_add_constraint("exclude_constraint")
603
+ class CreateExcludeConstraintOp(ops.AddConstraintOp):
604
+ """Represent a create exclude constraint operation."""
605
+
606
+ constraint_type = "exclude"
607
+
608
+ def __init__(
609
+ self,
610
+ constraint_name: sqla_compat._ConstraintName,
611
+ table_name: Union[str, quoted_name],
612
+ elements: Union[
613
+ Sequence[Tuple[str, str]],
614
+ Sequence[Tuple[ColumnClause[Any], str]],
615
+ ],
616
+ where: Optional[Union[ColumnElement[bool], str]] = None,
617
+ schema: Optional[str] = None,
618
+ _orig_constraint: Optional[ExcludeConstraint] = None,
619
+ **kw,
620
+ ) -> None:
621
+ self.constraint_name = constraint_name
622
+ self.table_name = table_name
623
+ self.elements = elements
624
+ self.where = where
625
+ self.schema = schema
626
+ self._orig_constraint = _orig_constraint
627
+ self.kw = kw
628
+
629
+ @classmethod
630
+ def from_constraint( # type:ignore[override]
631
+ cls, constraint: ExcludeConstraint
632
+ ) -> CreateExcludeConstraintOp:
633
+ constraint_table = sqla_compat._table_for_constraint(constraint)
634
+ return cls(
635
+ constraint.name,
636
+ constraint_table.name,
637
+ [ # type: ignore
638
+ (expr, op) for expr, name, op in constraint._render_exprs
639
+ ],
640
+ where=cast("ColumnElement[bool] | None", constraint.where),
641
+ schema=constraint_table.schema,
642
+ _orig_constraint=constraint,
643
+ deferrable=constraint.deferrable,
644
+ initially=constraint.initially,
645
+ using=constraint.using,
646
+ )
647
+
648
+ def to_constraint(
649
+ self, migration_context: Optional[MigrationContext] = None
650
+ ) -> ExcludeConstraint:
651
+ if self._orig_constraint is not None:
652
+ return self._orig_constraint
653
+ schema_obj = schemaobj.SchemaObjects(migration_context)
654
+ t = schema_obj.table(self.table_name, schema=self.schema)
655
+ excl = ExcludeConstraint(
656
+ *self.elements,
657
+ name=self.constraint_name,
658
+ where=self.where,
659
+ **self.kw,
660
+ )
661
+ for (
662
+ expr,
663
+ name,
664
+ oper,
665
+ ) in excl._render_exprs:
666
+ t.append_column(Column(name, NULLTYPE))
667
+ t.append_constraint(excl)
668
+ return excl
669
+
670
+ @classmethod
671
+ def create_exclude_constraint(
672
+ cls,
673
+ operations: Operations,
674
+ constraint_name: str,
675
+ table_name: str,
676
+ *elements: Any,
677
+ **kw: Any,
678
+ ) -> Optional[Table]:
679
+ """Issue an alter to create an EXCLUDE constraint using the
680
+ current migration context.
681
+
682
+ .. note:: This method is Postgresql specific, and additionally
683
+ requires at least SQLAlchemy 1.0.
684
+
685
+ e.g.::
686
+
687
+ from alembic import op
688
+
689
+ op.create_exclude_constraint(
690
+ "user_excl",
691
+ "user",
692
+ ("period", "&&"),
693
+ ("group", "="),
694
+ where=("group != 'some group'"),
695
+ )
696
+
697
+ Note that the expressions work the same way as that of
698
+ the ``ExcludeConstraint`` object itself; if plain strings are
699
+ passed, quoting rules must be applied manually.
700
+
701
+ :param name: Name of the constraint.
702
+ :param table_name: String name of the source table.
703
+ :param elements: exclude conditions.
704
+ :param where: SQL expression or SQL string with optional WHERE
705
+ clause.
706
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
707
+ NOT DEFERRABLE when issuing DDL for this constraint.
708
+ :param initially: optional string. If set, emit INITIALLY <value>
709
+ when issuing DDL for this constraint.
710
+ :param schema: Optional schema name to operate within.
711
+
712
+ """
713
+ op = cls(constraint_name, table_name, elements, **kw)
714
+ return operations.invoke(op)
715
+
716
+ @classmethod
717
+ def batch_create_exclude_constraint(
718
+ cls,
719
+ operations: BatchOperations,
720
+ constraint_name: str,
721
+ *elements: Any,
722
+ **kw: Any,
723
+ ) -> Optional[Table]:
724
+ """Issue a "create exclude constraint" instruction using the
725
+ current batch migration context.
726
+
727
+ .. note:: This method is Postgresql specific, and additionally
728
+ requires at least SQLAlchemy 1.0.
729
+
730
+ .. seealso::
731
+
732
+ :meth:`.Operations.create_exclude_constraint`
733
+
734
+ """
735
+ kw["schema"] = operations.impl.schema
736
+ op = cls(constraint_name, operations.impl.table_name, elements, **kw)
737
+ return operations.invoke(op)
738
+
739
+
740
+ @render.renderers.dispatch_for(CreateExcludeConstraintOp)
741
+ def _add_exclude_constraint(
742
+ autogen_context: AutogenContext, op: CreateExcludeConstraintOp
+ ) -> str:
+     return _exclude_constraint(op.to_constraint(), autogen_context, alter=True)
+ 
+ 
+ @render._constraint_renderers.dispatch_for(ExcludeConstraint)
+ def _render_inline_exclude_constraint(
+     constraint: ExcludeConstraint,
+     autogen_context: AutogenContext,
+     namespace_metadata: MetaData,
+ ) -> str:
+     rendered = render._user_defined_render(
+         "exclude", constraint, autogen_context
+     )
+     if rendered is not False:
+         return rendered
+ 
+     return _exclude_constraint(constraint, autogen_context, False)
+ 
+ 
+ def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str:
+     imports = autogen_context.imports
+     if imports is not None:
+         imports.add("from sqlalchemy.dialects import postgresql")
+     return "postgresql."
+ 
+ 
+ def _exclude_constraint(
+     constraint: ExcludeConstraint,
+     autogen_context: AutogenContext,
+     alter: bool,
+ ) -> str:
+     opts: List[Tuple[str, Union[quoted_name, str, _f_name, None]]] = []
+ 
+     has_batch = autogen_context._has_batch
+ 
+     if constraint.deferrable:
+         opts.append(("deferrable", str(constraint.deferrable)))
+     if constraint.initially:
+         opts.append(("initially", str(constraint.initially)))
+     if constraint.using:
+         opts.append(("using", str(constraint.using)))
+     if not has_batch and alter and constraint.table.schema:
+         opts.append(("schema", render._ident(constraint.table.schema)))
+     if not alter and constraint.name:
+         opts.append(
+             ("name", render._render_gen_name(autogen_context, constraint.name))
+         )
+ 
+     def do_expr_where_opts():
+         args = [
+             "(%s, %r)"
+             % (
+                 _render_potential_column(
+                     sqltext,  # type:ignore[arg-type]
+                     autogen_context,
+                 ),
+                 opstring,
+             )
+             for sqltext, name, opstring in constraint._render_exprs
+         ]
+         if constraint.where is not None:
+             args.append(
+                 "where=%s"
+                 % render._render_potential_expr(
+                     constraint.where, autogen_context
+                 )
+             )
+         args.extend(["%s=%r" % (k, v) for k, v in opts])
+         return args
+ 
+     if alter:
+         args = [
+             repr(render._render_gen_name(autogen_context, constraint.name))
+         ]
+         if not has_batch:
+             args += [repr(render._ident(constraint.table.name))]
+         args.extend(do_expr_where_opts())
+         return "%(prefix)screate_exclude_constraint(%(args)s)" % {
+             "prefix": render._alembic_autogenerate_prefix(autogen_context),
+             "args": ", ".join(args),
+         }
+     else:
+         args = do_expr_where_opts()
+         return "%(prefix)sExcludeConstraint(%(args)s)" % {
+             "prefix": _postgresql_autogenerate_prefix(autogen_context),
+             "args": ", ".join(args),
+         }
+ 
+ 
+ def _render_potential_column(
+     value: Union[
+         ColumnClause[Any], Column[Any], TextClause, FunctionElement[Any]
+     ],
+     autogen_context: AutogenContext,
+ ) -> str:
+     if isinstance(value, ColumnClause):
+         if value.is_literal:
+             # like literal_column("int8range(from, to)") in ExcludeConstraint
+             template = "%(prefix)sliteral_column(%(name)r)"
+         else:
+             template = "%(prefix)scolumn(%(name)r)"
+ 
+         return template % {
+             "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context),
+             "name": value.name,
+         }
+     else:
+         return render._render_potential_expr(
+             value,
+             autogen_context,
+             wrap_in_element=isinstance(value, (TextClause, FunctionElement)),
+         )
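For orientation, a minimal sketch (not part of the upstream file; all names illustrative, assumes SQLAlchemy with the postgresql dialect installed) of the kind of metadata-level constraint that _exclude_constraint consumes. Autogenerate would render the constraint below roughly as postgresql.ExcludeConstraint((sa.column('room'), '='), (sa.column('during'), '&&'), using='gist', name='no_room_overlap'):

from sqlalchemy import Column, Integer, MetaData, Table
from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

metadata = MetaData()
room = Column("room", Integer)
during = Column("during", TSRANGE)

booking = Table(
    "booking",
    metadata,
    room,
    during,
    # picked up by _render_inline_exclude_constraint during autogenerate
    ExcludeConstraint(
        (room, "="), (during, "&&"), using="gist", name="no_room_overlap"
    ),
)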
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/ddl/sqlite.py ADDED
@@ -0,0 +1,237 @@
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+ # mypy: no-warn-return-any, allow-any-generics
+ 
+ from __future__ import annotations
+ 
+ import re
+ from typing import Any
+ from typing import Dict
+ from typing import Optional
+ from typing import TYPE_CHECKING
+ from typing import Union
+ 
+ from sqlalchemy import cast
+ from sqlalchemy import Computed
+ from sqlalchemy import JSON
+ from sqlalchemy import schema
+ from sqlalchemy import sql
+ 
+ from .base import alter_table
+ from .base import ColumnName
+ from .base import format_column_name
+ from .base import format_table_name
+ from .base import RenameTable
+ from .impl import DefaultImpl
+ from .. import util
+ from ..util.sqla_compat import compiles
+ 
+ if TYPE_CHECKING:
+     from sqlalchemy.engine.reflection import Inspector
+     from sqlalchemy.sql.compiler import DDLCompiler
+     from sqlalchemy.sql.elements import Cast
+     from sqlalchemy.sql.elements import ClauseElement
+     from sqlalchemy.sql.schema import Column
+     from sqlalchemy.sql.schema import Constraint
+     from sqlalchemy.sql.schema import Table
+     from sqlalchemy.sql.type_api import TypeEngine
+ 
+     from ..operations.batch import BatchOperationsImpl
+ 
+ 
+ class SQLiteImpl(DefaultImpl):
+     __dialect__ = "sqlite"
+ 
+     transactional_ddl = False
+     """SQLite supports transactional DDL, but pysqlite does not:
+     see: http://bugs.python.org/issue10740
+     """
+ 
+     def requires_recreate_in_batch(
+         self, batch_op: BatchOperationsImpl
+     ) -> bool:
+         """Return True if the given :class:`.BatchOperationsImpl`
+         would need the table to be recreated and copied in order to
+         proceed.
+ 
+         Normally, only returns True on SQLite when operations other
+         than add_column are present.
+ 
+         """
+         for op in batch_op.batch:
+             if op[0] == "add_column":
+                 col = op[1][1]
+                 if isinstance(
+                     col.server_default, schema.DefaultClause
+                 ) and isinstance(col.server_default.arg, sql.ClauseElement):
+                     return True
+                 elif (
+                     isinstance(col.server_default, Computed)
+                     and col.server_default.persisted
+                 ):
+                     return True
+             elif op[0] not in ("create_index", "drop_index"):
+                 return True
+         else:
+             return False
+ 
+     def add_constraint(self, const: Constraint):
+         # attempt to distinguish between an
+         # auto-gen constraint and an explicit one
+         if const._create_rule is None:
+             raise NotImplementedError(
+                 "No support for ALTER of constraints in SQLite dialect. "
+                 "Please refer to the batch mode feature which allows for "
+                 "SQLite migrations using a copy-and-move strategy."
+             )
+         elif const._create_rule(self):
+             util.warn(
+                 "Skipping unsupported ALTER for "
+                 "creation of implicit constraint. "
+                 "Please refer to the batch mode feature which allows for "
+                 "SQLite migrations using a copy-and-move strategy."
+             )
+ 
+     def drop_constraint(self, const: Constraint, **kw: Any):
+         if const._create_rule is None:
+             raise NotImplementedError(
+                 "No support for ALTER of constraints in SQLite dialect. "
+                 "Please refer to the batch mode feature which allows for "
+                 "SQLite migrations using a copy-and-move strategy."
+             )
+ 
+     def compare_server_default(
+         self,
+         inspector_column: Column[Any],
+         metadata_column: Column[Any],
+         rendered_metadata_default: Optional[str],
+         rendered_inspector_default: Optional[str],
+     ) -> bool:
+         if rendered_metadata_default is not None:
+             rendered_metadata_default = re.sub(
+                 r"^\((.+)\)$", r"\1", rendered_metadata_default
+             )
+ 
+             rendered_metadata_default = re.sub(
+                 r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default
+             )
+ 
+         if rendered_inspector_default is not None:
+             rendered_inspector_default = re.sub(
+                 r"^\((.+)\)$", r"\1", rendered_inspector_default
+             )
+ 
+             rendered_inspector_default = re.sub(
+                 r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default
+             )
+ 
+         return rendered_inspector_default != rendered_metadata_default
+ 
+     def _guess_if_default_is_unparenthesized_sql_expr(
+         self, expr: Optional[str]
+     ) -> bool:
+         """Determine if a server default is a SQL expression or a constant.
+ 
+         There are too many assertions that expect server defaults to round-trip
+         identically without parenthesis added so we will add parens only in
+         very specific cases.
+ 
+         """
+         if not expr:
+             return False
+         elif re.match(r"^[0-9\.]$", expr):
+             return False
+         elif re.match(r"^'.+'$", expr):
+             return False
+         elif re.match(r"^\(.+\)$", expr):
+             return False
+         else:
+             return True
+ 
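A quick illustration of the heuristic above (not part of the upstream file; assumes the vendored alembic is importable, and exploits the fact that the method never touches ``self``, so it can be called unbound):

from alembic.ddl.sqlite import SQLiteImpl

guess = SQLiteImpl._guess_if_default_is_unparenthesized_sql_expr
assert guess(None, None) is False                # no default at all
assert guess(None, "'hello'") is False           # quoted string constant
assert guess(None, "7") is False                 # single-character numeric
assert guess(None, "(lower(name))") is False     # already parenthesized
assert guess(None, "CURRENT_TIMESTAMP") is True  # bare SQL expression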
+     def autogen_column_reflect(
+         self,
+         inspector: Inspector,
+         table: Table,
+         column_info: Dict[str, Any],
+     ) -> None:
+         # SQLite expression defaults require parenthesis when sent
+         # as DDL
+         if self._guess_if_default_is_unparenthesized_sql_expr(
+             column_info.get("default", None)
+         ):
+             column_info["default"] = "(%s)" % (column_info["default"],)
+ 
+     def render_ddl_sql_expr(
+         self, expr: ClauseElement, is_server_default: bool = False, **kw
+     ) -> str:
+         # SQLite expression defaults require parenthesis when sent
+         # as DDL
+         str_expr = super().render_ddl_sql_expr(
+             expr, is_server_default=is_server_default, **kw
+         )
+ 
+         if (
+             is_server_default
+             and self._guess_if_default_is_unparenthesized_sql_expr(str_expr)
+         ):
+             str_expr = "(%s)" % (str_expr,)
+         return str_expr
+ 
+     def cast_for_batch_migrate(
+         self,
+         existing: Column[Any],
+         existing_transfer: Dict[str, Union[TypeEngine, Cast]],
+         new_type: TypeEngine,
+     ) -> None:
+         if (
+             existing.type._type_affinity is not new_type._type_affinity
+             and not isinstance(new_type, JSON)
+         ):
+             existing_transfer["expr"] = cast(
+                 existing_transfer["expr"], new_type
+             )
+ 
+     def correct_for_autogen_constraints(
+         self,
+         conn_unique_constraints,
+         conn_indexes,
+         metadata_unique_constraints,
+         metadata_indexes,
+     ):
+         self._skip_functional_indexes(metadata_indexes, conn_indexes)
+ 
+ 
+ @compiles(RenameTable, "sqlite")
+ def visit_rename_table(
+     element: RenameTable, compiler: DDLCompiler, **kw
+ ) -> str:
+     return "%s RENAME TO %s" % (
+         alter_table(compiler, element.table_name, element.schema),
+         format_table_name(compiler, element.new_table_name, None),
+     )
+ 
+ 
+ @compiles(ColumnName, "sqlite")
+ def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str:
+     return "%s RENAME COLUMN %s TO %s" % (
+         alter_table(compiler, element.table_name, element.schema),
+         format_column_name(compiler, element.column_name),
+         format_column_name(compiler, element.newname),
+     )
+ 
+ 
+ # @compiles(AddColumn, 'sqlite')
+ # def visit_add_column(element, compiler, **kw):
+ #     return "%s %s" % (
+ #         alter_table(compiler, element.table_name, element.schema),
+ #         add_column(compiler, element.column, **kw)
+ #     )
+ 
+ 
+ # def add_column(compiler, column, **kw):
+ #     text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
+ # need to modify SQLAlchemy so that the CHECK associated with a Boolean
+ # or Enum gets placed as part of the column constraints, not the Table
+ # see ticket 98
+ #     for const in column.constraints:
+ #         text += compiler.process(AddConstraint(const))
+ #     return text
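The two @compiles hooks above supply SQLite's spellings of Alembic's rename directives. A minimal sketch (not part of the upstream file; assumes an active migration context on a SQLite bind) of the migration-script calls that exercise them:

from alembic import op

def upgrade():
    # visit_rename_table -> "ALTER TABLE accounts RENAME TO account"
    op.rename_table("accounts", "account")
    # visit_column_name
    # -> "ALTER TABLE account RENAME COLUMN fullname TO name"
    op.alter_column("account", "fullname", new_column_name="name")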
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/environment.py ADDED
@@ -0,0 +1 @@
+ from .runtime.environment import *  # noqa
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/migration.py ADDED
@@ -0,0 +1 @@
+ from .runtime.migration import *  # noqa
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/op.py ADDED
@@ -0,0 +1,5 @@
+ from .operations.base import Operations
+ 
+ # create proxy functions for
+ # each method on the Operations class.
+ Operations.create_module_class_proxy(globals(), locals())
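Because of the proxying above, every public method of Operations also becomes a module-level function on alembic.op, which is how migration scripts normally call it. A minimal sketch (not part of the upstream file; assumes an active migration context, i.e. code inside a revision file):

import sqlalchemy as sa
from alembic import op

def upgrade():
    # op.add_column here is the proxied Operations.add_column
    op.add_column("account", sa.Column("nickname", sa.String(50)))

def downgrade():
    op.drop_column("account", "nickname")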
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/op.pyi ADDED
@@ -0,0 +1,1356 @@
+ # ### this file stubs are generated by tools/write_pyi.py - do not edit ###
+ # ### imports are manually managed
+ from __future__ import annotations
+ 
+ from contextlib import contextmanager
+ from typing import Any
+ from typing import Awaitable
+ from typing import Callable
+ from typing import Dict
+ from typing import Iterator
+ from typing import List
+ from typing import Literal
+ from typing import Mapping
+ from typing import Optional
+ from typing import overload
+ from typing import Sequence
+ from typing import Tuple
+ from typing import Type
+ from typing import TYPE_CHECKING
+ from typing import TypeVar
+ from typing import Union
+ 
+ if TYPE_CHECKING:
+     from sqlalchemy.engine import Connection
+     from sqlalchemy.sql import Executable
+     from sqlalchemy.sql.elements import ColumnElement
+     from sqlalchemy.sql.elements import conv
+     from sqlalchemy.sql.elements import TextClause
+     from sqlalchemy.sql.expression import TableClause
+     from sqlalchemy.sql.schema import Column
+     from sqlalchemy.sql.schema import Computed
+     from sqlalchemy.sql.schema import Identity
+     from sqlalchemy.sql.schema import SchemaItem
+     from sqlalchemy.sql.schema import Table
+     from sqlalchemy.sql.type_api import TypeEngine
+     from sqlalchemy.util import immutabledict
+ 
+     from .operations.base import BatchOperations
+     from .operations.ops import AddColumnOp
+     from .operations.ops import AddConstraintOp
+     from .operations.ops import AlterColumnOp
+     from .operations.ops import AlterTableOp
+     from .operations.ops import BulkInsertOp
+     from .operations.ops import CreateIndexOp
+     from .operations.ops import CreateTableCommentOp
+     from .operations.ops import CreateTableOp
+     from .operations.ops import DropColumnOp
+     from .operations.ops import DropConstraintOp
+     from .operations.ops import DropIndexOp
+     from .operations.ops import DropTableCommentOp
+     from .operations.ops import DropTableOp
+     from .operations.ops import ExecuteSQLOp
+     from .operations.ops import MigrateOperation
+     from .runtime.migration import MigrationContext
+     from .util.sqla_compat import _literal_bindparam
+ 
+ _T = TypeVar("_T")
+ _C = TypeVar("_C", bound=Callable[..., Any])
+ 
+ ### end imports ###
+ 
+ def add_column(
+     table_name: str,
+     column: Column[Any],
+     *,
+     schema: Optional[str] = None,
+     if_not_exists: Optional[bool] = None,
+ ) -> None:
+     """Issue an "add column" instruction using the current
+     migration context.
+ 
+     e.g.::
+ 
+         from alembic import op
+         from sqlalchemy import Column, String
+ 
+         op.add_column("organization", Column("name", String()))
+ 
+     The :meth:`.Operations.add_column` method typically corresponds
+     to the SQL command "ALTER TABLE... ADD COLUMN".  Within the scope
+     of this command, the column's name, datatype, nullability,
+     and optional server-generated defaults may be indicated.
+ 
+     .. note::
+ 
+         With the exception of NOT NULL constraints or single-column FOREIGN
+         KEY constraints, other kinds of constraints such as PRIMARY KEY,
+         UNIQUE or CHECK constraints **cannot** be generated using this
+         method; for these constraints, refer to operations such as
+         :meth:`.Operations.create_primary_key` and
+         :meth:`.Operations.create_check_constraint`. In particular, the
+         following :class:`~sqlalchemy.schema.Column` parameters are
+         **ignored**:
+ 
+         * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+           typically do not support an ALTER operation that can add
+           individual columns one at a time to an existing primary key
+           constraint, therefore it's less ambiguous to use the
+           :meth:`.Operations.create_primary_key` method, which assumes no
+           existing primary key constraint is present.
+         * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+           :meth:`.Operations.create_unique_constraint` method
+         * :paramref:`~sqlalchemy.schema.Column.index` - use the
+           :meth:`.Operations.create_index` method
+ 
+     The provided :class:`~sqlalchemy.schema.Column` object may include a
+     :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+     referencing a remote table name.  For this specific type of constraint,
+     Alembic will automatically emit a second ALTER statement in order to
+     add the single-column FOREIGN KEY constraint separately::
+ 
+         from alembic import op
+         from sqlalchemy import Column, INTEGER, ForeignKey
+ 
+         op.add_column(
+             "organization",
+             Column("account_id", INTEGER, ForeignKey("accounts.id")),
+         )
+ 
+     The column argument passed to :meth:`.Operations.add_column` is a
+     :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+     used in SQLAlchemy.  In particular, values or functions to be indicated
+     as producing the column's default value on the database side are
+     specified using the ``server_default`` parameter, and not ``default``
+     which only specifies Python-side defaults::
+ 
+         from alembic import op
+         from sqlalchemy import Column, TIMESTAMP, func
+ 
+         # specify "DEFAULT NOW" along with the column add
+         op.add_column(
+             "account",
+             Column("timestamp", TIMESTAMP, server_default=func.now()),
+         )
+ 
+     :param table_name: String name of the parent table.
+     :param column: a :class:`sqlalchemy.schema.Column` object
+      representing the new column.
+     :param schema: Optional schema name to operate within.  To control
+      quoting of the schema outside of the default behavior, use
+      the SQLAlchemy construct
+      :class:`~sqlalchemy.sql.elements.quoted_name`.
+     :param if_not_exists: If True, adds IF NOT EXISTS operator
+      when creating the new column for compatible dialects
+ 
+      .. versionadded:: 1.16.0
+ 
+     """
+ 
+ def alter_column(
+     table_name: str,
+     column_name: str,
+     *,
+     nullable: Optional[bool] = None,
+     comment: Union[str, Literal[False], None] = False,
+     server_default: Union[
+         str, bool, Identity, Computed, TextClause, None
+     ] = False,
+     new_column_name: Optional[str] = None,
+     type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
+     existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
+     existing_server_default: Union[
+         str, bool, Identity, Computed, TextClause, None
+     ] = False,
+     existing_nullable: Optional[bool] = None,
+     existing_comment: Optional[str] = None,
+     schema: Optional[str] = None,
+     **kw: Any,
+ ) -> None:
+     r"""Issue an "alter column" instruction using the
+     current migration context.
+ 
+     Generally, only that aspect of the column which
+     is being changed, i.e. name, type, nullability,
+     default, needs to be specified.  Multiple changes
+     can also be specified at once and the backend should
+     "do the right thing", emitting each change either
+     separately or together as the backend allows.
+ 
+     MySQL has special requirements here, since MySQL
+     cannot ALTER a column without a full specification.
+     When producing MySQL-compatible migration files,
+     it is recommended that the ``existing_type``,
+     ``existing_server_default``, and ``existing_nullable``
+     parameters be present, if not being altered.
+ 
+     Type changes which are against the SQLAlchemy
+     "schema" types :class:`~sqlalchemy.types.Boolean`
+     and :class:`~sqlalchemy.types.Enum` may also
+     add or drop constraints which accompany those
+     types on backends that don't support them natively.
+     The ``existing_type`` argument is
+     used in this case to identify and remove a previous
+     constraint that was bound to the type object.
+ 
+     :param table_name: string name of the target table.
+     :param column_name: string name of the target column,
+      as it exists before the operation begins.
+     :param nullable: Optional; specify ``True`` or ``False``
+      to alter the column's nullability.
+     :param server_default: Optional; specify a string
+      SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+      or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+      an alteration to the column's default value.
+      Set to ``None`` to have the default removed.
+     :param comment: optional string text of a new comment to add to the
+      column.
+     :param new_column_name: Optional; specify a string name here to
+      indicate the new name within a column rename operation.
+     :param type_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+      type object to specify a change to the column's type.
+      For SQLAlchemy types that also indicate a constraint (i.e.
+      :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+      the constraint is also generated.
+     :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+      currently understood by the MySQL dialect.
+     :param existing_type: Optional; a
+      :class:`~sqlalchemy.types.TypeEngine`
+      type object to specify the previous type.  This
+      is required for all MySQL column alter operations that
+      don't otherwise specify a new type, as well as for
+      when nullability is being changed on a SQL Server
+      column.  It is also used if the type is a so-called
+      SQLAlchemy "schema" type which may define a constraint (i.e.
+      :class:`~sqlalchemy.types.Boolean`,
+      :class:`~sqlalchemy.types.Enum`),
+      so that the constraint can be dropped.
+     :param existing_server_default: Optional; The existing
+      default value of the column.  Required on MySQL if
+      an existing default is not being changed; else MySQL
+      removes the default.
+     :param existing_nullable: Optional; the existing nullability
+      of the column.  Required on MySQL if the existing nullability
+      is not being changed; else MySQL sets this to NULL.
+     :param existing_autoincrement: Optional; the existing autoincrement
+      of the column.  Used for MySQL's system of altering a column
+      that specifies ``AUTO_INCREMENT``.
+     :param existing_comment: string text of the existing comment on the
+      column to be maintained.  Required on MySQL if the existing comment
+      on the column is not being changed.
+     :param schema: Optional schema name to operate within.  To control
+      quoting of the schema outside of the default behavior, use
+      the SQLAlchemy construct
+      :class:`~sqlalchemy.sql.elements.quoted_name`.
+     :param postgresql_using: String argument which will indicate a
+      SQL expression to render within the Postgresql-specific USING clause
+      within ALTER COLUMN.  This string is taken directly as raw SQL which
+      must explicitly include any necessary quoting or escaping of tokens
+      within the expression.
+ 
+     """
+ 
+
254
+ @contextmanager
255
+ def batch_alter_table(
256
+ table_name: str,
257
+ schema: Optional[str] = None,
258
+ recreate: Literal["auto", "always", "never"] = "auto",
259
+ partial_reordering: Optional[Tuple[Any, ...]] = None,
260
+ copy_from: Optional[Table] = None,
261
+ table_args: Tuple[Any, ...] = (),
262
+ table_kwargs: Mapping[str, Any] = immutabledict({}),
263
+ reflect_args: Tuple[Any, ...] = (),
264
+ reflect_kwargs: Mapping[str, Any] = immutabledict({}),
265
+ naming_convention: Optional[Dict[str, str]] = None,
266
+ ) -> Iterator[BatchOperations]:
267
+ """Invoke a series of per-table migrations in batch.
268
+
269
+ Batch mode allows a series of operations specific to a table
270
+ to be syntactically grouped together, and allows for alternate
271
+ modes of table migration, in particular the "recreate" style of
272
+ migration required by SQLite.
273
+
274
+ "recreate" style is as follows:
275
+
276
+ 1. A new table is created with the new specification, based on the
277
+ migration directives within the batch, using a temporary name.
278
+
279
+ 2. the data copied from the existing table to the new table.
280
+
281
+ 3. the existing table is dropped.
282
+
283
+ 4. the new table is renamed to the existing table name.
284
+
285
+ The directive by default will only use "recreate" style on the
286
+ SQLite backend, and only if directives are present which require
287
+ this form, e.g. anything other than ``add_column()``. The batch
288
+ operation on other backends will proceed using standard ALTER TABLE
289
+ operations.
290
+
291
+ The method is used as a context manager, which returns an instance
292
+ of :class:`.BatchOperations`; this object is the same as
293
+ :class:`.Operations` except that table names and schema names
294
+ are omitted. E.g.::
295
+
296
+ with op.batch_alter_table("some_table") as batch_op:
297
+ batch_op.add_column(Column("foo", Integer))
298
+ batch_op.drop_column("bar")
299
+
300
+ The operations within the context manager are invoked at once
301
+ when the context is ended. When run against SQLite, if the
302
+ migrations include operations not supported by SQLite's ALTER TABLE,
303
+ the entire table will be copied to a new one with the new
304
+ specification, moving all data across as well.
305
+
306
+ The copy operation by default uses reflection to retrieve the current
307
+ structure of the table, and therefore :meth:`.batch_alter_table`
308
+ in this mode requires that the migration is run in "online" mode.
309
+ The ``copy_from`` parameter may be passed which refers to an existing
310
+ :class:`.Table` object, which will bypass this reflection step.
311
+
312
+ .. note:: The table copy operation will currently not copy
313
+ CHECK constraints, and may not copy UNIQUE constraints that are
314
+ unnamed, as is possible on SQLite. See the section
315
+ :ref:`sqlite_batch_constraints` for workarounds.
316
+
317
+ :param table_name: name of table
318
+ :param schema: optional schema name.
319
+ :param recreate: under what circumstances the table should be
320
+ recreated. At its default of ``"auto"``, the SQLite dialect will
321
+ recreate the table if any operations other than ``add_column()``,
322
+ ``create_index()``, or ``drop_index()`` are
323
+ present. Other options include ``"always"`` and ``"never"``.
324
+ :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
325
+ that will act as the structure of the table being copied. If omitted,
326
+ table reflection is used to retrieve the structure of the table.
327
+
328
+ .. seealso::
329
+
330
+ :ref:`batch_offline_mode`
331
+
332
+ :paramref:`~.Operations.batch_alter_table.reflect_args`
333
+
334
+ :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
335
+
336
+ :param reflect_args: a sequence of additional positional arguments that
337
+ will be applied to the table structure being reflected / copied;
338
+ this may be used to pass column and constraint overrides to the
339
+ table that will be reflected, in lieu of passing the whole
340
+ :class:`~sqlalchemy.schema.Table` using
341
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
342
+ :param reflect_kwargs: a dictionary of additional keyword arguments
343
+ that will be applied to the table structure being copied; this may be
344
+ used to pass additional table and reflection options to the table that
345
+ will be reflected, in lieu of passing the whole
346
+ :class:`~sqlalchemy.schema.Table` using
347
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
348
+ :param table_args: a sequence of additional positional arguments that
349
+ will be applied to the new :class:`~sqlalchemy.schema.Table` when
350
+ created, in addition to those copied from the source table.
351
+ This may be used to provide additional constraints such as CHECK
352
+ constraints that may not be reflected.
353
+ :param table_kwargs: a dictionary of additional keyword arguments
354
+ that will be applied to the new :class:`~sqlalchemy.schema.Table`
355
+ when created, in addition to those copied from the source table.
356
+ This may be used to provide for additional table options that may
357
+ not be reflected.
358
+ :param naming_convention: a naming convention dictionary of the form
359
+ described at :ref:`autogen_naming_conventions` which will be applied
360
+ to the :class:`~sqlalchemy.schema.MetaData` during the reflection
361
+ process. This is typically required if one wants to drop SQLite
362
+ constraints, as these constraints will not have names when
363
+ reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
364
+
365
+ .. seealso::
366
+
367
+ :ref:`dropping_sqlite_foreign_keys`
368
+
369
+ :param partial_reordering: a list of tuples, each suggesting a desired
370
+ ordering of two or more columns in the newly created table. Requires
371
+ that :paramref:`.batch_alter_table.recreate` is set to ``"always"``.
372
+ Examples, given a table with columns "a", "b", "c", and "d":
373
+
374
+ Specify the order of all columns::
375
+
376
+ with op.batch_alter_table(
377
+ "some_table",
378
+ recreate="always",
379
+ partial_reordering=[("c", "d", "a", "b")],
380
+ ) as batch_op:
381
+ pass
382
+
383
+ Ensure "d" appears before "c", and "b", appears before "a"::
384
+
385
+ with op.batch_alter_table(
386
+ "some_table",
387
+ recreate="always",
388
+ partial_reordering=[("d", "c"), ("b", "a")],
389
+ ) as batch_op:
390
+ pass
391
+
392
+ The ordering of columns not included in the partial_reordering
393
+ set is undefined. Therefore it is best to specify the complete
394
+ ordering of all columns for best results.
395
+
396
+ .. note:: batch mode requires SQLAlchemy 0.8 or above.
397
+
398
+ .. seealso::
399
+
400
+ :ref:`batch_migrations`
401
+
402
+ """
403
+
404
+ def bulk_insert(
405
+ table: Union[Table, TableClause],
406
+ rows: List[Dict[str, Any]],
407
+ *,
408
+ multiinsert: bool = True,
409
+ ) -> None:
410
+ """Issue a "bulk insert" operation using the current
411
+ migration context.
412
+
413
+ This provides a means of representing an INSERT of multiple rows
414
+ which works equally well in the context of executing on a live
415
+ connection as well as that of generating a SQL script. In the
416
+ case of a SQL script, the values are rendered inline into the
417
+ statement.
418
+
419
+ e.g.::
420
+
421
+ from alembic import op
422
+ from datetime import date
423
+ from sqlalchemy.sql import table, column
424
+ from sqlalchemy import String, Integer, Date
425
+
426
+ # Create an ad-hoc table to use for the insert statement.
427
+ accounts_table = table(
428
+ "account",
429
+ column("id", Integer),
430
+ column("name", String),
431
+ column("create_date", Date),
432
+ )
433
+
434
+ op.bulk_insert(
435
+ accounts_table,
436
+ [
437
+ {
438
+ "id": 1,
439
+ "name": "John Smith",
440
+ "create_date": date(2010, 10, 5),
441
+ },
442
+ {
443
+ "id": 2,
444
+ "name": "Ed Williams",
445
+ "create_date": date(2007, 5, 27),
446
+ },
447
+ {
448
+ "id": 3,
449
+ "name": "Wendy Jones",
450
+ "create_date": date(2008, 8, 15),
451
+ },
452
+ ],
453
+ )
454
+
455
+ When using --sql mode, some datatypes may not render inline
456
+ automatically, such as dates and other special types. When this
457
+ issue is present, :meth:`.Operations.inline_literal` may be used::
458
+
459
+ op.bulk_insert(
460
+ accounts_table,
461
+ [
462
+ {
463
+ "id": 1,
464
+ "name": "John Smith",
465
+ "create_date": op.inline_literal("2010-10-05"),
466
+ },
467
+ {
468
+ "id": 2,
469
+ "name": "Ed Williams",
470
+ "create_date": op.inline_literal("2007-05-27"),
471
+ },
472
+ {
473
+ "id": 3,
474
+ "name": "Wendy Jones",
475
+ "create_date": op.inline_literal("2008-08-15"),
476
+ },
477
+ ],
478
+ multiinsert=False,
479
+ )
480
+
481
+ When using :meth:`.Operations.inline_literal` in conjunction with
482
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
483
+ in "online" (e.g. non --sql) mode, the
484
+ :paramref:`~.Operations.bulk_insert.multiinsert`
485
+ flag should be set to ``False``, which will have the effect of
486
+ individual INSERT statements being emitted to the database, each
487
+ with a distinct VALUES clause, so that the "inline" values can
488
+ still be rendered, rather than attempting to pass the values
489
+ as bound parameters.
490
+
491
+ :param table: a table object which represents the target of the INSERT.
492
+
493
+ :param rows: a list of dictionaries indicating rows.
494
+
495
+ :param multiinsert: when at its default of True and --sql mode is not
496
+ enabled, the INSERT statement will be executed using
497
+ "executemany()" style, where all elements in the list of
498
+ dictionaries are passed as bound parameters in a single
499
+ list. Setting this to False results in individual INSERT
500
+ statements being emitted per parameter set, and is needed
501
+ in those cases where non-literal values are present in the
502
+ parameter sets.
503
+
504
+ """
505
+
506
+ def create_check_constraint(
507
+ constraint_name: Optional[str],
508
+ table_name: str,
509
+ condition: Union[str, ColumnElement[bool], TextClause],
510
+ *,
511
+ schema: Optional[str] = None,
512
+ **kw: Any,
513
+ ) -> None:
514
+ """Issue a "create check constraint" instruction using the
515
+ current migration context.
516
+
517
+ e.g.::
518
+
519
+ from alembic import op
520
+ from sqlalchemy.sql import column, func
521
+
522
+ op.create_check_constraint(
523
+ "ck_user_name_len",
524
+ "user",
525
+ func.len(column("name")) > 5,
526
+ )
527
+
528
+ CHECK constraints are usually against a SQL expression, so ad-hoc
529
+ table metadata is usually needed. The function will convert the given
530
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
531
+ to an anonymous table in order to emit the CREATE statement.
532
+
533
+ :param name: Name of the check constraint. The name is necessary
534
+ so that an ALTER statement can be emitted. For setups that
535
+ use an automated naming scheme such as that described at
536
+ :ref:`sqla:constraint_naming_conventions`,
537
+ ``name`` here can be ``None``, as the event listener will
538
+ apply the name to the constraint object when it is associated
539
+ with the table.
540
+ :param table_name: String name of the source table.
541
+ :param condition: SQL expression that's the condition of the
542
+ constraint. Can be a string or SQLAlchemy expression language
543
+ structure.
544
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
545
+ NOT DEFERRABLE when issuing DDL for this constraint.
546
+ :param initially: optional string. If set, emit INITIALLY <value>
547
+ when issuing DDL for this constraint.
548
+ :param schema: Optional schema name to operate within. To control
549
+ quoting of the schema outside of the default behavior, use
550
+ the SQLAlchemy construct
551
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
552
+
553
+ """
554
+
555
+ def create_exclude_constraint(
556
+ constraint_name: str, table_name: str, *elements: Any, **kw: Any
557
+ ) -> Optional[Table]:
558
+ """Issue an alter to create an EXCLUDE constraint using the
559
+ current migration context.
560
+
561
+ .. note:: This method is Postgresql specific, and additionally
562
+ requires at least SQLAlchemy 1.0.
563
+
564
+ e.g.::
565
+
566
+ from alembic import op
567
+
568
+ op.create_exclude_constraint(
569
+ "user_excl",
570
+ "user",
571
+ ("period", "&&"),
572
+ ("group", "="),
573
+ where=("group != 'some group'"),
574
+ )
575
+
576
+ Note that the expressions work the same way as that of
577
+ the ``ExcludeConstraint`` object itself; if plain strings are
578
+ passed, quoting rules must be applied manually.
579
+
580
+ :param name: Name of the constraint.
581
+ :param table_name: String name of the source table.
582
+ :param elements: exclude conditions.
583
+ :param where: SQL expression or SQL string with optional WHERE
584
+ clause.
585
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
586
+ NOT DEFERRABLE when issuing DDL for this constraint.
587
+ :param initially: optional string. If set, emit INITIALLY <value>
588
+ when issuing DDL for this constraint.
589
+ :param schema: Optional schema name to operate within.
590
+
591
+ """
592
+
593
+ def create_foreign_key(
594
+ constraint_name: Optional[str],
595
+ source_table: str,
596
+ referent_table: str,
597
+ local_cols: List[str],
598
+ remote_cols: List[str],
599
+ *,
600
+ onupdate: Optional[str] = None,
601
+ ondelete: Optional[str] = None,
602
+ deferrable: Optional[bool] = None,
603
+ initially: Optional[str] = None,
604
+ match: Optional[str] = None,
605
+ source_schema: Optional[str] = None,
606
+ referent_schema: Optional[str] = None,
607
+ **dialect_kw: Any,
608
+ ) -> None:
609
+ """Issue a "create foreign key" instruction using the
610
+ current migration context.
611
+
612
+ e.g.::
613
+
614
+ from alembic import op
615
+
616
+ op.create_foreign_key(
617
+ "fk_user_address",
618
+ "address",
619
+ "user",
620
+ ["user_id"],
621
+ ["id"],
622
+ )
623
+
624
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
625
+ containing the necessary columns, then generates a new
626
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
627
+ object which it then associates with the
628
+ :class:`~sqlalchemy.schema.Table`.
629
+ Any event listeners associated with this action will be fired
630
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
631
+ construct is ultimately used to generate the ALTER statement.
632
+
633
+ :param constraint_name: Name of the foreign key constraint. The name
634
+ is necessary so that an ALTER statement can be emitted. For setups
635
+ that use an automated naming scheme such as that described at
636
+ :ref:`sqla:constraint_naming_conventions`,
637
+ ``name`` here can be ``None``, as the event listener will
638
+ apply the name to the constraint object when it is associated
639
+ with the table.
640
+ :param source_table: String name of the source table.
641
+ :param referent_table: String name of the destination table.
642
+ :param local_cols: a list of string column names in the
643
+ source table.
644
+ :param remote_cols: a list of string column names in the
645
+ remote table.
646
+ :param onupdate: Optional string. If set, emit ON UPDATE <value> when
647
+ issuing DDL for this constraint. Typical values include CASCADE,
648
+ DELETE and RESTRICT.
649
+ :param ondelete: Optional string. If set, emit ON DELETE <value> when
650
+ issuing DDL for this constraint. Typical values include CASCADE,
651
+ DELETE and RESTRICT.
652
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
653
+ DEFERRABLE when issuing DDL for this constraint.
654
+ :param source_schema: Optional schema name of the source table.
655
+ :param referent_schema: Optional schema name of the destination table.
656
+
657
+ """
658
+
659
+ def create_index(
660
+ index_name: Optional[str],
661
+ table_name: str,
662
+ columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
663
+ *,
664
+ schema: Optional[str] = None,
665
+ unique: bool = False,
666
+ if_not_exists: Optional[bool] = None,
667
+ **kw: Any,
668
+ ) -> None:
669
+ r"""Issue a "create index" instruction using the current
670
+ migration context.
671
+
672
+ e.g.::
673
+
674
+ from alembic import op
675
+
676
+ op.create_index("ik_test", "t1", ["foo", "bar"])
677
+
678
+ Functional indexes can be produced by using the
679
+ :func:`sqlalchemy.sql.expression.text` construct::
680
+
681
+ from alembic import op
682
+ from sqlalchemy import text
683
+
684
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
685
+
686
+ :param index_name: name of the index.
687
+ :param table_name: name of the owning table.
688
+ :param columns: a list consisting of string column names and/or
689
+ :func:`~sqlalchemy.sql.expression.text` constructs.
690
+ :param schema: Optional schema name to operate within. To control
691
+ quoting of the schema outside of the default behavior, use
692
+ the SQLAlchemy construct
693
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
694
+ :param unique: If True, create a unique index.
695
+
696
+ :param quote: Force quoting of this column's name on or off,
697
+ corresponding to ``True`` or ``False``. When left at its default
698
+ of ``None``, the column identifier will be quoted according to
699
+ whether the name is case sensitive (identifiers with at least one
700
+ upper case character are treated as case sensitive), or if it's a
701
+ reserved word. This flag is only needed to force quoting of a
702
+ reserved word which is not known by the SQLAlchemy dialect.
703
+
704
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
705
+ creating the new index.
706
+
707
+ .. versionadded:: 1.12.0
708
+
709
+ :param \**kw: Additional keyword arguments not mentioned above are
710
+ dialect specific, and passed in the form
711
+ ``<dialectname>_<argname>``.
712
+ See the documentation regarding an individual dialect at
713
+ :ref:`dialect_toplevel` for detail on documented arguments.
714
+
715
+ """
716
+
717
+ def create_primary_key(
718
+ constraint_name: Optional[str],
719
+ table_name: str,
720
+ columns: List[str],
721
+ *,
722
+ schema: Optional[str] = None,
723
+ ) -> None:
724
+ """Issue a "create primary key" instruction using the current
725
+ migration context.
726
+
727
+ e.g.::
728
+
729
+ from alembic import op
730
+
731
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
732
+
733
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
734
+ containing the necessary columns, then generates a new
735
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
736
+ object which it then associates with the
737
+ :class:`~sqlalchemy.schema.Table`.
738
+ Any event listeners associated with this action will be fired
739
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
740
+ construct is ultimately used to generate the ALTER statement.
741
+
742
+ :param constraint_name: Name of the primary key constraint. The name
743
+ is necessary so that an ALTER statement can be emitted. For setups
744
+ that use an automated naming scheme such as that described at
745
+ :ref:`sqla:constraint_naming_conventions`
746
+ ``name`` here can be ``None``, as the event listener will
747
+ apply the name to the constraint object when it is associated
748
+ with the table.
749
+ :param table_name: String name of the target table.
750
+ :param columns: a list of string column names to be applied to the
751
+ primary key constraint.
752
+ :param schema: Optional schema name to operate within. To control
753
+ quoting of the schema outside of the default behavior, use
754
+ the SQLAlchemy construct
755
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
756
+
757
+ """
758
+
759
+ def create_table(
760
+ table_name: str,
761
+ *columns: SchemaItem,
762
+ if_not_exists: Optional[bool] = None,
763
+ **kw: Any,
764
+ ) -> Table:
765
+ r"""Issue a "create table" instruction using the current migration
766
+ context.
767
+
768
+ This directive receives an argument list similar to that of the
769
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
770
+ metadata::
771
+
772
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
773
+ from alembic import op
774
+
775
+ op.create_table(
776
+ "account",
777
+ Column("id", INTEGER, primary_key=True),
778
+ Column("name", VARCHAR(50), nullable=False),
779
+ Column("description", NVARCHAR(200)),
780
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
781
+ )
782
+
783
+ Note that :meth:`.create_table` accepts
784
+ :class:`~sqlalchemy.schema.Column`
785
+ constructs directly from the SQLAlchemy library. In particular,
786
+ default values to be created on the database side are
787
+ specified using the ``server_default`` parameter, and not
788
+ ``default`` which only specifies Python-side defaults::
789
+
790
+ from alembic import op
791
+ from sqlalchemy import Column, TIMESTAMP, func
792
+
793
+ # specify "DEFAULT NOW" along with the "timestamp" column
794
+ op.create_table(
795
+ "account",
796
+ Column("id", INTEGER, primary_key=True),
797
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
798
+ )
799
+
800
+ The function also returns a newly created
801
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
802
+ specification given, which is suitable for
803
+ immediate SQL operations, in particular
804
+ :meth:`.Operations.bulk_insert`::
805
+
806
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
807
+ from alembic import op
808
+
809
+ account_table = op.create_table(
810
+ "account",
811
+ Column("id", INTEGER, primary_key=True),
812
+ Column("name", VARCHAR(50), nullable=False),
813
+ Column("description", NVARCHAR(200)),
814
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
815
+ )
816
+
817
+ op.bulk_insert(
818
+ account_table,
819
+ [
820
+ {"name": "A1", "description": "account 1"},
821
+ {"name": "A2", "description": "account 2"},
822
+ ],
823
+ )
824
+
825
+ :param table_name: Name of the table
826
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
827
+ objects within
828
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
829
+ objects
830
+ and :class:`~.sqlalchemy.schema.Index` objects.
831
+ :param schema: Optional schema name to operate within. To control
832
+ quoting of the schema outside of the default behavior, use
833
+ the SQLAlchemy construct
834
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
835
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
836
+ creating the new table.
837
+
838
+ .. versionadded:: 1.13.3
839
+ :param \**kw: Other keyword arguments are passed to the underlying
840
+ :class:`sqlalchemy.schema.Table` object created for the command.
841
+
842
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
843
+ to the parameters given.
844
+
845
+ """
846
+
847
+ def create_table_comment(
848
+ table_name: str,
849
+ comment: Optional[str],
850
+ *,
851
+ existing_comment: Optional[str] = None,
852
+ schema: Optional[str] = None,
853
+ ) -> None:
854
+ """Emit a COMMENT ON operation to set the comment for a table.
855
+
856
+ :param table_name: string name of the target table.
857
+ :param comment: string value of the comment being registered against
858
+ the specified table.
859
+ :param existing_comment: String value of a comment
860
+ already registered on the specified table, used within autogenerate
861
+ so that the operation is reversible, but not required for direct
862
+ use.
863
+
864
+ .. seealso::
865
+
866
+ :meth:`.Operations.drop_table_comment`
867
+
868
+ :paramref:`.Operations.alter_column.comment`
869
+
870
+ """
871
+
872
+ def create_unique_constraint(
873
+ constraint_name: Optional[str],
874
+ table_name: str,
875
+ columns: Sequence[str],
876
+ *,
877
+ schema: Optional[str] = None,
878
+ **kw: Any,
879
+ ) -> Any:
880
+ """Issue a "create unique constraint" instruction using the
881
+ current migration context.
882
+
883
+ e.g.::
884
+
885
+ from alembic import op
886
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
887
+
888
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
889
+ containing the necessary columns, then generates a new
890
+ :class:`~sqlalchemy.schema.UniqueConstraint`
891
+ object which it then associates with the
892
+ :class:`~sqlalchemy.schema.Table`.
893
+ Any event listeners associated with this action will be fired
894
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
895
+ construct is ultimately used to generate the ALTER statement.
896
+
897
+ :param name: Name of the unique constraint. The name is necessary
898
+ so that an ALTER statement can be emitted. For setups that
899
+ use an automated naming scheme such as that described at
900
+ :ref:`sqla:constraint_naming_conventions`,
901
+ ``name`` here can be ``None``, as the event listener will
902
+ apply the name to the constraint object when it is associated
903
+ with the table.
904
+ :param table_name: String name of the source table.
905
+ :param columns: a list of string column names in the
906
+ source table.
907
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
908
+ NOT DEFERRABLE when issuing DDL for this constraint.
909
+ :param initially: optional string. If set, emit INITIALLY <value>
910
+ when issuing DDL for this constraint.
911
+ :param schema: Optional schema name to operate within. To control
912
+ quoting of the schema outside of the default behavior, use
913
+ the SQLAlchemy construct
914
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
915
+
916
+ """
917
+
918
+ def drop_column(
919
+ table_name: str,
920
+ column_name: str,
921
+ *,
922
+ schema: Optional[str] = None,
923
+ **kw: Any,
924
+ ) -> None:
925
+ """Issue a "drop column" instruction using the current
926
+ migration context.
927
+
928
+ e.g.::
929
+
930
+ drop_column("organization", "account_id")
931
+
932
+ :param table_name: name of table
933
+ :param column_name: name of column
934
+ :param schema: Optional schema name to operate within. To control
935
+ quoting of the schema outside of the default behavior, use
936
+ the SQLAlchemy construct
937
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
938
+ :param if_exists: If True, adds IF EXISTS operator when
939
+ dropping the new column for compatible dialects
940
+
941
+ .. versionadded:: 1.16.0
942
+
943
+ :param mssql_drop_check: Optional boolean. When ``True``, on
944
+ Microsoft SQL Server only, first
945
+ drop the CHECK constraint on the column using a
946
+ SQL-script-compatible
947
+ block that selects into a @variable from sys.check_constraints,
948
+ then exec's a separate DROP CONSTRAINT for that constraint.
949
+ :param mssql_drop_default: Optional boolean. When ``True``, on
950
+ Microsoft SQL Server only, first
951
+ drop the DEFAULT constraint on the column using a
952
+ SQL-script-compatible
953
+ block that selects into a @variable from sys.default_constraints,
954
+ then exec's a separate DROP CONSTRAINT for that default.
955
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
956
+ Microsoft SQL Server only, first
957
+ drop a single FOREIGN KEY constraint on the column using a
958
+ SQL-script-compatible
959
+ block that selects into a @variable from
960
+ sys.foreign_keys/sys.foreign_key_columns,
961
+ then exec's a separate DROP CONSTRAINT for that default. Only
962
+ works if the column has exactly one FK constraint which refers to
963
+ it, at the moment.
964
+ """
965
+
966
+ def drop_constraint(
967
+ constraint_name: str,
968
+ table_name: str,
969
+ type_: Optional[str] = None,
970
+ *,
971
+ schema: Optional[str] = None,
972
+ if_exists: Optional[bool] = None,
973
+ ) -> None:
974
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
975
+
976
+ :param constraint_name: name of the constraint.
977
+ :param table_name: table name.
978
+ :param type\_: optional, required on MySQL. can be
979
+ 'foreignkey', 'primary', 'unique', or 'check'.
980
+ :param schema: Optional schema name to operate within. To control
981
+ quoting of the schema outside of the default behavior, use
982
+ the SQLAlchemy construct
983
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
984
+ :param if_exists: If True, adds IF EXISTS operator when
985
+ dropping the constraint
986
+
987
+ .. versionadded:: 1.16.0
988
+
989
+ """
990
+
991
+ def drop_index(
992
+ index_name: str,
993
+ table_name: Optional[str] = None,
994
+ *,
995
+ schema: Optional[str] = None,
996
+ if_exists: Optional[bool] = None,
997
+ **kw: Any,
998
+ ) -> None:
999
+ r"""Issue a "drop index" instruction using the current
1000
+ migration context.
1001
+
1002
+ e.g.::
1003
+
1004
+ drop_index("accounts")
1005
+
1006
+ :param index_name: name of the index.
1007
+ :param table_name: name of the owning table. Some
1008
+ backends such as Microsoft SQL Server require this.
1009
+ :param schema: Optional schema name to operate within. To control
1010
+ quoting of the schema outside of the default behavior, use
1011
+ the SQLAlchemy construct
1012
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1013
+
1014
+ :param if_exists: If True, adds IF EXISTS operator when
1015
+ dropping the index.
1016
+
1017
+ .. versionadded:: 1.12.0
1018
+
1019
+ :param \**kw: Additional keyword arguments not mentioned above are
1020
+ dialect specific, and passed in the form
1021
+ ``<dialectname>_<argname>``.
1022
+ See the documentation regarding an individual dialect at
1023
+ :ref:`dialect_toplevel` for detail on documented arguments.
1024
+
1025
+ """
1026
+
1027
+ def drop_table(
1028
+ table_name: str,
1029
+ *,
1030
+ schema: Optional[str] = None,
1031
+ if_exists: Optional[bool] = None,
1032
+ **kw: Any,
1033
+ ) -> None:
1034
+ r"""Issue a "drop table" instruction using the current
1035
+ migration context.
1036
+
1037
+
1038
+ e.g.::
1039
+
1040
+ drop_table("accounts")
1041
+
1042
+ :param table_name: Name of the table
1043
+ :param schema: Optional schema name to operate within. To control
1044
+ quoting of the schema outside of the default behavior, use
1045
+ the SQLAlchemy construct
1046
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1047
+ :param if_exists: If True, adds IF EXISTS operator when
1048
+ dropping the table.
1049
+
1050
+ .. versionadded:: 1.13.3
1051
+ :param \**kw: Other keyword arguments are passed to the underlying
1052
+ :class:`sqlalchemy.schema.Table` object created for the command.
1053
+
1054
+ """
1055
+
1056
+ def drop_table_comment(
1057
+ table_name: str,
1058
+ *,
1059
+ existing_comment: Optional[str] = None,
1060
+ schema: Optional[str] = None,
1061
+ ) -> None:
1062
+ """Issue a "drop table comment" operation to
1063
+ remove an existing comment set on a table.
1064
+
1065
+ :param table_name: string name of the target table.
1066
+ :param existing_comment: An optional string value of a comment already
1067
+ registered on the specified table.
1068
+
1069
+ .. seealso::
1070
+
1071
+ :meth:`.Operations.create_table_comment`
1072
+
1073
+ :paramref:`.Operations.alter_column.comment`
1074
+
1075
+ """
1076
+
1077
+ def execute(
1078
+ sqltext: Union[Executable, str],
1079
+ *,
1080
+ execution_options: Optional[dict[str, Any]] = None,
1081
+ ) -> None:
1082
+ r"""Execute the given SQL using the current migration context.
1083
+
1084
+ The given SQL can be a plain string, e.g.::
1085
+
1086
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
1087
+
1088
+ Or it can be any kind of Core SQL Expression construct, such as
1089
+ below where we use an update construct::
1090
+
1091
+ from sqlalchemy.sql import table, column
1092
+ from sqlalchemy import String
1093
+ from alembic import op
1094
+
1095
+ account = table("account", column("name", String))
1096
+ op.execute(
1097
+ account.update()
1098
+ .where(account.c.name == op.inline_literal("account 1"))
1099
+ .values({"name": op.inline_literal("account 2")})
1100
+ )
1101
+
1102
+ Above, we made use of the SQLAlchemy
1103
+ :func:`sqlalchemy.sql.expression.table` and
1104
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
1105
+ ad-hoc table construct just for our UPDATE statement. A full
1106
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
1107
+ fine as well, though note it's a recommended practice to at least
1108
+ ensure the definition of a table is self-contained within the migration
1109
+ script, rather than imported from a module that may break compatibility
1110
+ with older migrations.
1111
+
1112
+ In a SQL script context, the statement is emitted directly to the
1113
+ output stream. There is *no* return result, however, as this
1114
+ function is oriented towards generating a change script
1115
+ that can run in "offline" mode. Additionally, parameterized
1116
+ statements are discouraged here, as they *will not work* in offline
1117
+ mode. Above, we use :meth:`.inline_literal` where parameters are
1118
+ to be used.
1119
+
1120
+ For full interaction with a connected database where parameters can
1121
+ also be used normally, use the "bind" available from the context::
1122
+
1123
+ from alembic import op
1124
+
1125
+ connection = op.get_bind()
1126
+
1127
+ connection.execute(
1128
+ account.update()
1129
+ .where(account.c.name == "account 1")
1130
+ .values({"name": "account 2"})
1131
+ )
1132
+
1133
+ Additionally, when passing the statement as a plain string, it is first
1134
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
1135
+ before being passed along. In the less likely case that the
1136
+ literal SQL string contains a colon, it must be escaped with a
1137
+ backslash, as::
1138
+
1139
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
1140
+
1141
+
1142
+ :param sqltext: Any legal SQLAlchemy expression, including:
1143
+
1144
+ * a string
1145
+ * a :func:`sqlalchemy.sql.expression.text` construct.
1146
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
1147
+ * a :func:`sqlalchemy.sql.expression.update` construct.
1148
+ * a :func:`sqlalchemy.sql.expression.delete` construct.
1149
+ * Any "executable" described in SQLAlchemy Core documentation,
1150
+ noting that no result set is returned.
1151
+
1152
+ .. note:: when passing a plain string, the statement is coerced into
1153
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
1154
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
1155
+ To avoid this, ensure that colon symbols are escaped, e.g.
1156
+ ``\:foo``.
1157
+
1158
+ :param execution_options: Optional dictionary of
1159
+ execution options, will be passed to
1160
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
1161
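+
+ For illustration, a minimal sketch of passing execution options; the
+ statement and option value here are hypothetical::
+
+     op.execute(
+         "DELETE FROM account",  # hypothetical statement
+         execution_options={"isolation_level": "AUTOCOMMIT"},
+     )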
+ """
1162
+
1163
+ def f(name: str) -> conv:
1164
+ """Indicate a string name that has already had a naming convention
1165
+ applied to it.
1166
+
1167
+ This feature combines with the SQLAlchemy ``naming_convention`` feature
1168
+ to disambiguate constraint names that have already had naming
1169
+ conventions applied to them, versus those that have not. This is
1170
+ necessary in the case that the ``"%(constraint_name)s"`` token
1171
+ is used within a naming convention, so that it can be identified
1172
+ that this particular name should remain fixed.
1173
+
1174
+ If the :meth:`.Operations.f` is used on a constraint, the naming
1175
+ convention will not take effect::
1176
+
1177
+ op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x")))
1178
+
1179
+ Above, the CHECK constraint generated will have the name
1180
+ ``ck_bool_t_x`` regardless of whether or not a naming convention is
1181
+ in use.
1182
+
1183
+ Alternatively, if a naming convention is in use, and 'f' is not used,
1184
+ names will be converted along conventions. If the ``target_metadata``
1185
+ contains the naming convention
1186
+ ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
1187
+ output of the following::
1188
+
1189
+ op.add_column("t", "x", Boolean(name="x"))
1190
+
1191
+ will be::
1192
+
1193
+ CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
1194
+
1195
+ The function is rendered in the output of autogenerate when
1196
+ a particular constraint name is already converted.
1197
+
1198
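+ For illustration, a minimal sketch using a pre-converted name while
+ dropping a constraint (the constraint and table names here are
+ hypothetical)::
+
+     # op.f() marks the name as final; no convention is re-applied
+     op.drop_constraint(op.f("ck_bool_t_x"), "t", type_="check")
+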
+ """
1199
+
1200
+ def get_bind() -> Connection:
1201
+ """Return the current 'bind'.
1202
+
1203
+ Under normal circumstances, this is the
1204
+ :class:`~sqlalchemy.engine.Connection` currently being used
1205
+ to emit SQL to the database.
1206
+
1207
+ In a SQL script ("offline") context, this value is ``None``.
1208
+
1209
+ """
1210
+
1211
+ def get_context() -> MigrationContext:
1212
+ """Return the :class:`.MigrationContext` object that's
1213
+ currently in use.
1214
+
1215
+ """
1216
+
1217
+ def implementation_for(op_cls: Any) -> Callable[[_C], _C]:
1218
+ """Register an implementation for a given :class:`.MigrateOperation`.
1219
+
1220
+ This is part of the operation extensibility API.
1221
+
1222
+ .. seealso::
1223
+
1224
+ :ref:`operation_plugins` - example of use
1225
+
1226
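+ For illustration, a minimal sketch; ``CreateSequenceOp`` is a
+ hypothetical :class:`.MigrateOperation` subclass registered via
+ :meth:`.Operations.register_operation`::
+
+     from alembic.operations import Operations
+
+     @Operations.implementation_for(CreateSequenceOp)
+     def create_sequence(operations, operation):
+         # emit the DDL for the hypothetical custom op
+         operations.execute("CREATE SEQUENCE %s" % operation.sequence_name)
+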
+ """
1227
+
1228
+ def inline_literal(
1229
+ value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
1230
+ ) -> _literal_bindparam:
1231
+ r"""Produce an 'inline literal' expression, suitable for
1232
+ using in an INSERT, UPDATE, or DELETE statement.
1233
+
1234
+ When using Alembic in "offline" mode, CRUD operations
1235
+ aren't compatible with SQLAlchemy's default behavior surrounding
1236
+ literal values,
1237
+ which is that they are converted into bound values and passed
1238
+ separately into the ``execute()`` method of the DBAPI cursor.
1239
+ An offline SQL
1240
+ script needs to have these rendered inline. While it should
1241
+ always be noted that inline literal values are an **enormous**
1242
+ security hole in an application that handles untrusted input,
1243
+ a schema migration is not run in this context, so
1244
+ literals are safe to render inline, with the caveat that
1245
+ advanced types like dates may not be supported directly
1246
+ by SQLAlchemy.
1247
+
1248
+ See :meth:`.Operations.execute` for an example usage of
1249
+ :meth:`.Operations.inline_literal`.
1250
+
1251
+ The environment can also be configured to attempt to render
1252
+ "literal" values inline automatically, for those simple types
1253
+ that are supported by the dialect; see
1254
+ :paramref:`.EnvironmentContext.configure.literal_binds` for this
1255
+ more recently added feature.
1256
+
1257
+ :param value: The value to render. Strings, integers, and simple
1258
+ numerics should be supported. Other types like boolean,
1259
+ dates, etc. may or may not be supported yet by various
1260
+ backends.
1261
+ :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
1262
+ subclass stating the type of this value. In SQLAlchemy
1263
+ expressions, this is usually derived automatically
1264
+ from the Python type of the value itself, as well as
1265
+ based on the context in which the value is used.
1266
+
1267
+ .. seealso::
1268
+
1269
+ :paramref:`.EnvironmentContext.configure.literal_binds`
1270
+
1271
+ """
1272
+
1273
+ @overload
1274
+ def invoke(operation: CreateTableOp) -> Table: ...
1275
+ @overload
1276
+ def invoke(
1277
+ operation: Union[
1278
+ AddConstraintOp,
1279
+ DropConstraintOp,
1280
+ CreateIndexOp,
1281
+ DropIndexOp,
1282
+ AddColumnOp,
1283
+ AlterColumnOp,
1284
+ AlterTableOp,
1285
+ CreateTableCommentOp,
1286
+ DropTableCommentOp,
1287
+ DropColumnOp,
1288
+ BulkInsertOp,
1289
+ DropTableOp,
1290
+ ExecuteSQLOp,
1291
+ ],
1292
+ ) -> None: ...
1293
+ @overload
1294
+ def invoke(operation: MigrateOperation) -> Any:
1295
+ """Given a :class:`.MigrateOperation`, invoke it in terms of
1296
+ this :class:`.Operations` instance.
1297
+
1298
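+
+ For illustration, a minimal sketch invoking an operation object
+ directly (the SQL here is hypothetical)::
+
+     from alembic.operations import ops
+
+     # equivalent to op.execute("DROP VIEW account_summary")
+     op.invoke(ops.ExecuteSQLOp("DROP VIEW account_summary"))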
+ """
1299
+
1300
+ def register_operation(
1301
+ name: str, sourcename: Optional[str] = None
1302
+ ) -> Callable[[Type[_T]], Type[_T]]:
1303
+ """Register a new operation for this class.
1304
+
1305
+ This method is normally used to add new operations
1306
+ to the :class:`.Operations` class, and possibly the
1307
+ :class:`.BatchOperations` class as well. All Alembic migration
1308
+ operations are implemented via this system, however the system
1309
+ is also available as a public API to facilitate adding custom
1310
+ operations.
1311
+
1312
+ .. seealso::
1313
+
1314
+ :ref:`operation_plugins`
1315
+
1316
+
1317
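+ For illustration, a condensed sketch following the pattern described
+ at :ref:`operation_plugins`; the operation name and class here are
+ hypothetical::
+
+     from alembic.operations import Operations, MigrateOperation
+
+     @Operations.register_operation("create_sequence")
+     class CreateSequenceOp(MigrateOperation):
+         def __init__(self, sequence_name):
+             self.sequence_name = sequence_name
+
+         @classmethod
+         def create_sequence(cls, operations, sequence_name):
+             # "operations" is the Operations proxy; hypothetical op
+             return operations.invoke(cls(sequence_name))
+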
+ """
1318
+
1319
+ def rename_table(
1320
+ old_table_name: str, new_table_name: str, *, schema: Optional[str] = None
1321
+ ) -> None:
1322
+ """Emit an ALTER TABLE to rename a table.
1323
+
1324
+ :param old_table_name: old name.
1325
+ :param new_table_name: new name.
1326
+ :param schema: Optional schema name to operate within. To control
1327
+ quoting of the schema outside of the default behavior, use
1328
+ the SQLAlchemy construct
1329
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1330
+
1331
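+ For illustration, a minimal call (the table names here are
+ hypothetical)::
+
+     op.rename_table("account", "account_archive")
+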
+ """
1332
+
1333
+ def run_async(
1334
+ async_function: Callable[..., Awaitable[_T]], *args: Any, **kw_args: Any
1335
+ ) -> _T:
1336
+ """Invoke the given asynchronous callable, passing an asynchronous
1337
+ :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
1338
+ argument.
1339
+
1340
+ This method allows calling async functions from within the
1341
+ synchronous ``upgrade()`` or ``downgrade()`` alembic migration
1342
+ method.
1343
+
1344
+ The async connection passed to the callable shares the same
1345
+ transaction as the connection running in the migration context.
1346
+
1347
+ Any additional arg or kw_arg passed to this function are passed
1348
+ to the provided async function.
1349
+
1350
+ .. versionadded:: 1.11
1351
+
1352
+ .. note::
1353
+
1354
+ This method can be called only when alembic is called using
1355
+ an async dialect.
1356
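+
+ For illustration, a minimal sketch; the helper function and SQL here
+ are hypothetical::
+
+     from sqlalchemy import text
+
+     async def _update_rows(connection):
+         # "connection" is an AsyncConnection sharing the migration's
+         # transaction
+         await connection.execute(text("UPDATE account SET active = 1"))
+
+     def upgrade():
+         op.run_async(_update_rows)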
+ """
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/__init__.py ADDED
@@ -0,0 +1,15 @@
1
+ from . import toimpl
2
+ from .base import AbstractOperations
3
+ from .base import BatchOperations
4
+ from .base import Operations
5
+ from .ops import MigrateOperation
6
+ from .ops import MigrationScript
7
+
8
+
9
+ __all__ = [
10
+ "AbstractOperations",
11
+ "Operations",
12
+ "BatchOperations",
13
+ "MigrateOperation",
14
+ "MigrationScript",
15
+ ]
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/base.py ADDED
@@ -0,0 +1,1923 @@
1
+ # mypy: allow-untyped-calls
2
+
3
+ from __future__ import annotations
4
+
5
+ from contextlib import contextmanager
6
+ import re
7
+ import textwrap
8
+ from typing import Any
9
+ from typing import Awaitable
10
+ from typing import Callable
11
+ from typing import Dict
12
+ from typing import Iterator
13
+ from typing import List # noqa
14
+ from typing import Mapping
15
+ from typing import NoReturn
16
+ from typing import Optional
17
+ from typing import overload
18
+ from typing import Sequence # noqa
19
+ from typing import Tuple
20
+ from typing import Type # noqa
21
+ from typing import TYPE_CHECKING
22
+ from typing import TypeVar
23
+ from typing import Union
24
+
25
+ from sqlalchemy.sql.elements import conv
26
+
27
+ from . import batch
28
+ from . import schemaobj
29
+ from .. import util
30
+ from ..util import sqla_compat
31
+ from ..util.compat import formatannotation_fwdref
32
+ from ..util.compat import inspect_formatargspec
33
+ from ..util.compat import inspect_getfullargspec
34
+ from ..util.sqla_compat import _literal_bindparam
35
+
36
+
37
+ if TYPE_CHECKING:
38
+ from typing import Literal
39
+
40
+ from sqlalchemy import Table
41
+ from sqlalchemy.engine import Connection
42
+ from sqlalchemy.sql import Executable
43
+ from sqlalchemy.sql.expression import ColumnElement
44
+ from sqlalchemy.sql.expression import TableClause
45
+ from sqlalchemy.sql.expression import TextClause
46
+ from sqlalchemy.sql.schema import Column
47
+ from sqlalchemy.sql.schema import Computed
48
+ from sqlalchemy.sql.schema import Identity
49
+ from sqlalchemy.sql.schema import SchemaItem
50
+ from sqlalchemy.types import TypeEngine
51
+
52
+ from .batch import BatchOperationsImpl
53
+ from .ops import AddColumnOp
54
+ from .ops import AddConstraintOp
55
+ from .ops import AlterColumnOp
56
+ from .ops import AlterTableOp
57
+ from .ops import BulkInsertOp
58
+ from .ops import CreateIndexOp
59
+ from .ops import CreateTableCommentOp
60
+ from .ops import CreateTableOp
61
+ from .ops import DropColumnOp
62
+ from .ops import DropConstraintOp
63
+ from .ops import DropIndexOp
64
+ from .ops import DropTableCommentOp
65
+ from .ops import DropTableOp
66
+ from .ops import ExecuteSQLOp
67
+ from .ops import MigrateOperation
68
+ from ..ddl import DefaultImpl
69
+ from ..runtime.migration import MigrationContext
70
+ __all__ = ("Operations", "BatchOperations")
71
+ _T = TypeVar("_T")
72
+
73
+ _C = TypeVar("_C", bound=Callable[..., Any])
74
+
75
+
76
+ class AbstractOperations(util.ModuleClsProxy):
77
+ """Base class for Operations and BatchOperations.
78
+
79
+ .. versionadded:: 1.11.0
80
+
81
+ """
82
+
83
+ impl: Union[DefaultImpl, BatchOperationsImpl]
84
+ _to_impl = util.Dispatcher()
85
+
86
+ def __init__(
87
+ self,
88
+ migration_context: MigrationContext,
89
+ impl: Optional[BatchOperationsImpl] = None,
90
+ ) -> None:
91
+ """Construct a new :class:`.Operations`
92
+
93
+ :param migration_context: a :class:`.MigrationContext`
94
+ instance.
95
+
96
+ """
97
+ self.migration_context = migration_context
98
+ if impl is None:
99
+ self.impl = migration_context.impl
100
+ else:
101
+ self.impl = impl
102
+
103
+ self.schema_obj = schemaobj.SchemaObjects(migration_context)
104
+
105
+ @classmethod
106
+ def register_operation(
107
+ cls, name: str, sourcename: Optional[str] = None
108
+ ) -> Callable[[Type[_T]], Type[_T]]:
109
+ """Register a new operation for this class.
110
+
111
+ This method is normally used to add new operations
112
+ to the :class:`.Operations` class, and possibly the
113
+ :class:`.BatchOperations` class as well. All Alembic migration
114
+ operations are implemented via this system, however the system
115
+ is also available as a public API to facilitate adding custom
116
+ operations.
117
+
118
+ .. seealso::
119
+
120
+ :ref:`operation_plugins`
121
+
122
+
123
+ """
124
+
125
+ def register(op_cls: Type[_T]) -> Type[_T]:
126
+ if sourcename is None:
127
+ fn = getattr(op_cls, name)
128
+ source_name = fn.__name__
129
+ else:
130
+ fn = getattr(op_cls, sourcename)
131
+ source_name = fn.__name__
132
+
133
+ spec = inspect_getfullargspec(fn)
134
+
135
+ name_args = spec[0]
136
+ assert name_args[0:2] == ["cls", "operations"]
137
+
138
+ name_args[0:2] = ["self"]
139
+
140
+ args = inspect_formatargspec(
141
+ *spec, formatannotation=formatannotation_fwdref
142
+ )
143
+ num_defaults = len(spec[3]) if spec[3] else 0
144
+
145
+ defaulted_vals: Tuple[Any, ...]
146
+
147
+ if num_defaults:
148
+ defaulted_vals = tuple(name_args[0 - num_defaults :])
149
+ else:
150
+ defaulted_vals = ()
151
+
152
+ defaulted_vals += tuple(spec[4])
153
+ # here, we are using formatargspec in a different way in order
154
+ # to get a string that will re-apply incoming arguments to a new
155
+ # function call
156
+
157
+ apply_kw = inspect_formatargspec(
158
+ name_args + spec[4],
159
+ spec[1],
160
+ spec[2],
161
+ defaulted_vals,
162
+ formatvalue=lambda x: "=" + x,
163
+ formatannotation=formatannotation_fwdref,
164
+ )
165
+
166
+ args = re.sub(
167
+ r'[_]?ForwardRef\(([\'"].+?[\'"])\)',
168
+ lambda m: m.group(1),
169
+ args,
170
+ )
171
+
172
+ func_text = textwrap.dedent(
173
+ """\
174
+ def %(name)s%(args)s:
175
+ %(doc)r
176
+ return op_cls.%(source_name)s%(apply_kw)s
177
+ """
178
+ % {
179
+ "name": name,
180
+ "source_name": source_name,
181
+ "args": args,
182
+ "apply_kw": apply_kw,
183
+ "doc": fn.__doc__,
184
+ }
185
+ )
186
+
187
+ globals_ = dict(globals())
188
+ globals_.update({"op_cls": op_cls})
189
+ lcl: Dict[str, Any] = {}
190
+
191
+ exec(func_text, globals_, lcl)
192
+ setattr(cls, name, lcl[name])
193
+ fn.__func__.__doc__ = (
194
+ "This method is proxied on "
195
+ "the :class:`.%s` class, via the :meth:`.%s.%s` method."
196
+ % (cls.__name__, cls.__name__, name)
197
+ )
198
+ if hasattr(fn, "_legacy_translations"):
199
+ lcl[name]._legacy_translations = fn._legacy_translations
200
+ return op_cls
201
+
202
+ return register
203
+
204
+ @classmethod
205
+ def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]:
206
+ """Register an implementation for a given :class:`.MigrateOperation`.
207
+
208
+ This is part of the operation extensibility API.
209
+
210
+ .. seealso::
211
+
212
+ :ref:`operation_plugins` - example of use
213
+
214
+ """
215
+
216
+ def decorate(fn: _C) -> _C:
217
+ cls._to_impl.dispatch_for(op_cls)(fn)
218
+ return fn
219
+
220
+ return decorate
221
+
222
+ @classmethod
223
+ @contextmanager
224
+ def context(
225
+ cls, migration_context: MigrationContext
226
+ ) -> Iterator[Operations]:
227
+ op = Operations(migration_context)
228
+ op._install_proxy()
229
+ yield op
230
+ op._remove_proxy()
231
+
232
+ @contextmanager
233
+ def batch_alter_table(
234
+ self,
235
+ table_name: str,
236
+ schema: Optional[str] = None,
237
+ recreate: Literal["auto", "always", "never"] = "auto",
238
+ partial_reordering: Optional[Tuple[Any, ...]] = None,
239
+ copy_from: Optional[Table] = None,
240
+ table_args: Tuple[Any, ...] = (),
241
+ table_kwargs: Mapping[str, Any] = util.immutabledict(),
242
+ reflect_args: Tuple[Any, ...] = (),
243
+ reflect_kwargs: Mapping[str, Any] = util.immutabledict(),
244
+ naming_convention: Optional[Dict[str, str]] = None,
245
+ ) -> Iterator[BatchOperations]:
246
+ """Invoke a series of per-table migrations in batch.
247
+
248
+ Batch mode allows a series of operations specific to a table
249
+ to be syntactically grouped together, and allows for alternate
250
+ modes of table migration, in particular the "recreate" style of
251
+ migration required by SQLite.
252
+
253
+ "recreate" style is as follows:
254
+
255
+ 1. A new table is created with the new specification, based on the
256
+ migration directives within the batch, using a temporary name.
257
+
258
+ 2. The data is copied from the existing table to the new table.
259
+
260
+ 3. The existing table is dropped.
261
+
262
+ 4. The new table is renamed to the existing table name.
263
+
264
+ The directive by default will only use "recreate" style on the
265
+ SQLite backend, and only if directives are present which require
266
+ this form, e.g. anything other than ``add_column()``. The batch
267
+ operation on other backends will proceed using standard ALTER TABLE
268
+ operations.
269
+
270
+ The method is used as a context manager, which returns an instance
271
+ of :class:`.BatchOperations`; this object is the same as
272
+ :class:`.Operations` except that table names and schema names
273
+ are omitted. E.g.::
274
+
275
+ with op.batch_alter_table("some_table") as batch_op:
276
+ batch_op.add_column(Column("foo", Integer))
277
+ batch_op.drop_column("bar")
278
+
279
+ The operations within the context manager are invoked at once
280
+ when the context is ended. When run against SQLite, if the
281
+ migrations include operations not supported by SQLite's ALTER TABLE,
282
+ the entire table will be copied to a new one with the new
283
+ specification, moving all data across as well.
284
+
285
+ The copy operation by default uses reflection to retrieve the current
286
+ structure of the table, and therefore :meth:`.batch_alter_table`
287
+ in this mode requires that the migration is run in "online" mode.
288
+ The ``copy_from`` parameter may be passed which refers to an existing
289
+ :class:`.Table` object, which will bypass this reflection step.
290
+
291
+ .. note:: The table copy operation will currently not copy
292
+ CHECK constraints, and may not copy UNIQUE constraints that are
293
+ unnamed, as is possible on SQLite. See the section
294
+ :ref:`sqlite_batch_constraints` for workarounds.
295
+
296
+ :param table_name: name of table
297
+ :param schema: optional schema name.
298
+ :param recreate: under what circumstances the table should be
299
+ recreated. At its default of ``"auto"``, the SQLite dialect will
300
+ recreate the table if any operations other than ``add_column()``,
301
+ ``create_index()``, or ``drop_index()`` are
302
+ present. Other options include ``"always"`` and ``"never"``.
303
+ :param copy_from: optional :class:`~sqlalchemy.schema.Table` object
304
+ that will act as the structure of the table being copied. If omitted,
305
+ table reflection is used to retrieve the structure of the table.
306
+
307
+ .. seealso::
308
+
309
+ :ref:`batch_offline_mode`
310
+
311
+ :paramref:`~.Operations.batch_alter_table.reflect_args`
312
+
313
+ :paramref:`~.Operations.batch_alter_table.reflect_kwargs`
314
+
315
+ :param reflect_args: a sequence of additional positional arguments that
316
+ will be applied to the table structure being reflected / copied;
317
+ this may be used to pass column and constraint overrides to the
318
+ table that will be reflected, in lieu of passing the whole
319
+ :class:`~sqlalchemy.schema.Table` using
320
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
321
+ :param reflect_kwargs: a dictionary of additional keyword arguments
322
+ that will be applied to the table structure being copied; this may be
323
+ used to pass additional table and reflection options to the table that
324
+ will be reflected, in lieu of passing the whole
325
+ :class:`~sqlalchemy.schema.Table` using
326
+ :paramref:`~.Operations.batch_alter_table.copy_from`.
327
+ :param table_args: a sequence of additional positional arguments that
328
+ will be applied to the new :class:`~sqlalchemy.schema.Table` when
329
+ created, in addition to those copied from the source table.
330
+ This may be used to provide additional constraints such as CHECK
331
+ constraints that may not be reflected.
332
+ :param table_kwargs: a dictionary of additional keyword arguments
333
+ that will be applied to the new :class:`~sqlalchemy.schema.Table`
334
+ when created, in addition to those copied from the source table.
335
+ This may be used to provide for additional table options that may
336
+ not be reflected.
337
+ :param naming_convention: a naming convention dictionary of the form
338
+ described at :ref:`autogen_naming_conventions` which will be applied
339
+ to the :class:`~sqlalchemy.schema.MetaData` during the reflection
340
+ process. This is typically required if one wants to drop SQLite
341
+ constraints, as these constraints will not have names when
342
+ reflected on this backend. Requires SQLAlchemy **0.9.4** or greater.
343
+
344
+ .. seealso::
345
+
346
+ :ref:`dropping_sqlite_foreign_keys`
347
+
348
+ :param partial_reordering: a list of tuples, each suggesting a desired
349
+ ordering of two or more columns in the newly created table. Requires
350
+ that :paramref:`.batch_alter_table.recreate` is set to ``"always"``.
351
+ Examples, given a table with columns "a", "b", "c", and "d":
352
+
353
+ Specify the order of all columns::
354
+
355
+ with op.batch_alter_table(
356
+ "some_table",
357
+ recreate="always",
358
+ partial_reordering=[("c", "d", "a", "b")],
359
+ ) as batch_op:
360
+ pass
361
+
362
+ Ensure "d" appears before "c", and "b" appears before "a"::
363
+
364
+ with op.batch_alter_table(
365
+ "some_table",
366
+ recreate="always",
367
+ partial_reordering=[("d", "c"), ("b", "a")],
368
+ ) as batch_op:
369
+ pass
370
+
371
+ The ordering of columns not included in the partial_reordering
372
+ set is undefined. Therefore it is best to specify the complete
373
+ ordering of all columns.
374
+
375
+ .. note:: batch mode requires SQLAlchemy 0.8 or above.
376
+
377
+ .. seealso::
378
+
379
+ :ref:`batch_migrations`
380
+
381
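+ For illustration, a minimal sketch supplying a naming convention so
+ that an unnamed SQLite foreign key can be dropped (the table, column,
+ and convention here are hypothetical)::
+
+     naming_convention = {
+         "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
+     }
+     with op.batch_alter_table(
+         "bar", naming_convention=naming_convention
+     ) as batch_op:
+         # name derived from the convention during reflection
+         batch_op.drop_constraint("fk_bar_foo_id_foo", type_="foreignkey")
+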
+ """
382
+ impl = batch.BatchOperationsImpl(
383
+ self,
384
+ table_name,
385
+ schema,
386
+ recreate,
387
+ copy_from,
388
+ table_args,
389
+ table_kwargs,
390
+ reflect_args,
391
+ reflect_kwargs,
392
+ naming_convention,
393
+ partial_reordering,
394
+ )
395
+ batch_op = BatchOperations(self.migration_context, impl=impl)
396
+ yield batch_op
397
+ impl.flush()
398
+
399
+ def get_context(self) -> MigrationContext:
400
+ """Return the :class:`.MigrationContext` object that's
401
+ currently in use.
402
+
403
+ """
404
+
405
+ return self.migration_context
406
+
407
+ @overload
408
+ def invoke(self, operation: CreateTableOp) -> Table: ...
409
+
410
+ @overload
411
+ def invoke(
412
+ self,
413
+ operation: Union[
414
+ AddConstraintOp,
415
+ DropConstraintOp,
416
+ CreateIndexOp,
417
+ DropIndexOp,
418
+ AddColumnOp,
419
+ AlterColumnOp,
420
+ AlterTableOp,
421
+ CreateTableCommentOp,
422
+ DropTableCommentOp,
423
+ DropColumnOp,
424
+ BulkInsertOp,
425
+ DropTableOp,
426
+ ExecuteSQLOp,
427
+ ],
428
+ ) -> None: ...
429
+
430
+ @overload
431
+ def invoke(self, operation: MigrateOperation) -> Any: ...
432
+
433
+ def invoke(self, operation: MigrateOperation) -> Any:
434
+ """Given a :class:`.MigrateOperation`, invoke it in terms of
435
+ this :class:`.Operations` instance.
436
+
437
+ """
438
+ fn = self._to_impl.dispatch(
439
+ operation, self.migration_context.impl.__dialect__
440
+ )
441
+ return fn(self, operation)
442
+
443
+ def f(self, name: str) -> conv:
444
+ """Indicate a string name that has already had a naming convention
445
+ applied to it.
446
+
447
+ This feature combines with the SQLAlchemy ``naming_convention`` feature
448
+ to disambiguate constraint names that have already had naming
449
+ conventions applied to them, versus those that have not. This is
450
+ necessary in the case that the ``"%(constraint_name)s"`` token
451
+ is used within a naming convention, so that it can be identified
452
+ that this particular name should remain fixed.
453
+
454
+ If the :meth:`.Operations.f` is used on a constraint, the naming
455
+ convention will not take effect::
456
+
457
+ op.add_column("t", "x", Boolean(name=op.f("ck_bool_t_x")))
458
+
459
+ Above, the CHECK constraint generated will have the name
460
+ ``ck_bool_t_x`` regardless of whether or not a naming convention is
461
+ in use.
462
+
463
+ Alternatively, if a naming convention is in use, and 'f' is not used,
464
+ names will be converted along conventions. If the ``target_metadata``
465
+ contains the naming convention
466
+ ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
467
+ output of the following::
468
+
469
+ op.add_column("t", "x", Boolean(name="x"))
470
+
471
+ will be::
472
+
473
+ CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
474
+
475
+ The function is rendered in the output of autogenerate when
476
+ a particular constraint name is already converted.
477
+
478
+ """
479
+ return conv(name)
480
+
481
+ def inline_literal(
482
+ self, value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
483
+ ) -> _literal_bindparam:
484
+ r"""Produce an 'inline literal' expression, suitable for
485
+ using in an INSERT, UPDATE, or DELETE statement.
486
+
487
+ When using Alembic in "offline" mode, CRUD operations
488
+ aren't compatible with SQLAlchemy's default behavior surrounding
489
+ literal values,
490
+ which is that they are converted into bound values and passed
491
+ separately into the ``execute()`` method of the DBAPI cursor.
492
+ An offline SQL
493
+ script needs to have these rendered inline. While it should
494
+ always be noted that inline literal values are an **enormous**
495
+ security hole in an application that handles untrusted input,
496
+ a schema migration is not run in this context, so
497
+ literals are safe to render inline, with the caveat that
498
+ advanced types like dates may not be supported directly
499
+ by SQLAlchemy.
500
+
501
+ See :meth:`.Operations.execute` for an example usage of
502
+ :meth:`.Operations.inline_literal`.
503
+
504
+ The environment can also be configured to attempt to render
505
+ "literal" values inline automatically, for those simple types
506
+ that are supported by the dialect; see
507
+ :paramref:`.EnvironmentContext.configure.literal_binds` for this
508
+ more recently added feature.
509
+
510
+ :param value: The value to render. Strings, integers, and simple
511
+ numerics should be supported. Other types like boolean,
512
+ dates, etc. may or may not be supported yet by various
513
+ backends.
514
+ :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
515
+ subclass stating the type of this value. In SQLAlchemy
516
+ expressions, this is usually derived automatically
517
+ from the Python type of the value itself, as well as
518
+ based on the context in which the value is used.
519
+
520
+ .. seealso::
521
+
522
+ :paramref:`.EnvironmentContext.configure.literal_binds`
523
+
524
+ """
525
+ return sqla_compat._literal_bindparam(None, value, type_=type_)
526
+
527
+ def get_bind(self) -> Connection:
528
+ """Return the current 'bind'.
529
+
530
+ Under normal circumstances, this is the
531
+ :class:`~sqlalchemy.engine.Connection` currently being used
532
+ to emit SQL to the database.
533
+
534
+ In a SQL script ("offline") context, this value is ``None``.
535
+
536
+ """
537
+ return self.migration_context.impl.bind # type: ignore[return-value]
538
+
539
+ def run_async(
540
+ self,
541
+ async_function: Callable[..., Awaitable[_T]],
542
+ *args: Any,
543
+ **kw_args: Any,
544
+ ) -> _T:
545
+ """Invoke the given asynchronous callable, passing an asynchronous
546
+ :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
547
+ argument.
548
+
549
+ This method allows calling async functions from within the
550
+ synchronous ``upgrade()`` or ``downgrade()`` alembic migration
551
+ method.
552
+
553
+ The async connection passed to the callable shares the same
554
+ transaction as the connection running in the migration context.
555
+
556
+ Any additional arg or kw_arg passed to this function are passed
557
+ to the provided async function.
558
+
559
+ .. versionadded:: 1.11
560
+
561
+ .. note::
562
+
563
+ This method can be called only when alembic is called using
564
+ an async dialect.
565
+ """
566
+ if not sqla_compat.sqla_14_18:
567
+ raise NotImplementedError("SQLAlchemy 1.4.18+ required")
568
+ sync_conn = self.get_bind()
569
+ if sync_conn is None:
570
+ raise NotImplementedError("Cannot call run_async in SQL mode")
571
+ if not sync_conn.dialect.is_async:
572
+ raise ValueError("Cannot call run_async with a sync engine")
573
+ from sqlalchemy.ext.asyncio import AsyncConnection
574
+ from sqlalchemy.util import await_only
575
+
576
+ async_conn = AsyncConnection._retrieve_proxy_for_target(sync_conn)
577
+ return await_only(async_function(async_conn, *args, **kw_args))
578
+
579
+
580
+ class Operations(AbstractOperations):
581
+ """Define high level migration operations.
582
+
583
+ Each operation corresponds to some schema migration operation,
584
+ executed against a particular :class:`.MigrationContext`
585
+ which in turn represents connectivity to a database,
586
+ or a file output stream.
587
+
588
+ While :class:`.Operations` is normally configured as
589
+ part of the :meth:`.EnvironmentContext.run_migrations`
590
+ method called from an ``env.py`` script, a standalone
591
+ :class:`.Operations` instance can be
592
+ made for use cases external to regular Alembic
593
+ migrations by passing in a :class:`.MigrationContext`::
594
+
595
+ from alembic.migration import MigrationContext
596
+ from alembic.operations import Operations
597
+
598
+ conn = myengine.connect()
599
+ ctx = MigrationContext.configure(conn)
600
+ op = Operations(ctx)
601
+
602
+ op.alter_column("t", "c", nullable=True)
603
+
604
+ Note that as of 0.8, most of the methods on this class are produced
605
+ dynamically using the :meth:`.Operations.register_operation`
606
+ method.
607
+
608
+ """
609
+
610
+ if TYPE_CHECKING:
611
+ # START STUB FUNCTIONS: op_cls
612
+ # ### the following stubs are generated by tools/write_pyi.py ###
613
+ # ### do not edit ###
614
+
615
+ def add_column(
616
+ self,
617
+ table_name: str,
618
+ column: Column[Any],
619
+ *,
620
+ schema: Optional[str] = None,
621
+ if_not_exists: Optional[bool] = None,
622
+ ) -> None:
623
+ """Issue an "add column" instruction using the current
624
+ migration context.
625
+
626
+ e.g.::
627
+
628
+ from alembic import op
629
+ from sqlalchemy import Column, String
630
+
631
+ op.add_column("organization", Column("name", String()))
632
+
633
+ The :meth:`.Operations.add_column` method typically corresponds
634
+ to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
635
+ of this command, the column's name, datatype, nullability,
636
+ and optional server-generated defaults may be indicated.
637
+
638
+ .. note::
639
+
640
+ With the exception of NOT NULL constraints or single-column FOREIGN
641
+ KEY constraints, other kinds of constraints such as PRIMARY KEY,
642
+ UNIQUE or CHECK constraints **cannot** be generated using this
643
+ method; for these constraints, refer to operations such as
644
+ :meth:`.Operations.create_primary_key` and
645
+ :meth:`.Operations.create_check_constraint`. In particular, the
646
+ following :class:`~sqlalchemy.schema.Column` parameters are
647
+ **ignored**:
648
+
649
+ * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
650
+ typically do not support an ALTER operation that can add
651
+ individual columns one at a time to an existing primary key
652
+ constraint, therefore it's less ambiguous to use the
653
+ :meth:`.Operations.create_primary_key` method, which assumes no
654
+ existing primary key constraint is present.
655
+ * :paramref:`~sqlalchemy.schema.Column.unique` - use the
656
+ :meth:`.Operations.create_unique_constraint` method
657
+ * :paramref:`~sqlalchemy.schema.Column.index` - use the
658
+ :meth:`.Operations.create_index` method
659
+
660
+
661
+ The provided :class:`~sqlalchemy.schema.Column` object may include a
662
+ :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
663
+ referencing a remote table name. For this specific type of constraint,
664
+ Alembic will automatically emit a second ALTER statement in order to
665
+ add the single-column FOREIGN KEY constraint separately::
666
+
667
+ from alembic import op
668
+ from sqlalchemy import Column, INTEGER, ForeignKey
669
+
670
+ op.add_column(
671
+ "organization",
672
+ Column("account_id", INTEGER, ForeignKey("accounts.id")),
673
+ )
674
+
675
+ The column argument passed to :meth:`.Operations.add_column` is a
676
+ :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
677
+ used in SQLAlchemy. In particular, values or functions to be indicated
678
+ as producing the column's default value on the database side are
679
+ specified using the ``server_default`` parameter, and not ``default``
680
+ which only specifies Python-side defaults::
681
+
682
+ from alembic import op
683
+ from sqlalchemy import Column, TIMESTAMP, func
684
+
685
+ # specify "DEFAULT NOW" along with the column add
686
+ op.add_column(
687
+ "account",
688
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
689
+ )
690
+
691
+ :param table_name: String name of the parent table.
692
+ :param column: a :class:`sqlalchemy.schema.Column` object
693
+ representing the new column.
694
+ :param schema: Optional schema name to operate within. To control
695
+ quoting of the schema outside of the default behavior, use
696
+ the SQLAlchemy construct
697
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
698
+ :param if_not_exists: If True, adds IF NOT EXISTS operator
699
+ when creating the new column for compatible dialects
700
+
701
+ .. versionadded:: 1.16.0
702
+
703
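+
+ For illustration, a minimal guarded add (the table and column here
+ are hypothetical)::
+
+     from alembic import op
+     from sqlalchemy import Column, Integer
+
+     op.add_column(
+         "account",
+         Column("visits", Integer),
+         if_not_exists=True,  # skip if the column already exists
+     )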
+ """ # noqa: E501
704
+ ...
705
+
706
+ def alter_column(
707
+ self,
708
+ table_name: str,
709
+ column_name: str,
710
+ *,
711
+ nullable: Optional[bool] = None,
712
+ comment: Union[str, Literal[False], None] = False,
713
+ server_default: Union[
714
+ str, bool, Identity, Computed, TextClause, None
715
+ ] = False,
716
+ new_column_name: Optional[str] = None,
717
+ type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
718
+ existing_type: Union[
719
+ TypeEngine[Any], Type[TypeEngine[Any]], None
720
+ ] = None,
721
+ existing_server_default: Union[
722
+ str, bool, Identity, Computed, TextClause, None
723
+ ] = False,
724
+ existing_nullable: Optional[bool] = None,
725
+ existing_comment: Optional[str] = None,
726
+ schema: Optional[str] = None,
727
+ **kw: Any,
728
+ ) -> None:
729
+ r"""Issue an "alter column" instruction using the
730
+ current migration context.
731
+
732
+ Generally, only that aspect of the column which
733
+ is being changed, i.e. name, type, nullability,
734
+ default, needs to be specified. Multiple changes
735
+ can also be specified at once and the backend should
736
+ "do the right thing", emitting each change either
737
+ separately or together as the backend allows.
738
+
739
+ MySQL has special requirements here, since MySQL
740
+ cannot ALTER a column without a full specification.
741
+ When producing MySQL-compatible migration files,
742
+ it is recommended that the ``existing_type``,
743
+ ``existing_server_default``, and ``existing_nullable``
744
+ parameters be present, if not being altered.
745
+
746
+ Type changes which are against the SQLAlchemy
747
+ "schema" types :class:`~sqlalchemy.types.Boolean`
748
+ and :class:`~sqlalchemy.types.Enum` may also
749
+ add or drop constraints which accompany those
750
+ types on backends that don't support them natively.
751
+ The ``existing_type`` argument is
752
+ used in this case to identify and remove a previous
753
+ constraint that was bound to the type object.
754
+
755
+ :param table_name: string name of the target table.
756
+ :param column_name: string name of the target column,
757
+ as it exists before the operation begins.
758
+ :param nullable: Optional; specify ``True`` or ``False``
759
+ to alter the column's nullability.
760
+ :param server_default: Optional; specify a string
761
+ SQL expression, :func:`~sqlalchemy.sql.expression.text`,
762
+ or :class:`~sqlalchemy.schema.DefaultClause` to indicate
763
+ an alteration to the column's default value.
764
+ Set to ``None`` to have the default removed.
765
+ :param comment: optional string text of a new comment to add to the
766
+ column.
767
+ :param new_column_name: Optional; specify a string name here to
768
+ indicate the new name within a column rename operation.
769
+ :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
770
+ type object to specify a change to the column's type.
771
+ For SQLAlchemy types that also indicate a constraint (i.e.
772
+ :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
773
+ the constraint is also generated.
774
+ :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
775
+ currently understood by the MySQL dialect.
776
+ :param existing_type: Optional; a
777
+ :class:`~sqlalchemy.types.TypeEngine`
778
+ type object to specify the previous type. This
779
+ is required for all MySQL column alter operations that
780
+ don't otherwise specify a new type, as well as for
781
+ when nullability is being changed on a SQL Server
782
+ column. It is also used if the type is a so-called
783
+ SQLAlchemy "schema" type which may define a constraint (i.e.
784
+ :class:`~sqlalchemy.types.Boolean`,
785
+ :class:`~sqlalchemy.types.Enum`),
786
+ so that the constraint can be dropped.
787
+ :param existing_server_default: Optional; The existing
788
+ default value of the column. Required on MySQL if
789
+ an existing default is not being changed; else MySQL
790
+ removes the default.
791
+ :param existing_nullable: Optional; the existing nullability
792
+ of the column. Required on MySQL if the existing nullability
793
+ is not being changed; else MySQL sets this to NULL.
794
+ :param existing_autoincrement: Optional; the existing autoincrement
795
+ of the column. Used for MySQL's system of altering a column
796
+ that specifies ``AUTO_INCREMENT``.
797
+ :param existing_comment: string text of the existing comment on the
798
+ column to be maintained. Required on MySQL if the existing comment
799
+ on the column is not being changed.
800
+ :param schema: Optional schema name to operate within. To control
801
+ quoting of the schema outside of the default behavior, use
802
+ the SQLAlchemy construct
803
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
804
+ :param postgresql_using: String argument which will indicate a
805
+ SQL expression to render within the Postgresql-specific USING clause
806
+ within ALTER COLUMN. This string is taken directly as raw SQL which
807
+ must explicitly include any necessary quoting or escaping of tokens
808
+ within the expression.
809
+
810
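+
+ For illustration, a minimal sketch widening a column's type and
+ relaxing its nullability (the names and types here are
+ hypothetical)::
+
+     from alembic import op
+     import sqlalchemy as sa
+
+     op.alter_column(
+         "account",
+         "name",
+         existing_type=sa.VARCHAR(50),  # required on MySQL
+         type_=sa.Text(),
+         existing_nullable=False,
+         nullable=True,
+     )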
+ """ # noqa: E501
811
+ ...
812
+
813
+ def bulk_insert(
814
+ self,
815
+ table: Union[Table, TableClause],
816
+ rows: List[Dict[str, Any]],
817
+ *,
818
+ multiinsert: bool = True,
819
+ ) -> None:
820
+ """Issue a "bulk insert" operation using the current
821
+ migration context.
822
+
823
+ This provides a means of representing an INSERT of multiple rows
824
+ which works equally well in the context of executing on a live
825
+ connection as well as that of generating a SQL script. In the
826
+ case of a SQL script, the values are rendered inline into the
827
+ statement.
828
+
829
+ e.g.::
830
+
831
+ from alembic import op
832
+ from datetime import date
833
+ from sqlalchemy.sql import table, column
834
+ from sqlalchemy import String, Integer, Date
835
+
836
+ # Create an ad-hoc table to use for the insert statement.
837
+ accounts_table = table(
838
+ "account",
839
+ column("id", Integer),
840
+ column("name", String),
841
+ column("create_date", Date),
842
+ )
843
+
844
+ op.bulk_insert(
845
+ accounts_table,
846
+ [
847
+ {
848
+ "id": 1,
849
+ "name": "John Smith",
850
+ "create_date": date(2010, 10, 5),
851
+ },
852
+ {
853
+ "id": 2,
854
+ "name": "Ed Williams",
855
+ "create_date": date(2007, 5, 27),
856
+ },
857
+ {
858
+ "id": 3,
859
+ "name": "Wendy Jones",
860
+ "create_date": date(2008, 8, 15),
861
+ },
862
+ ],
863
+ )
864
+
865
+ When using --sql mode, some datatypes may not render inline
866
+ automatically, such as dates and other special types. When this
867
+ issue is present, :meth:`.Operations.inline_literal` may be used::
868
+
869
+ op.bulk_insert(
870
+ accounts_table,
871
+ [
872
+ {
873
+ "id": 1,
874
+ "name": "John Smith",
875
+ "create_date": op.inline_literal("2010-10-05"),
876
+ },
877
+ {
878
+ "id": 2,
879
+ "name": "Ed Williams",
880
+ "create_date": op.inline_literal("2007-05-27"),
881
+ },
882
+ {
883
+ "id": 3,
884
+ "name": "Wendy Jones",
885
+ "create_date": op.inline_literal("2008-08-15"),
886
+ },
887
+ ],
888
+ multiinsert=False,
889
+ )
890
+
891
+ When using :meth:`.Operations.inline_literal` in conjunction with
892
+ :meth:`.Operations.bulk_insert`, in order for the statement to work
893
+ in "online" (e.g. non --sql) mode, the
894
+ :paramref:`~.Operations.bulk_insert.multiinsert`
895
+ flag should be set to ``False``, which will have the effect of
896
+ individual INSERT statements being emitted to the database, each
897
+ with a distinct VALUES clause, so that the "inline" values can
898
+ still be rendered, rather than attempting to pass the values
899
+ as bound parameters.
900
+
901
+ :param table: a table object which represents the target of the INSERT.
902
+
903
+ :param rows: a list of dictionaries indicating rows.
904
+
905
+ :param multiinsert: when at its default of True and --sql mode is not
906
+ enabled, the INSERT statement will be executed using
907
+ "executemany()" style, where all elements in the list of
908
+ dictionaries are passed as bound parameters in a single
909
+ list. Setting this to False results in individual INSERT
910
+ statements being emitted per parameter set, and is needed
911
+ in those cases where non-literal values are present in the
912
+ parameter sets.
913
+
914
+ """ # noqa: E501
915
+ ...
916
+
917
+ def create_check_constraint(
918
+ self,
919
+ constraint_name: Optional[str],
920
+ table_name: str,
921
+ condition: Union[str, ColumnElement[bool], TextClause],
922
+ *,
923
+ schema: Optional[str] = None,
924
+ **kw: Any,
925
+ ) -> None:
926
+ """Issue a "create check constraint" instruction using the
927
+ current migration context.
928
+
929
+ e.g.::
930
+
931
+ from alembic import op
932
+ from sqlalchemy.sql import column, func
933
+
934
+ op.create_check_constraint(
935
+ "ck_user_name_len",
936
+ "user",
937
+ func.len(column("name")) > 5,
938
+ )
939
+
940
+ CHECK constraints are usually against a SQL expression, so ad-hoc
941
+ table metadata is usually needed. The function will convert the given
942
+ arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
943
+ to an anonymous table in order to emit the CREATE statement.
944
+
945
+ :param constraint_name: Name of the check constraint. The name is necessary
946
+ so that an ALTER statement can be emitted. For setups that
947
+ use an automated naming scheme such as that described at
948
+ :ref:`sqla:constraint_naming_conventions`,
949
+ ``name`` here can be ``None``, as the event listener will
950
+ apply the name to the constraint object when it is associated
951
+ with the table.
952
+ :param table_name: String name of the source table.
953
+ :param condition: SQL expression that's the condition of the
954
+ constraint. Can be a string or SQLAlchemy expression language
955
+ structure.
956
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
957
+ NOT DEFERRABLE when issuing DDL for this constraint.
958
+ :param initially: optional string. If set, emit INITIALLY <value>
959
+ when issuing DDL for this constraint.
960
+ :param schema: Optional schema name to operate within. To control
961
+ quoting of the schema outside of the default behavior, use
962
+ the SQLAlchemy construct
963
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
964
+
965
+ """ # noqa: E501
966
+ ...
967
+
968
+ def create_exclude_constraint(
969
+ self,
970
+ constraint_name: str,
971
+ table_name: str,
972
+ *elements: Any,
973
+ **kw: Any,
974
+ ) -> Optional[Table]:
975
+ """Issue an alter to create an EXCLUDE constraint using the
976
+ current migration context.
977
+
978
+ .. note:: This method is Postgresql specific, and additionally
979
+ requires at least SQLAlchemy 1.0.
980
+
981
+ e.g.::
982
+
983
+ from alembic import op
984
+
985
+ op.create_exclude_constraint(
986
+ "user_excl",
987
+ "user",
988
+ ("period", "&&"),
989
+ ("group", "="),
990
+ where=("group != 'some group'"),
991
+ )
992
+
993
+ Note that the expressions work the same way as that of
994
+ the ``ExcludeConstraint`` object itself; if plain strings are
995
+ passed, quoting rules must be applied manually.
996
+
997
+ :param constraint_name: Name of the constraint.
998
+ :param table_name: String name of the source table.
999
+ :param elements: exclude conditions.
1000
+ :param where: SQL expression or SQL string with optional WHERE
1001
+ clause.
1002
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
1003
+ NOT DEFERRABLE when issuing DDL for this constraint.
1004
+ :param initially: optional string. If set, emit INITIALLY <value>
1005
+ when issuing DDL for this constraint.
1006
+ :param schema: Optional schema name to operate within.
1007
+
1008
+ """ # noqa: E501
1009
+ ...
1010
+
1011
+ def create_foreign_key(
1012
+ self,
1013
+ constraint_name: Optional[str],
1014
+ source_table: str,
1015
+ referent_table: str,
1016
+ local_cols: List[str],
1017
+ remote_cols: List[str],
1018
+ *,
1019
+ onupdate: Optional[str] = None,
1020
+ ondelete: Optional[str] = None,
1021
+ deferrable: Optional[bool] = None,
1022
+ initially: Optional[str] = None,
1023
+ match: Optional[str] = None,
1024
+ source_schema: Optional[str] = None,
1025
+ referent_schema: Optional[str] = None,
1026
+ **dialect_kw: Any,
1027
+ ) -> None:
1028
+ """Issue a "create foreign key" instruction using the
1029
+ current migration context.
1030
+
1031
+ e.g.::
1032
+
1033
+ from alembic import op
1034
+
1035
+ op.create_foreign_key(
1036
+ "fk_user_address",
1037
+ "address",
1038
+ "user",
1039
+ ["user_id"],
1040
+ ["id"],
1041
+ )
1042
+
1043
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
1044
+ containing the necessary columns, then generates a new
1045
+ :class:`~sqlalchemy.schema.ForeignKeyConstraint`
1046
+ object which it then associates with the
1047
+ :class:`~sqlalchemy.schema.Table`.
1048
+ Any event listeners associated with this action will be fired
1049
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
1050
+ construct is ultimately used to generate the ALTER statement.
1051
+
1052
+ :param constraint_name: Name of the foreign key constraint. The name
1053
+ is necessary so that an ALTER statement can be emitted. For setups
1054
+ that use an automated naming scheme such as that described at
1055
+ :ref:`sqla:constraint_naming_conventions`,
1056
+ ``name`` here can be ``None``, as the event listener will
1057
+ apply the name to the constraint object when it is associated
1058
+ with the table.
1059
+ :param source_table: String name of the source table.
1060
+ :param referent_table: String name of the destination table.
1061
+ :param local_cols: a list of string column names in the
1062
+ source table.
1063
+ :param remote_cols: a list of string column names in the
1064
+ remote table.
1065
+ :param onupdate: Optional string. If set, emit ON UPDATE <value> when
1066
+ issuing DDL for this constraint. Typical values include CASCADE,
1067
+ SET NULL and RESTRICT.
1068
+ :param ondelete: Optional string. If set, emit ON DELETE <value> when
1069
+ issuing DDL for this constraint. Typical values include CASCADE,
1070
+ SET NULL and RESTRICT.
1071
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
1072
+ DEFERRABLE when issuing DDL for this constraint.
1073
+ :param source_schema: Optional schema name of the source table.
1074
+ :param referent_schema: Optional schema name of the destination table.
1075
+
1076
+ """ # noqa: E501
1077
+ ...
1078
+
1079
+ def create_index(
1080
+ self,
1081
+ index_name: Optional[str],
1082
+ table_name: str,
1083
+ columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
1084
+ *,
1085
+ schema: Optional[str] = None,
1086
+ unique: bool = False,
1087
+ if_not_exists: Optional[bool] = None,
1088
+ **kw: Any,
1089
+ ) -> None:
1090
+ r"""Issue a "create index" instruction using the current
1091
+ migration context.
1092
+
1093
+ e.g.::
1094
+
1095
+ from alembic import op
1096
+
1097
+ op.create_index("ik_test", "t1", ["foo", "bar"])
1098
+
1099
+ Functional indexes can be produced by using the
1100
+ :func:`sqlalchemy.sql.expression.text` construct::
1101
+
1102
+ from alembic import op
1103
+ from sqlalchemy import text
1104
+
1105
+ op.create_index("ik_test", "t1", [text("lower(foo)")])
1106
+
1107
+ :param index_name: name of the index.
1108
+ :param table_name: name of the owning table.
1109
+ :param columns: a list consisting of string column names and/or
1110
+ :func:`~sqlalchemy.sql.expression.text` constructs.
1111
+ :param schema: Optional schema name to operate within. To control
1112
+ quoting of the schema outside of the default behavior, use
1113
+ the SQLAlchemy construct
1114
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1115
+ :param unique: If True, create a unique index.
1116
+
1117
+ :param quote: Force quoting of this column's name on or off,
1118
+ corresponding to ``True`` or ``False``. When left at its default
1119
+ of ``None``, the column identifier will be quoted according to
1120
+ whether the name is case sensitive (identifiers with at least one
1121
+ upper case character are treated as case sensitive), or if it's a
1122
+ reserved word. This flag is only needed to force quoting of a
1123
+ reserved word which is not known by the SQLAlchemy dialect.
1124
+
1125
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
1126
+ creating the new index.
1127
+
1128
+ .. versionadded:: 1.12.0
1129
+
1130
+ :param \**kw: Additional keyword arguments not mentioned above are
1131
+ dialect specific, and passed in the form
1132
+ ``<dialectname>_<argname>``.
1133
+ See the documentation regarding an individual dialect at
1134
+ :ref:`dialect_toplevel` for detail on documented arguments.
1135
+
1136
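+
+ For illustration, a minimal dialect-specific sketch (the index,
+ table, and column names here are hypothetical)::
+
+     op.create_index(
+         "ix_document_tags",
+         "document",
+         ["tags"],
+         postgresql_using="gin",  # rendered only on PostgreSQL
+     )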
+ """ # noqa: E501
1137
+ ...
1138
+
1139
+ def create_primary_key(
1140
+ self,
1141
+ constraint_name: Optional[str],
1142
+ table_name: str,
1143
+ columns: List[str],
1144
+ *,
1145
+ schema: Optional[str] = None,
1146
+ ) -> None:
1147
+ """Issue a "create primary key" instruction using the current
1148
+ migration context.
1149
+
1150
+ e.g.::
1151
+
1152
+ from alembic import op
1153
+
1154
+ op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
1155
+
1156
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
1157
+ containing the necessary columns, then generates a new
1158
+ :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
1159
+ object which it then associates with the
1160
+ :class:`~sqlalchemy.schema.Table`.
1161
+ Any event listeners associated with this action will be fired
1162
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
1163
+ construct is ultimately used to generate the ALTER statement.
1164
+
1165
+ :param constraint_name: Name of the primary key constraint. The name
1166
+ is necessary so that an ALTER statement can be emitted. For setups
1167
+ that use an automated naming scheme such as that described at
1168
+ :ref:`sqla:constraint_naming_conventions`
1169
+ ``name`` here can be ``None``, as the event listener will
1170
+ apply the name to the constraint object when it is associated
1171
+ with the table.
1172
+ :param table_name: String name of the target table.
1173
+ :param columns: a list of string column names to be applied to the
1174
+ primary key constraint.
1175
+ :param schema: Optional schema name to operate within. To control
1176
+ quoting of the schema outside of the default behavior, use
1177
+ the SQLAlchemy construct
1178
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1179
+
1180
+ """ # noqa: E501
1181
+ ...
1182
+
1183
+ def create_table(
1184
+ self,
1185
+ table_name: str,
1186
+ *columns: SchemaItem,
1187
+ if_not_exists: Optional[bool] = None,
1188
+ **kw: Any,
1189
+ ) -> Table:
1190
+ r"""Issue a "create table" instruction using the current migration
1191
+ context.
1192
+
1193
+ This directive receives an argument list similar to that of the
1194
+ traditional :class:`sqlalchemy.schema.Table` construct, but without the
1195
+ metadata::
1196
+
1197
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, TIMESTAMP, Column, func
1198
+ from alembic import op
1199
+
1200
+ op.create_table(
1201
+ "account",
1202
+ Column("id", INTEGER, primary_key=True),
1203
+ Column("name", VARCHAR(50), nullable=False),
1204
+ Column("description", NVARCHAR(200)),
1205
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
1206
+ )
1207
+
1208
+ Note that :meth:`.create_table` accepts
1209
+ :class:`~sqlalchemy.schema.Column`
1210
+ constructs directly from the SQLAlchemy library. In particular,
1211
+ default values to be created on the database side are
1212
+ specified using the ``server_default`` parameter, and not
1213
+ ``default`` which only specifies Python-side defaults::
1214
+
1215
+ from alembic import op
1216
+ from sqlalchemy import Column, TIMESTAMP, func
1217
+
1218
+ # specify "DEFAULT NOW" along with the "timestamp" column
1219
+ op.create_table(
1220
+ "account",
1221
+ Column("id", INTEGER, primary_key=True),
1222
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
1223
+ )
1224
+
1225
+ The function also returns a newly created
1226
+ :class:`~sqlalchemy.schema.Table` object, corresponding to the table
1227
+ specification given, which is suitable for
1228
+ immediate SQL operations, in particular
1229
+ :meth:`.Operations.bulk_insert`::
1230
+
1231
+ from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
1232
+ from alembic import op
1233
+
1234
+ account_table = op.create_table(
1235
+ "account",
1236
+ Column("id", INTEGER, primary_key=True),
1237
+ Column("name", VARCHAR(50), nullable=False),
1238
+ Column("description", NVARCHAR(200)),
1239
+ Column("timestamp", TIMESTAMP, server_default=func.now()),
1240
+ )
1241
+
1242
+ op.bulk_insert(
1243
+ account_table,
1244
+ [
1245
+ {"name": "A1", "description": "account 1"},
1246
+ {"name": "A2", "description": "account 2"},
1247
+ ],
1248
+ )
1249
+
1250
+ :param table_name: Name of the table
1251
+ :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
1252
+ objects within
1253
+ the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
1254
+ objects
1255
+ and :class:`~sqlalchemy.schema.Index` objects.
1256
+ :param schema: Optional schema name to operate within. To control
1257
+ quoting of the schema outside of the default behavior, use
1258
+ the SQLAlchemy construct
1259
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1260
+ :param if_not_exists: If True, adds IF NOT EXISTS operator when
1261
+ creating the new table.
1262
+
1263
+ .. versionadded:: 1.13.3
1264
+ :param \**kw: Other keyword arguments are passed to the underlying
1265
+ :class:`sqlalchemy.schema.Table` object created for the command.
1266
+
1267
+ :return: the :class:`~sqlalchemy.schema.Table` object corresponding
1268
+ to the parameters given.
1269
+
1270
+ """ # noqa: E501
1271
+ ...
1272
+
1273
+ def create_table_comment(
1274
+ self,
1275
+ table_name: str,
1276
+ comment: Optional[str],
1277
+ *,
1278
+ existing_comment: Optional[str] = None,
1279
+ schema: Optional[str] = None,
1280
+ ) -> None:
1281
+ """Emit a COMMENT ON operation to set the comment for a table.
1282
+
1283
+ :param table_name: string name of the target table.
1284
+ :param comment: string value of the comment being registered against
1285
+ the specified table.
1286
+ :param existing_comment: String value of a comment
1287
+ already registered on the specified table, used within autogenerate
1288
+ so that the operation is reversible, but not required for direct
1289
+ use.
1290
+
1291
+ .. seealso::
1292
+
1293
+ :meth:`.Operations.drop_table_comment`
1294
+
1295
+ :paramref:`.Operations.alter_column.comment`
1296
+
1297
+ """ # noqa: E501
1298
+ ...
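# A minimal sketch of a call (table name and comment text are
# hypothetical); existing_comment is only needed so that autogenerate
# can reverse the operation:
#
#     op.create_table_comment(
#         "account",
#         "One row per customer account",
#     )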
1299
+
1300
+ def create_unique_constraint(
1301
+ self,
1302
+ constraint_name: Optional[str],
1303
+ table_name: str,
1304
+ columns: Sequence[str],
1305
+ *,
1306
+ schema: Optional[str] = None,
1307
+ **kw: Any,
1308
+ ) -> Any:
1309
+ """Issue a "create unique constraint" instruction using the
1310
+ current migration context.
1311
+
1312
+ e.g.::
1313
+
1314
+ from alembic import op
1315
+ op.create_unique_constraint("uq_user_name", "user", ["name"])
1316
+
1317
+ This internally generates a :class:`~sqlalchemy.schema.Table` object
1318
+ containing the necessary columns, then generates a new
1319
+ :class:`~sqlalchemy.schema.UniqueConstraint`
1320
+ object which it then associates with the
1321
+ :class:`~sqlalchemy.schema.Table`.
1322
+ Any event listeners associated with this action will be fired
1323
+ off normally. The :class:`~sqlalchemy.schema.AddConstraint`
1324
+ construct is ultimately used to generate the ALTER statement.
1325
+
1326
+ :param constraint_name: Name of the unique constraint. The name is necessary
1327
+ so that an ALTER statement can be emitted. For setups that
1328
+ use an automated naming scheme such as that described at
1329
+ :ref:`sqla:constraint_naming_conventions`,
1330
+ ``constraint_name`` here can be ``None``, as the event listener will
1331
+ apply the name to the constraint object when it is associated
1332
+ with the table.
1333
+ :param table_name: String name of the source table.
1334
+ :param columns: a list of string column names in the
1335
+ source table.
1336
+ :param deferrable: optional bool. If set, emit DEFERRABLE or
1337
+ NOT DEFERRABLE when issuing DDL for this constraint.
1338
+ :param initially: optional string. If set, emit INITIALLY <value>
1339
+ when issuing DDL for this constraint.
1340
+ :param schema: Optional schema name to operate within. To control
1341
+ quoting of the schema outside of the default behavior, use
1342
+ the SQLAlchemy construct
1343
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1344
+
1345
+ """ # noqa: E501
1346
+ ...
1347
+
1348
+ def drop_column(
1349
+ self,
1350
+ table_name: str,
1351
+ column_name: str,
1352
+ *,
1353
+ schema: Optional[str] = None,
1354
+ **kw: Any,
1355
+ ) -> None:
1356
+ """Issue a "drop column" instruction using the current
1357
+ migration context.
1358
+
1359
+ e.g.::
1360
+
1361
+ drop_column("organization", "account_id")
1362
+
1363
+ :param table_name: name of table
1364
+ :param column_name: name of column
1365
+ :param schema: Optional schema name to operate within. To control
1366
+ quoting of the schema outside of the default behavior, use
1367
+ the SQLAlchemy construct
1368
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1369
+ :param if_exists: If True, adds IF EXISTS operator when
1370
+ dropping the column, for compatible dialects.
1371
+
1372
+ .. versionadded:: 1.16.0
1373
+
1374
+ :param mssql_drop_check: Optional boolean. When ``True``, on
1375
+ Microsoft SQL Server only, first
1376
+ drop the CHECK constraint on the column using a
1377
+ SQL-script-compatible
1378
+ block that selects into a @variable from sys.check_constraints,
1379
+ then exec's a separate DROP CONSTRAINT for that constraint.
1380
+ :param mssql_drop_default: Optional boolean. When ``True``, on
1381
+ Microsoft SQL Server only, first
1382
+ drop the DEFAULT constraint on the column using a
1383
+ SQL-script-compatible
1384
+ block that selects into a @variable from sys.default_constraints,
1385
+ then exec's a separate DROP CONSTRAINT for that default.
1386
+ :param mssql_drop_foreign_key: Optional boolean. When ``True``, on
1387
+ Microsoft SQL Server only, first
1388
+ drop a single FOREIGN KEY constraint on the column using a
1389
+ SQL-script-compatible
1390
+ block that selects into a @variable from
1391
+ sys.foreign_keys/sys.foreign_key_columns,
1392
+ then exec's a separate DROP CONSTRAINT for that constraint. At the
1393
+ moment this only works if the column has exactly one FK constraint
1394
+ which refers to it.
1395
+ """ # noqa: E501
1396
+ ...
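# A hedged sketch of the SQL Server flags above (table and column names
# are hypothetical); mssql_drop_default first locates and drops the
# DEFAULT constraint, then drops the column itself:
#
#     op.drop_column("account", "legacy_flag", mssql_drop_default=True)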
1397
+
1398
+ def drop_constraint(
1399
+ self,
1400
+ constraint_name: str,
1401
+ table_name: str,
1402
+ type_: Optional[str] = None,
1403
+ *,
1404
+ schema: Optional[str] = None,
1405
+ if_exists: Optional[bool] = None,
1406
+ ) -> None:
1407
+ r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
1408
+
1409
+ :param constraint_name: name of the constraint.
1410
+ :param table_name: table name.
1411
+ :param type\_: optional, required on MySQL. can be
1412
+ 'foreignkey', 'primary', 'unique', or 'check'.
1413
+ :param schema: Optional schema name to operate within. To control
1414
+ quoting of the schema outside of the default behavior, use
1415
+ the SQLAlchemy construct
1416
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1417
+ :param if_exists: If True, adds IF EXISTS operator when
1418
+ dropping the constraint
1419
+
1420
+ .. versionadded:: 1.16.0
1421
+
1422
+ """ # noqa: E501
1423
+ ...
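# A hedged example (constraint and table names are hypothetical);
# type_ is required on MySQL, as noted above:
#
#     op.drop_constraint("fk_account_user_id", "account", type_="foreignkey")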
1424
+
1425
+ def drop_index(
1426
+ self,
1427
+ index_name: str,
1428
+ table_name: Optional[str] = None,
1429
+ *,
1430
+ schema: Optional[str] = None,
1431
+ if_exists: Optional[bool] = None,
1432
+ **kw: Any,
1433
+ ) -> None:
1434
+ r"""Issue a "drop index" instruction using the current
1435
+ migration context.
1436
+
1437
+ e.g.::
1438
+
1439
+ drop_index("accounts")
1440
+
1441
+ :param index_name: name of the index.
1442
+ :param table_name: name of the owning table. Some
1443
+ backends such as Microsoft SQL Server require this.
1444
+ :param schema: Optional schema name to operate within. To control
1445
+ quoting of the schema outside of the default behavior, use
1446
+ the SQLAlchemy construct
1447
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1448
+
1449
+ :param if_exists: If True, adds IF EXISTS operator when
1450
+ dropping the index.
1451
+
1452
+ .. versionadded:: 1.12.0
1453
+
1454
+ :param \**kw: Additional keyword arguments not mentioned above are
1455
+ dialect specific, and passed in the form
1456
+ ``<dialectname>_<argname>``.
1457
+ See the documentation regarding an individual dialect at
1458
+ :ref:`dialect_toplevel` for detail on documented arguments.
1459
+
1460
+ """ # noqa: E501
1461
+ ...
1462
+
1463
+ def drop_table(
1464
+ self,
1465
+ table_name: str,
1466
+ *,
1467
+ schema: Optional[str] = None,
1468
+ if_exists: Optional[bool] = None,
1469
+ **kw: Any,
1470
+ ) -> None:
1471
+ r"""Issue a "drop table" instruction using the current
1472
+ migration context.
1473
+
1474
+
1475
+ e.g.::
1476
+
1477
+ drop_table("accounts")
1478
+
1479
+ :param table_name: Name of the table
1480
+ :param schema: Optional schema name to operate within. To control
1481
+ quoting of the schema outside of the default behavior, use
1482
+ the SQLAlchemy construct
1483
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1484
+ :param if_exists: If True, adds IF EXISTS operator when
1485
+ dropping the table.
1486
+
1487
+ .. versionadded:: 1.13.3
1488
+ :param \**kw: Other keyword arguments are passed to the underlying
1489
+ :class:`sqlalchemy.schema.Table` object created for the command.
1490
+
1491
+ """ # noqa: E501
1492
+ ...
1493
+
1494
+ def drop_table_comment(
1495
+ self,
1496
+ table_name: str,
1497
+ *,
1498
+ existing_comment: Optional[str] = None,
1499
+ schema: Optional[str] = None,
1500
+ ) -> None:
1501
+ """Issue a "drop table comment" operation to
1502
+ remove an existing comment set on a table.
1503
+
1504
+ :param table_name: string name of the target table.
1505
+ :param existing_comment: An optional string value of a comment already
1506
+ registered on the specified table.
1507
+
1508
+ .. seealso::
1509
+
1510
+ :meth:`.Operations.create_table_comment`
1511
+
1512
+ :paramref:`.Operations.alter_column.comment`
1513
+
1514
+ """ # noqa: E501
1515
+ ...
1516
+
1517
+ def execute(
1518
+ self,
1519
+ sqltext: Union[Executable, str],
1520
+ *,
1521
+ execution_options: Optional[dict[str, Any]] = None,
1522
+ ) -> None:
1523
+ r"""Execute the given SQL using the current migration context.
1524
+
1525
+ The given SQL can be a plain string, e.g.::
1526
+
1527
+ op.execute("INSERT INTO table (foo) VALUES ('some value')")
1528
+
1529
+ Or it can be any kind of Core SQL Expression construct, such as
1530
+ below where we use an update construct::
1531
+
1532
+ from sqlalchemy.sql import table, column
1533
+ from sqlalchemy import String
1534
+ from alembic import op
1535
+
1536
+ account = table("account", column("name", String))
1537
+ op.execute(
1538
+ account.update()
1539
+ .where(account.c.name == op.inline_literal("account 1"))
1540
+ .values({"name": op.inline_literal("account 2")})
1541
+ )
1542
+
1543
+ Above, we made use of the SQLAlchemy
1544
+ :func:`sqlalchemy.sql.expression.table` and
1545
+ :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
1546
+ ad-hoc table construct just for our UPDATE statement. A full
1547
+ :class:`~sqlalchemy.schema.Table` construct of course works perfectly
1548
+ fine as well, though note it's a recommended practice to at least
1549
+ ensure the definition of a table is self-contained within the migration
1550
+ script, rather than imported from a module that may break compatibility
1551
+ with older migrations.
1552
+
1553
+ In a SQL script context, the statement is emitted directly to the
1554
+ output stream. There is *no* return result, however, as this
1555
+ function is oriented towards generating a change script
1556
+ that can run in "offline" mode. Additionally, parameterized
1557
+ statements are discouraged here, as they *will not work* in offline
1558
+ mode. Above, we use :meth:`.inline_literal` where parameters are
1559
+ to be used.
1560
+
1561
+ For full interaction with a connected database where parameters can
1562
+ also be used normally, use the "bind" available from the context::
1563
+
1564
+ from alembic import op
1565
+
1566
+ connection = op.get_bind()
1567
+
1568
+ connection.execute(
1569
+ account.update()
1570
+ .where(account.c.name == "account 1")
1571
+ .values({"name": "account 2"})
1572
+ )
1573
+
1574
+ Additionally, when passing the statement as a plain string, it is first
1575
+ coerced into a :func:`sqlalchemy.sql.expression.text` construct
1576
+ before being passed along. In the less likely case that the
1577
+ literal SQL string contains a colon, it must be escaped with a
1578
+ backslash, as::
1579
+
1580
+ op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
1581
+
1582
+
1583
+ :param sqltext: Any legal SQLAlchemy expression, including:
1584
+
1585
+ * a string
1586
+ * a :func:`sqlalchemy.sql.expression.text` construct.
1587
+ * a :func:`sqlalchemy.sql.expression.insert` construct.
1588
+ * a :func:`sqlalchemy.sql.expression.update` construct.
1589
+ * a :func:`sqlalchemy.sql.expression.delete` construct.
1590
+ * Any "executable" described in SQLAlchemy Core documentation,
1591
+ noting that no result set is returned.
1592
+
1593
+ .. note:: when passing a plain string, the statement is coerced into
1594
+ a :func:`sqlalchemy.sql.expression.text` construct. This construct
1595
+ considers symbols with colons, e.g. ``:foo`` to be bound parameters.
1596
+ To avoid this, ensure that colon symbols are escaped, e.g.
1597
+ ``\:foo``.
1598
+
1599
+ :param execution_options: Optional dictionary of
1600
+ execution options, will be passed to
1601
+ :meth:`sqlalchemy.engine.Connection.execution_options`.
1602
+ """ # noqa: E501
1603
+ ...
1604
+
1605
+ def rename_table(
1606
+ self,
1607
+ old_table_name: str,
1608
+ new_table_name: str,
1609
+ *,
1610
+ schema: Optional[str] = None,
1611
+ ) -> None:
1612
+ """Emit an ALTER TABLE to rename a table.
1613
+
1614
+ :param old_table_name: old name.
1615
+ :param new_table_name: new name.
1616
+ :param schema: Optional schema name to operate within. To control
1617
+ quoting of the schema outside of the default behavior, use
1618
+ the SQLAlchemy construct
1619
+ :class:`~sqlalchemy.sql.elements.quoted_name`.
1620
+
1621
+ """ # noqa: E501
1622
+ ...
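# A minimal sketch (both table names are hypothetical):
#
#     op.rename_table("account", "customer_account")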
1623
+
1624
+ # END STUB FUNCTIONS: op_cls
1625
+
1626
+
1627
+ class BatchOperations(AbstractOperations):
1628
+ """Modifies the interface :class:`.Operations` for batch mode.
1629
+
1630
+ This basically omits the ``table_name`` and ``schema`` parameters
1631
+ from associated methods, as these are a given when running under batch
1632
+ mode.
1633
+
1634
+ .. seealso::
1635
+
1636
+ :meth:`.Operations.batch_alter_table`
1637
+
1638
+ Note that as of 0.8, most of the methods on this class are produced
1639
+ dynamically using the :meth:`.Operations.register_operation`
1640
+ method.
1641
+
1642
+ """
1643
+
1644
+ impl: BatchOperationsImpl
1645
+
1646
+ def _noop(self, operation: Any) -> NoReturn:
1647
+ raise NotImplementedError(
1648
+ "The %s method does not apply to a batch table alter operation."
1649
+ % operation
1650
+ )
1651
+
1652
+ if TYPE_CHECKING:
1653
+ # START STUB FUNCTIONS: batch_op
1654
+ # ### the following stubs are generated by tools/write_pyi.py ###
1655
+ # ### do not edit ###
1656
+
1657
+ def add_column(
1658
+ self,
1659
+ column: Column[Any],
1660
+ *,
1661
+ insert_before: Optional[str] = None,
1662
+ insert_after: Optional[str] = None,
1663
+ if_not_exists: Optional[bool] = None,
1664
+ ) -> None:
1665
+ """Issue an "add column" instruction using the current
1666
+ batch migration context.
1667
+
1668
+ .. seealso::
1669
+
1670
+ :meth:`.Operations.add_column`
1671
+
1672
+ """ # noqa: E501
1673
+ ...
1674
+
1675
+ def alter_column(
1676
+ self,
1677
+ column_name: str,
1678
+ *,
1679
+ nullable: Optional[bool] = None,
1680
+ comment: Union[str, Literal[False], None] = False,
1681
+ server_default: Any = False,
1682
+ new_column_name: Optional[str] = None,
1683
+ type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None,
1684
+ existing_type: Union[
1685
+ TypeEngine[Any], Type[TypeEngine[Any]], None
1686
+ ] = None,
1687
+ existing_server_default: Union[
1688
+ str, bool, Identity, Computed, None
1689
+ ] = False,
1690
+ existing_nullable: Optional[bool] = None,
1691
+ existing_comment: Optional[str] = None,
1692
+ insert_before: Optional[str] = None,
1693
+ insert_after: Optional[str] = None,
1694
+ **kw: Any,
1695
+ ) -> None:
1696
+ """Issue an "alter column" instruction using the current
1697
+ batch migration context.
1698
+
1699
+ Parameters are the same as that of :meth:`.Operations.alter_column`,
1700
+ as well as the following option(s):
1701
+
1702
+ :param insert_before: String name of an existing column which this
1703
+ column should be placed before, when creating the new table.
1704
+
1705
+ :param insert_after: String name of an existing column which this
1706
+ column should be placed after, when creating the new table. If
1707
+ both :paramref:`.BatchOperations.alter_column.insert_before`
1708
+ and :paramref:`.BatchOperations.alter_column.insert_after` are
1709
+ omitted, the column is inserted after the last existing column
1710
+ in the table.
1711
+
1712
+ .. seealso::
1713
+
1714
+ :meth:`.Operations.alter_column`
1715
+
1716
+
1717
+ """ # noqa: E501
1718
+ ...
1719
+
1720
+ def create_check_constraint(
1721
+ self,
1722
+ constraint_name: str,
1723
+ condition: Union[str, ColumnElement[bool], TextClause],
1724
+ **kw: Any,
1725
+ ) -> None:
1726
+ """Issue a "create check constraint" instruction using the
1727
+ current batch migration context.
1728
+
1729
+ The batch form of this call omits the ``source`` and ``schema``
1730
+ arguments from the call.
1731
+
1732
+ .. seealso::
1733
+
1734
+ :meth:`.Operations.create_check_constraint`
1735
+
1736
+ """ # noqa: E501
1737
+ ...
1738
+
1739
+ def create_exclude_constraint(
1740
+ self, constraint_name: str, *elements: Any, **kw: Any
1741
+ ) -> Optional[Table]:
1742
+ """Issue a "create exclude constraint" instruction using the
1743
+ current batch migration context.
1744
+
1745
+ .. note:: This method is Postgresql specific, and additionally
1746
+ requires at least SQLAlchemy 1.0.
1747
+
1748
+ .. seealso::
1749
+
1750
+ :meth:`.Operations.create_exclude_constraint`
1751
+
1752
+ """ # noqa: E501
1753
+ ...
1754
+
1755
+ def create_foreign_key(
1756
+ self,
1757
+ constraint_name: Optional[str],
1758
+ referent_table: str,
1759
+ local_cols: List[str],
1760
+ remote_cols: List[str],
1761
+ *,
1762
+ referent_schema: Optional[str] = None,
1763
+ onupdate: Optional[str] = None,
1764
+ ondelete: Optional[str] = None,
1765
+ deferrable: Optional[bool] = None,
1766
+ initially: Optional[str] = None,
1767
+ match: Optional[str] = None,
1768
+ **dialect_kw: Any,
1769
+ ) -> None:
1770
+ """Issue a "create foreign key" instruction using the
1771
+ current batch migration context.
1772
+
1773
+ The batch form of this call omits the ``source`` and ``source_schema``
1774
+ arguments from the call.
1775
+
1776
+ e.g.::
1777
+
1778
+ with batch_alter_table("address") as batch_op:
1779
+ batch_op.create_foreign_key(
1780
+ "fk_user_address",
1781
+ "user",
1782
+ ["user_id"],
1783
+ ["id"],
1784
+ )
1785
+
1786
+ .. seealso::
1787
+
1788
+ :meth:`.Operations.create_foreign_key`
1789
+
1790
+ """ # noqa: E501
1791
+ ...
1792
+
1793
+ def create_index(
1794
+ self, index_name: str, columns: List[str], **kw: Any
1795
+ ) -> None:
1796
+ """Issue a "create index" instruction using the
1797
+ current batch migration context.
1798
+
1799
+ .. seealso::
1800
+
1801
+ :meth:`.Operations.create_index`
1802
+
1803
+ """ # noqa: E501
1804
+ ...
1805
+
1806
+ def create_primary_key(
1807
+ self, constraint_name: Optional[str], columns: List[str]
1808
+ ) -> None:
1809
+ """Issue a "create primary key" instruction using the
1810
+ current batch migration context.
1811
+
1812
+ The batch form of this call omits the ``table_name`` and ``schema``
1813
+ arguments from the call.
1814
+
1815
+ .. seealso::
1816
+
1817
+ :meth:`.Operations.create_primary_key`
1818
+
1819
+ """ # noqa: E501
1820
+ ...
1821
+
1822
+ def create_table_comment(
1823
+ self,
1824
+ comment: Optional[str],
1825
+ *,
1826
+ existing_comment: Optional[str] = None,
1827
+ ) -> None:
1828
+ """Emit a COMMENT ON operation to set the comment for a table
1829
+ using the current batch migration context.
1830
+
1831
+ :param comment: string value of the comment being registered against
1832
+ the specified table.
1833
+ :param existing_comment: String value of a comment
1834
+ already registered on the specified table, used within autogenerate
1835
+ so that the operation is reversible, but not required for direct
1836
+ use.
1837
+
1838
+ """ # noqa: E501
1839
+ ...
1840
+
1841
+ def create_unique_constraint(
1842
+ self, constraint_name: str, columns: Sequence[str], **kw: Any
1843
+ ) -> Any:
1844
+ """Issue a "create unique constraint" instruction using the
1845
+ current batch migration context.
1846
+
1847
+ The batch form of this call omits the ``source`` and ``schema``
1848
+ arguments from the call.
1849
+
1850
+ .. seealso::
1851
+
1852
+ :meth:`.Operations.create_unique_constraint`
1853
+
1854
+ """ # noqa: E501
1855
+ ...
1856
+
1857
+ def drop_column(self, column_name: str, **kw: Any) -> None:
1858
+ """Issue a "drop column" instruction using the current
1859
+ batch migration context.
1860
+
1861
+ .. seealso::
1862
+
1863
+ :meth:`.Operations.drop_column`
1864
+
1865
+ """ # noqa: E501
1866
+ ...
1867
+
1868
+ def drop_constraint(
1869
+ self, constraint_name: str, type_: Optional[str] = None
1870
+ ) -> None:
1871
+ """Issue a "drop constraint" instruction using the
1872
+ current batch migration context.
1873
+
1874
+ The batch form of this call omits the ``table_name`` and ``schema``
1875
+ arguments from the call.
1876
+
1877
+ .. seealso::
1878
+
1879
+ :meth:`.Operations.drop_constraint`
1880
+
1881
+ """ # noqa: E501
1882
+ ...
1883
+
1884
+ def drop_index(self, index_name: str, **kw: Any) -> None:
1885
+ """Issue a "drop index" instruction using the
1886
+ current batch migration context.
1887
+
1888
+ .. seealso::
1889
+
1890
+ :meth:`.Operations.drop_index`
1891
+
1892
+ """ # noqa: E501
1893
+ ...
1894
+
1895
+ def drop_table_comment(
1896
+ self, *, existing_comment: Optional[str] = None
1897
+ ) -> None:
1898
+ """Issue a "drop table comment" operation to
1899
+ remove an existing comment set on a table using the current
1900
+ batch operations context.
1901
+
1902
+ :param existing_comment: An optional string value of a comment already
1903
+ registered on the specified table.
1904
+
1905
+ """ # noqa: E501
1906
+ ...
1907
+
1908
+ def execute(
1909
+ self,
1910
+ sqltext: Union[Executable, str],
1911
+ *,
1912
+ execution_options: Optional[dict[str, Any]] = None,
1913
+ ) -> None:
1914
+ """Execute the given SQL using the current migration context.
1915
+
1916
+ .. seealso::
1917
+
1918
+ :meth:`.Operations.execute`
1919
+
1920
+ """ # noqa: E501
1921
+ ...
1922
+
1923
+ # END STUB FUNCTIONS: batch_op
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/batch.py ADDED
@@ -0,0 +1,718 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ from typing import Any
7
+ from typing import Dict
8
+ from typing import List
9
+ from typing import Optional
10
+ from typing import Tuple
11
+ from typing import TYPE_CHECKING
12
+ from typing import Union
13
+
14
+ from sqlalchemy import CheckConstraint
15
+ from sqlalchemy import Column
16
+ from sqlalchemy import ForeignKeyConstraint
17
+ from sqlalchemy import Index
18
+ from sqlalchemy import MetaData
19
+ from sqlalchemy import PrimaryKeyConstraint
20
+ from sqlalchemy import schema as sql_schema
21
+ from sqlalchemy import select
22
+ from sqlalchemy import Table
23
+ from sqlalchemy import types as sqltypes
24
+ from sqlalchemy.sql.schema import SchemaEventTarget
25
+ from sqlalchemy.util import OrderedDict
26
+ from sqlalchemy.util import topological
27
+
28
+ from ..util import exc
29
+ from ..util.sqla_compat import _columns_for_constraint
30
+ from ..util.sqla_compat import _copy
31
+ from ..util.sqla_compat import _copy_expression
32
+ from ..util.sqla_compat import _ensure_scope_for_ddl
33
+ from ..util.sqla_compat import _fk_is_self_referential
34
+ from ..util.sqla_compat import _idx_table_bound_expressions
35
+ from ..util.sqla_compat import _is_type_bound
36
+ from ..util.sqla_compat import _remove_column_from_collection
37
+ from ..util.sqla_compat import _resolve_for_variant
38
+ from ..util.sqla_compat import constraint_name_defined
39
+ from ..util.sqla_compat import constraint_name_string
40
+
41
+ if TYPE_CHECKING:
42
+ from typing import Literal
43
+
44
+ from sqlalchemy.engine import Dialect
45
+ from sqlalchemy.sql.elements import ColumnClause
46
+ from sqlalchemy.sql.elements import quoted_name
47
+ from sqlalchemy.sql.functions import Function
48
+ from sqlalchemy.sql.schema import Constraint
49
+ from sqlalchemy.sql.type_api import TypeEngine
50
+
51
+ from ..ddl.impl import DefaultImpl
52
+
53
+
54
+ class BatchOperationsImpl:
55
+ def __init__(
56
+ self,
57
+ operations,
58
+ table_name,
59
+ schema,
60
+ recreate,
61
+ copy_from,
62
+ table_args,
63
+ table_kwargs,
64
+ reflect_args,
65
+ reflect_kwargs,
66
+ naming_convention,
67
+ partial_reordering,
68
+ ):
69
+ self.operations = operations
70
+ self.table_name = table_name
71
+ self.schema = schema
72
+ if recreate not in ("auto", "always", "never"):
73
+ raise ValueError(
74
+ "recreate may be one of 'auto', 'always', or 'never'."
75
+ )
76
+ self.recreate = recreate
77
+ self.copy_from = copy_from
78
+ self.table_args = table_args
79
+ self.table_kwargs = dict(table_kwargs)
80
+ self.reflect_args = reflect_args
81
+ self.reflect_kwargs = dict(reflect_kwargs)
82
+ self.reflect_kwargs.setdefault(
83
+ "listeners", list(self.reflect_kwargs.get("listeners", ()))
84
+ )
85
+ self.reflect_kwargs["listeners"].append(
86
+ ("column_reflect", operations.impl.autogen_column_reflect)
87
+ )
88
+ self.naming_convention = naming_convention
89
+ self.partial_reordering = partial_reordering
90
+ self.batch = []
91
+
92
+ @property
93
+ def dialect(self) -> Dialect:
94
+ return self.operations.impl.dialect
95
+
96
+ @property
97
+ def impl(self) -> DefaultImpl:
98
+ return self.operations.impl
99
+
100
+ def _should_recreate(self) -> bool:
101
+ if self.recreate == "auto":
102
+ return self.operations.impl.requires_recreate_in_batch(self)
103
+ elif self.recreate == "always":
104
+ return True
105
+ else:
106
+ return False
107
+
108
+ def flush(self) -> None:
109
+ should_recreate = self._should_recreate()
110
+
111
+ with _ensure_scope_for_ddl(self.impl.connection):
112
+ if not should_recreate:
113
+ for opname, arg, kw in self.batch:
114
+ fn = getattr(self.operations.impl, opname)
115
+ fn(*arg, **kw)
116
+ else:
117
+ if self.naming_convention:
118
+ m1 = MetaData(naming_convention=self.naming_convention)
119
+ else:
120
+ m1 = MetaData()
121
+
122
+ if self.copy_from is not None:
123
+ existing_table = self.copy_from
124
+ reflected = False
125
+ else:
126
+ if self.operations.migration_context.as_sql:
127
+ raise exc.CommandError(
128
+ f"This operation cannot proceed in --sql mode; "
129
+ f"batch mode with dialect "
130
+ f"{self.operations.migration_context.dialect.name} " # noqa: E501
131
+ f"requires a live database connection with which "
132
+ f'to reflect the table "{self.table_name}". '
133
+ f"To generate a batch SQL migration script using "
134
+ "table "
135
+ '"move and copy", a complete Table object '
136
+ f'should be passed to the "copy_from" argument '
137
+ "of the batch_alter_table() method so that table "
138
+ "reflection can be skipped."
139
+ )
140
+
141
+ existing_table = Table(
142
+ self.table_name,
143
+ m1,
144
+ schema=self.schema,
145
+ autoload_with=self.operations.get_bind(),
146
+ *self.reflect_args,
147
+ **self.reflect_kwargs,
148
+ )
149
+ reflected = True
150
+
151
+ batch_impl = ApplyBatchImpl(
152
+ self.impl,
153
+ existing_table,
154
+ self.table_args,
155
+ self.table_kwargs,
156
+ reflected,
157
+ partial_reordering=self.partial_reordering,
158
+ )
159
+ for opname, arg, kw in self.batch:
160
+ fn = getattr(batch_impl, opname)
161
+ fn(*arg, **kw)
162
+
163
+ batch_impl._create(self.impl)
164
+
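# Per the error message raised above, offline (--sql) batch migrations
# must skip reflection by passing a complete Table as copy_from; a
# hedged sketch with hypothetical names:
#
#     from sqlalchemy import Column, Integer, MetaData, String, Table
#
#     account = Table(
#         "account",
#         MetaData(),
#         Column("id", Integer, primary_key=True),
#         Column("name", String(50)),
#     )
#
#     with op.batch_alter_table("account", copy_from=account) as batch_op:
#         batch_op.alter_column("name", type_=String(100))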
165
+ def alter_column(self, *arg, **kw) -> None:
166
+ self.batch.append(("alter_column", arg, kw))
167
+
168
+ def add_column(self, *arg, **kw) -> None:
169
+ if (
170
+ "insert_before" in kw or "insert_after" in kw
171
+ ) and not self._should_recreate():
172
+ raise exc.CommandError(
173
+ "Can't specify insert_before or insert_after when using "
174
+ "ALTER; please specify recreate='always'"
175
+ )
176
+ self.batch.append(("add_column", arg, kw))
177
+
178
+ def drop_column(self, *arg, **kw) -> None:
179
+ self.batch.append(("drop_column", arg, kw))
180
+
181
+ def add_constraint(self, const: Constraint) -> None:
182
+ self.batch.append(("add_constraint", (const,), {}))
183
+
184
+ def drop_constraint(self, const: Constraint) -> None:
185
+ self.batch.append(("drop_constraint", (const,), {}))
186
+
187
+ def rename_table(self, *arg, **kw):
188
+ self.batch.append(("rename_table", arg, kw))
189
+
190
+ def create_index(self, idx: Index, **kw: Any) -> None:
191
+ self.batch.append(("create_index", (idx,), kw))
192
+
193
+ def drop_index(self, idx: Index, **kw: Any) -> None:
194
+ self.batch.append(("drop_index", (idx,), kw))
195
+
196
+ def create_table_comment(self, table):
197
+ self.batch.append(("create_table_comment", (table,), {}))
198
+
199
+ def drop_table_comment(self, table):
200
+ self.batch.append(("drop_table_comment", (table,), {}))
201
+
202
+ def create_table(self, table):
203
+ raise NotImplementedError("Can't create table in batch mode")
204
+
205
+ def drop_table(self, table):
206
+ raise NotImplementedError("Can't drop table in batch mode")
207
+
208
+ def create_column_comment(self, column):
209
+ self.batch.append(("create_column_comment", (column,), {}))
210
+
211
+
212
+ class ApplyBatchImpl:
213
+ def __init__(
214
+ self,
215
+ impl: DefaultImpl,
216
+ table: Table,
217
+ table_args: tuple,
218
+ table_kwargs: Dict[str, Any],
219
+ reflected: bool,
220
+ partial_reordering: tuple = (),
221
+ ) -> None:
222
+ self.impl = impl
223
+ self.table = table # this is a Table object
224
+ self.table_args = table_args
225
+ self.table_kwargs = table_kwargs
226
+ self.temp_table_name = self._calc_temp_name(table.name)
227
+ self.new_table: Optional[Table] = None
228
+
229
+ self.partial_reordering = partial_reordering # tuple of tuples
230
+ self.add_col_ordering: Tuple[
231
+ Tuple[str, str], ...
232
+ ] = () # tuple of tuples
233
+
234
+ self.column_transfers = OrderedDict(
235
+ (c.name, {"expr": c}) for c in self.table.c
236
+ )
237
+ self.existing_ordering = list(self.column_transfers)
238
+
239
+ self.reflected = reflected
240
+ self._grab_table_elements()
241
+
242
+ @classmethod
243
+ def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str:
244
+ return ("_alembic_tmp_%s" % tablename)[0:50]
245
+
246
+ def _grab_table_elements(self) -> None:
247
+ schema = self.table.schema
248
+ self.columns: Dict[str, Column[Any]] = OrderedDict()
249
+ for c in self.table.c:
250
+ c_copy = _copy(c, schema=schema)
251
+ c_copy.unique = c_copy.index = False
252
+ # ensure that the type object was copied,
253
+ # as we may need to modify it in-place
254
+ if isinstance(c.type, SchemaEventTarget):
255
+ assert c_copy.type is not c.type
256
+ self.columns[c.name] = c_copy
257
+ self.named_constraints: Dict[str, Constraint] = {}
258
+ self.unnamed_constraints = []
259
+ self.col_named_constraints = {}
260
+ self.indexes: Dict[str, Index] = {}
261
+ self.new_indexes: Dict[str, Index] = {}
262
+
263
+ for const in self.table.constraints:
264
+ if _is_type_bound(const):
265
+ continue
266
+ elif (
267
+ self.reflected
268
+ and isinstance(const, CheckConstraint)
269
+ and not const.name
270
+ ):
271
+ # TODO: we are skipping unnamed reflected CheckConstraint
272
+ # because
273
+ # we have no way to determine _is_type_bound() for these.
274
+ pass
275
+ elif constraint_name_string(const.name):
276
+ self.named_constraints[const.name] = const
277
+ else:
278
+ self.unnamed_constraints.append(const)
279
+
280
+ if not self.reflected:
281
+ for col in self.table.c:
282
+ for const in col.constraints:
283
+ if const.name:
284
+ self.col_named_constraints[const.name] = (col, const)
285
+
286
+ for idx in self.table.indexes:
287
+ self.indexes[idx.name] = idx # type: ignore[index]
288
+
289
+ for k in self.table.kwargs:
290
+ self.table_kwargs.setdefault(k, self.table.kwargs[k])
291
+
292
+ def _adjust_self_columns_for_partial_reordering(self) -> None:
293
+ pairs = set()
294
+
295
+ col_by_idx = list(self.columns)
296
+
297
+ if self.partial_reordering:
298
+ for tuple_ in self.partial_reordering:
299
+ for index, elem in enumerate(tuple_):
300
+ if index > 0:
301
+ pairs.add((tuple_[index - 1], elem))
302
+ else:
303
+ for index, elem in enumerate(self.existing_ordering):
304
+ if index > 0:
305
+ pairs.add((col_by_idx[index - 1], elem))
306
+
307
+ pairs.update(self.add_col_ordering)
308
+
309
+ # this can happen if some columns were dropped and not removed
310
+ # from existing_ordering. this should be prevented already, but
311
+ # conservatively making sure this didn't happen
312
+ pairs_list = [p for p in pairs if p[0] != p[1]]
313
+
314
+ sorted_ = list(
315
+ topological.sort(pairs_list, col_by_idx, deterministic_order=True)
316
+ )
317
+ self.columns = OrderedDict((k, self.columns[k]) for k in sorted_)
318
+ self.column_transfers = OrderedDict(
319
+ (k, self.column_transfers[k]) for k in sorted_
320
+ )
321
+
322
+ def _transfer_elements_to_new_table(self) -> None:
323
+ assert self.new_table is None, "Can only create new table once"
324
+
325
+ m = MetaData()
326
+ schema = self.table.schema
327
+
328
+ if self.partial_reordering or self.add_col_ordering:
329
+ self._adjust_self_columns_for_partial_reordering()
330
+
331
+ self.new_table = new_table = Table(
332
+ self.temp_table_name,
333
+ m,
334
+ *(list(self.columns.values()) + list(self.table_args)),
335
+ schema=schema,
336
+ **self.table_kwargs,
337
+ )
338
+
339
+ for const in (
340
+ list(self.named_constraints.values()) + self.unnamed_constraints
341
+ ):
342
+ const_columns = {c.key for c in _columns_for_constraint(const)}
343
+
344
+ if not const_columns.issubset(self.column_transfers):
345
+ continue
346
+
347
+ const_copy: Constraint
348
+ if isinstance(const, ForeignKeyConstraint):
349
+ if _fk_is_self_referential(const):
350
+ # for self-referential constraint, refer to the
351
+ # *original* table name, and not _alembic_batch_temp.
352
+ # This is consistent with how we're handling
353
+ # FK constraints from other tables; we assume SQLite
354
+ # no foreign keys just keeps the names unchanged, so
355
+ # when we rename back, they match again.
356
+ const_copy = _copy(
357
+ const, schema=schema, target_table=self.table
358
+ )
359
+ else:
360
+ # "target_table" for ForeignKeyConstraint.copy() is
361
+ # only used if the FK is detected as being
362
+ # self-referential, which we are handling above.
363
+ const_copy = _copy(const, schema=schema)
364
+ else:
365
+ const_copy = _copy(
366
+ const, schema=schema, target_table=new_table
367
+ )
368
+ if isinstance(const, ForeignKeyConstraint):
369
+ self._setup_referent(m, const)
370
+ new_table.append_constraint(const_copy)
371
+
372
+ def _gather_indexes_from_both_tables(self) -> List[Index]:
373
+ assert self.new_table is not None
374
+ idx: List[Index] = []
375
+
376
+ for idx_existing in self.indexes.values():
377
+ # this is a lift-and-move from Table.to_metadata
378
+
379
+ if idx_existing._column_flag:
380
+ continue
381
+
382
+ idx_copy = Index(
383
+ idx_existing.name,
384
+ unique=idx_existing.unique,
385
+ *[
386
+ _copy_expression(expr, self.new_table)
387
+ for expr in _idx_table_bound_expressions(idx_existing)
388
+ ],
389
+ _table=self.new_table,
390
+ **idx_existing.kwargs,
391
+ )
392
+ idx.append(idx_copy)
393
+
394
+ for index in self.new_indexes.values():
395
+ idx.append(
396
+ Index(
397
+ index.name,
398
+ unique=index.unique,
399
+ *[self.new_table.c[col] for col in index.columns.keys()],
400
+ **index.kwargs,
401
+ )
402
+ )
403
+ return idx
404
+
405
+ def _setup_referent(
406
+ self, metadata: MetaData, constraint: ForeignKeyConstraint
407
+ ) -> None:
408
+ spec = constraint.elements[0]._get_colspec()
409
+ parts = spec.split(".")
410
+ tname = parts[-2]
411
+ if len(parts) == 3:
412
+ referent_schema = parts[0]
413
+ else:
414
+ referent_schema = None
415
+
416
+ if tname != self.temp_table_name:
417
+ key = sql_schema._get_table_key(tname, referent_schema)
418
+
419
+ def colspec(elem: Any):
420
+ return elem._get_colspec()
421
+
422
+ if key in metadata.tables:
423
+ t = metadata.tables[key]
424
+ for elem in constraint.elements:
425
+ colname = colspec(elem).split(".")[-1]
426
+ if colname not in t.c:
427
+ t.append_column(Column(colname, sqltypes.NULLTYPE))
428
+ else:
429
+ Table(
430
+ tname,
431
+ metadata,
432
+ *[
433
+ Column(n, sqltypes.NULLTYPE)
434
+ for n in [
435
+ colspec(elem).split(".")[-1]
436
+ for elem in constraint.elements
437
+ ]
438
+ ],
439
+ schema=referent_schema,
440
+ )
441
+
442
+ def _create(self, op_impl: DefaultImpl) -> None:
443
+ self._transfer_elements_to_new_table()
444
+
445
+ op_impl.prep_table_for_batch(self, self.table)
446
+ assert self.new_table is not None
447
+ op_impl.create_table(self.new_table)
448
+
449
+ try:
450
+ op_impl._exec(
451
+ self.new_table.insert()
452
+ .inline()
453
+ .from_select(
454
+ list(
455
+ k
456
+ for k, transfer in self.column_transfers.items()
457
+ if "expr" in transfer
458
+ ),
459
+ select(
460
+ *[
461
+ transfer["expr"]
462
+ for transfer in self.column_transfers.values()
463
+ if "expr" in transfer
464
+ ]
465
+ ),
466
+ )
467
+ )
468
+ op_impl.drop_table(self.table)
469
+ except:
470
+ op_impl.drop_table(self.new_table)
471
+ raise
472
+ else:
473
+ op_impl.rename_table(
474
+ self.temp_table_name, self.table.name, schema=self.table.schema
475
+ )
476
+ self.new_table.name = self.table.name
477
+ try:
478
+ for idx in self._gather_indexes_from_both_tables():
479
+ op_impl.create_index(idx)
480
+ finally:
481
+ self.new_table.name = self.temp_table_name
482
+
483
+ def alter_column(
484
+ self,
485
+ table_name: str,
486
+ column_name: str,
487
+ nullable: Optional[bool] = None,
488
+ server_default: Optional[Union[Function[Any], str, bool]] = False,
489
+ name: Optional[str] = None,
490
+ type_: Optional[TypeEngine] = None,
491
+ autoincrement: Optional[Union[bool, Literal["auto"]]] = None,
492
+ comment: Union[str, Literal[False]] = False,
493
+ **kw,
494
+ ) -> None:
495
+ existing = self.columns[column_name]
496
+ existing_transfer: Dict[str, Any] = self.column_transfers[column_name]
497
+ if name is not None and name != column_name:
498
+ # note that we don't change '.key' - we keep referring
499
+ # to the renamed column by its old key in _create(). neat!
500
+ existing.name = name
501
+ existing_transfer["name"] = name
502
+
503
+ existing_type = kw.get("existing_type", None)
504
+ if existing_type:
505
+ resolved_existing_type = _resolve_for_variant(
506
+ kw["existing_type"], self.impl.dialect
507
+ )
508
+
509
+ # pop named constraints for Boolean/Enum for rename
510
+ if (
511
+ isinstance(resolved_existing_type, SchemaEventTarget)
512
+ and resolved_existing_type.name # type:ignore[attr-defined] # noqa E501
513
+ ):
514
+ self.named_constraints.pop(
515
+ resolved_existing_type.name, # type:ignore[attr-defined] # noqa E501
516
+ None,
517
+ )
518
+
519
+ if type_ is not None:
520
+ type_ = sqltypes.to_instance(type_)
521
+ # old type is being discarded so turn off eventing
522
+ # rules. Alternatively we can
523
+ # erase the events set up by this type, but this is simpler.
524
+ # we also ignore the drop_constraint that will come here from
525
+ # Operations.implementation_for(alter_column)
526
+
527
+ if isinstance(existing.type, SchemaEventTarget):
528
+ existing.type._create_events = ( # type:ignore[attr-defined]
529
+ existing.type.create_constraint # type:ignore[attr-defined] # noqa
530
+ ) = False
531
+
532
+ self.impl.cast_for_batch_migrate(
533
+ existing, existing_transfer, type_
534
+ )
535
+
536
+ existing.type = type_
537
+
538
+ # we *dont* however set events for the new type, because
539
+ # alter_column is invoked from
540
+ # Operations.implementation_for(alter_column) which already
541
+ # will emit an add_constraint()
542
+
543
+ if nullable is not None:
544
+ existing.nullable = nullable
545
+ if server_default is not False:
546
+ if server_default is None:
547
+ existing.server_default = None
548
+ else:
549
+ sql_schema.DefaultClause(
550
+ server_default # type: ignore[arg-type]
551
+ )._set_parent(existing)
552
+ if autoincrement is not None:
553
+ existing.autoincrement = bool(autoincrement)
554
+
555
+ if comment is not False:
556
+ existing.comment = comment
557
+
558
+ def _setup_dependencies_for_add_column(
559
+ self,
560
+ colname: str,
561
+ insert_before: Optional[str],
562
+ insert_after: Optional[str],
563
+ ) -> None:
564
+ index_cols = self.existing_ordering
565
+ col_indexes = {name: i for i, name in enumerate(index_cols)}
566
+
567
+ if not self.partial_reordering:
568
+ if insert_after:
569
+ if not insert_before:
570
+ if insert_after in col_indexes:
571
+ # insert after an existing column
572
+ idx = col_indexes[insert_after] + 1
573
+ if idx < len(index_cols):
574
+ insert_before = index_cols[idx]
575
+ else:
576
+ # insert after a column that is also new
577
+ insert_before = dict(self.add_col_ordering)[
578
+ insert_after
579
+ ]
580
+ if insert_before:
581
+ if not insert_after:
582
+ if insert_before in col_indexes:
583
+ # insert before an existing column
584
+ idx = col_indexes[insert_before] - 1
585
+ if idx >= 0:
586
+ insert_after = index_cols[idx]
587
+ else:
588
+ # insert before a column that is also new
589
+ insert_after = {
590
+ b: a for a, b in self.add_col_ordering
591
+ }[insert_before]
592
+
593
+ if insert_before:
594
+ self.add_col_ordering += ((colname, insert_before),)
595
+ if insert_after:
596
+ self.add_col_ordering += ((insert_after, colname),)
597
+
598
+ if (
599
+ not self.partial_reordering
600
+ and not insert_before
601
+ and not insert_after
602
+ and col_indexes
603
+ ):
604
+ self.add_col_ordering += ((index_cols[-1], colname),)
605
+
606
+ def add_column(
607
+ self,
608
+ table_name: str,
609
+ column: Column[Any],
610
+ insert_before: Optional[str] = None,
611
+ insert_after: Optional[str] = None,
612
+ **kw,
613
+ ) -> None:
614
+ self._setup_dependencies_for_add_column(
615
+ column.name, insert_before, insert_after
616
+ )
617
+ # we copy the column because operations.add_column()
618
+ # gives us a Column that is part of a Table already.
619
+ self.columns[column.name] = _copy(column, schema=self.table.schema)
620
+ self.column_transfers[column.name] = {}
621
+
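# A hedged sketch of column placement (names hypothetical); as enforced
# in BatchOperationsImpl.add_column(), insert_before / insert_after
# require the table to be recreated:
#
#     with op.batch_alter_table("account", recreate="always") as batch_op:
#         batch_op.add_column(
#             Column("nickname", String(30)), insert_after="name"
#         )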
622
+ def drop_column(
623
+ self,
624
+ table_name: str,
625
+ column: Union[ColumnClause[Any], Column[Any]],
626
+ **kw,
627
+ ) -> None:
628
+ if column.name in self.table.primary_key.columns:
629
+ _remove_column_from_collection(
630
+ self.table.primary_key.columns, column
631
+ )
632
+ del self.columns[column.name]
633
+ del self.column_transfers[column.name]
634
+ self.existing_ordering.remove(column.name)
635
+
636
+ # pop named constraints for Boolean/Enum for rename
637
+ if (
638
+ "existing_type" in kw
639
+ and isinstance(kw["existing_type"], SchemaEventTarget)
640
+ and kw["existing_type"].name # type:ignore[attr-defined]
641
+ ):
642
+ self.named_constraints.pop(
643
+ kw["existing_type"].name, None # type:ignore[attr-defined]
644
+ )
645
+
646
+ def create_column_comment(self, column):
647
+ """the batch table creation function will issue create_column_comment
648
+ on the real "impl" as part of the create table process.
649
+
650
+ That is, the Column object will have the comment on it already,
651
+ so when it is received by add_column() it will be a normal part of
652
+ the CREATE TABLE and doesn't need an extra step here.
653
+
654
+ """
655
+
656
+ def create_table_comment(self, table):
657
+ """the batch table creation function will issue create_table_comment
658
+ on the real "impl" as part of the create table process.
659
+
660
+ """
661
+
662
+ def drop_table_comment(self, table):
663
+ """the batch table creation function will issue drop_table_comment
664
+ on the real "impl" as part of the create table process.
665
+
666
+ """
667
+
668
+ def add_constraint(self, const: Constraint) -> None:
669
+ if not constraint_name_defined(const.name):
670
+ raise ValueError("Constraint must have a name")
671
+ if isinstance(const, sql_schema.PrimaryKeyConstraint):
672
+ if self.table.primary_key in self.unnamed_constraints:
673
+ self.unnamed_constraints.remove(self.table.primary_key)
674
+
675
+ if constraint_name_string(const.name):
676
+ self.named_constraints[const.name] = const
677
+ else:
678
+ self.unnamed_constraints.append(const)
679
+
680
+ def drop_constraint(self, const: Constraint) -> None:
681
+ if not const.name:
682
+ raise ValueError("Constraint must have a name")
683
+ try:
684
+ if const.name in self.col_named_constraints:
685
+ col, const = self.col_named_constraints.pop(const.name)
686
+
687
+ for col_const in list(self.columns[col.name].constraints):
688
+ if col_const.name == const.name:
689
+ self.columns[col.name].constraints.remove(col_const)
690
+ elif constraint_name_string(const.name):
691
+ const = self.named_constraints.pop(const.name)
692
+ elif const in self.unnamed_constraints:
693
+ self.unnamed_constraints.remove(const)
694
+
695
+ except KeyError:
696
+ if _is_type_bound(const):
697
+ # type-bound constraints are only included in the new
698
+ # table via their type object in any case, so ignore the
699
+ # drop_constraint() that comes here via the
700
+ # Operations.implementation_for(alter_column)
701
+ return
702
+ raise ValueError("No such constraint: '%s'" % const.name)
703
+ else:
704
+ if isinstance(const, PrimaryKeyConstraint):
705
+ for col in const.columns:
706
+ self.columns[col.name].primary_key = False
707
+
708
+ def create_index(self, idx: Index) -> None:
709
+ self.new_indexes[idx.name] = idx # type: ignore[index]
710
+
711
+ def drop_index(self, idx: Index) -> None:
712
+ try:
713
+ del self.indexes[idx.name] # type: ignore[arg-type]
714
+ except KeyError:
715
+ raise ValueError("No such index: '%s'" % idx.name)
716
+
717
+ def rename_table(self, *arg, **kw):
718
+ raise NotImplementedError("TODO")
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/ops.py ADDED
@@ -0,0 +1,2842 @@
+ from __future__ import annotations
+
+ from abc import abstractmethod
+ import os
+ import pathlib
+ import re
+ from typing import Any
+ from typing import Callable
+ from typing import cast
+ from typing import Dict
+ from typing import FrozenSet
+ from typing import Iterator
+ from typing import List
+ from typing import MutableMapping
+ from typing import Optional
+ from typing import Sequence
+ from typing import Set
+ from typing import Tuple
+ from typing import Type
+ from typing import TYPE_CHECKING
+ from typing import TypeVar
+ from typing import Union
+
+ from sqlalchemy.types import NULLTYPE
+
+ from . import schemaobj
+ from .base import BatchOperations
+ from .base import Operations
+ from .. import util
+ from ..util import sqla_compat
+
+ if TYPE_CHECKING:
+     from typing import Literal
+
+     from sqlalchemy.sql import Executable
+     from sqlalchemy.sql.elements import ColumnElement
+     from sqlalchemy.sql.elements import conv
+     from sqlalchemy.sql.elements import quoted_name
+     from sqlalchemy.sql.elements import TextClause
+     from sqlalchemy.sql.schema import CheckConstraint
+     from sqlalchemy.sql.schema import Column
+     from sqlalchemy.sql.schema import Computed
+     from sqlalchemy.sql.schema import Constraint
+     from sqlalchemy.sql.schema import ForeignKeyConstraint
+     from sqlalchemy.sql.schema import Identity
+     from sqlalchemy.sql.schema import Index
+     from sqlalchemy.sql.schema import MetaData
+     from sqlalchemy.sql.schema import PrimaryKeyConstraint
+     from sqlalchemy.sql.schema import SchemaItem
+     from sqlalchemy.sql.schema import Table
+     from sqlalchemy.sql.schema import UniqueConstraint
+     from sqlalchemy.sql.selectable import TableClause
+     from sqlalchemy.sql.type_api import TypeEngine
+
+     from ..autogenerate.rewriter import Rewriter
+     from ..runtime.migration import MigrationContext
+     from ..script.revision import _RevIdType
+
+ _T = TypeVar("_T", bound=Any)
+ _AC = TypeVar("_AC", bound="AddConstraintOp")
+
+
+ class MigrateOperation:
+     """base class for migration command and organization objects.
+
+     This system is part of the operation extensibility API.
+
+     .. seealso::
+
+         :ref:`operation_objects`
+
+         :ref:`operation_plugins`
+
+         :ref:`customizing_revision`
+
+     """
+
+     @util.memoized_property
+     def info(self) -> Dict[Any, Any]:
+         """A dictionary that may be used to store arbitrary information
+         along with this :class:`.MigrateOperation` object.
+
+         """
+         return {}
+
+     _mutations: FrozenSet[Rewriter] = frozenset()
+
+     def reverse(self) -> MigrateOperation:
+         raise NotImplementedError
+
+     def to_diff_tuple(self) -> Tuple[Any, ...]:
+         raise NotImplementedError
+
+
+ class AddConstraintOp(MigrateOperation):
+     """Represent an add constraint operation."""
+
+     add_constraint_ops = util.Dispatcher()
+
+     @property
+     def constraint_type(self) -> str:
+         raise NotImplementedError()
+
+     @classmethod
+     def register_add_constraint(
+         cls, type_: str
+     ) -> Callable[[Type[_AC]], Type[_AC]]:
+         def go(klass: Type[_AC]) -> Type[_AC]:
+             cls.add_constraint_ops.dispatch_for(type_)(klass.from_constraint)
+             return klass
+
+         return go
+
+     @classmethod
+     def from_constraint(cls, constraint: Constraint) -> AddConstraintOp:
+         return cls.add_constraint_ops.dispatch(constraint.__visit_name__)(  # type: ignore[no-any-return]  # noqa: E501
+             constraint
+         )
+
+     @abstractmethod
+     def to_constraint(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Constraint:
+         pass
+
+     def reverse(self) -> DropConstraintOp:
+         return DropConstraintOp.from_constraint(self.to_constraint())
+
+     def to_diff_tuple(self) -> Tuple[str, Constraint]:
+         return ("add_constraint", self.to_constraint())
+
+
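The `Dispatcher`/`register_add_constraint` machinery above is the extension point referenced by :ref:`operation_plugins`. As a hedged illustration of how a third-party directive plugs into it, here is a minimal sketch of a hypothetical op; `CreateSequenceOp`, the `"create_sequence"` directive name, and its parameters are invented for illustration and are not part of this file:

```python
# Illustrative sketch only -- CreateSequenceOp is a made-up example op.
from alembic.operations import MigrateOperation, Operations


@Operations.register_operation("create_sequence")
class CreateSequenceOp(MigrateOperation):
    """A hypothetical user-defined "CREATE SEQUENCE" directive."""

    def __init__(self, sequence_name, schema=None):
        self.sequence_name = sequence_name
        self.schema = schema

    @classmethod
    def create_sequence(cls, operations, sequence_name, **kw):
        # Registration makes this available as op.create_sequence()
        # inside migration scripts; invoke() routes it to an implementation.
        op = cls(sequence_name, **kw)
        return operations.invoke(op)
```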
+ @Operations.register_operation("drop_constraint")
+ @BatchOperations.register_operation("drop_constraint", "batch_drop_constraint")
+ class DropConstraintOp(MigrateOperation):
+     """Represent a drop constraint operation."""
+
+     def __init__(
+         self,
+         constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         type_: Optional[str] = None,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         _reverse: Optional[AddConstraintOp] = None,
+     ) -> None:
+         self.constraint_name = constraint_name
+         self.table_name = table_name
+         self.constraint_type = type_
+         self.schema = schema
+         self.if_exists = if_exists
+         self._reverse = _reverse
+
+     def reverse(self) -> AddConstraintOp:
+         return AddConstraintOp.from_constraint(self.to_constraint())
+
+     def to_diff_tuple(
+         self,
+     ) -> Tuple[str, SchemaItem]:
+         if self.constraint_type == "foreignkey":
+             return ("remove_fk", self.to_constraint())
+         else:
+             return ("remove_constraint", self.to_constraint())
+
+     @classmethod
+     def from_constraint(cls, constraint: Constraint) -> DropConstraintOp:
+         types = {
+             "unique_constraint": "unique",
+             "foreign_key_constraint": "foreignkey",
+             "primary_key_constraint": "primary",
+             "check_constraint": "check",
+             "column_check_constraint": "check",
+             "table_or_column_check_constraint": "check",
+         }
+
+         constraint_table = sqla_compat._table_for_constraint(constraint)
+         return cls(
+             sqla_compat.constraint_name_or_none(constraint.name),
+             constraint_table.name,
+             schema=constraint_table.schema,
+             type_=types.get(constraint.__visit_name__),
+             _reverse=AddConstraintOp.from_constraint(constraint),
+         )
+
+     def to_constraint(self) -> Constraint:
+         if self._reverse is not None:
+             constraint = self._reverse.to_constraint()
+             constraint.name = self.constraint_name
+             constraint_table = sqla_compat._table_for_constraint(constraint)
+             constraint_table.name = self.table_name
+             constraint_table.schema = self.schema
+
+             return constraint
+         else:
+             raise ValueError(
+                 "constraint cannot be produced; "
+                 "original constraint is not present"
+             )
+
+     @classmethod
+     def drop_constraint(
+         cls,
+         operations: Operations,
+         constraint_name: str,
+         table_name: str,
+         type_: Optional[str] = None,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+     ) -> None:
+         r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
+
+         :param constraint_name: name of the constraint.
+         :param table_name: table name.
+         :param type\_: optional, required on MySQL.  can be
+          'foreignkey', 'primary', 'unique', or 'check'.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param if_exists: If True, adds IF EXISTS operator when
+          dropping the constraint
+
+          .. versionadded:: 1.16.0
+
+         """
+
+         op = cls(
+             constraint_name,
+             table_name,
+             type_=type_,
+             schema=schema,
+             if_exists=if_exists,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_drop_constraint(
+         cls,
+         operations: BatchOperations,
+         constraint_name: str,
+         type_: Optional[str] = None,
+     ) -> None:
+         """Issue a "drop constraint" instruction using the
+         current batch migration context.
+
+         The batch form of this call omits the ``table_name`` and ``schema``
+         arguments from the call.
+
+         .. seealso::
+
+             :meth:`.Operations.drop_constraint`
+
+         """
+         op = cls(
+             constraint_name,
+             operations.impl.table_name,
+             type_=type_,
+             schema=operations.impl.schema,
+         )
+         return operations.invoke(op)
+
+
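For orientation, a typical call to the directive defined above, as it would appear in a migration script's `upgrade()`; the constraint and table names are invented, and `type_` is the argument MySQL requires per the docstring:

```python
from alembic import op


def upgrade():
    # names are illustrative; type_ matters chiefly on MySQL
    op.drop_constraint("fk_order_user", "order", type_="foreignkey")
```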
+ @Operations.register_operation("create_primary_key")
+ @BatchOperations.register_operation(
+     "create_primary_key", "batch_create_primary_key"
+ )
+ @AddConstraintOp.register_add_constraint("primary_key_constraint")
+ class CreatePrimaryKeyOp(AddConstraintOp):
+     """Represent a create primary key operation."""
+
+     constraint_type = "primarykey"
+
+     def __init__(
+         self,
+         constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         columns: Sequence[str],
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         self.constraint_name = constraint_name
+         self.table_name = table_name
+         self.columns = columns
+         self.schema = schema
+         self.kw = kw
+
+     @classmethod
+     def from_constraint(cls, constraint: Constraint) -> CreatePrimaryKeyOp:
+         constraint_table = sqla_compat._table_for_constraint(constraint)
+         pk_constraint = cast("PrimaryKeyConstraint", constraint)
+         return cls(
+             sqla_compat.constraint_name_or_none(pk_constraint.name),
+             constraint_table.name,
+             pk_constraint.columns.keys(),
+             schema=constraint_table.schema,
+             **pk_constraint.dialect_kwargs,
+         )
+
+     def to_constraint(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> PrimaryKeyConstraint:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+
+         return schema_obj.primary_key_constraint(
+             self.constraint_name,
+             self.table_name,
+             self.columns,
+             schema=self.schema,
+             **self.kw,
+         )
+
+     @classmethod
+     def create_primary_key(
+         cls,
+         operations: Operations,
+         constraint_name: Optional[str],
+         table_name: str,
+         columns: List[str],
+         *,
+         schema: Optional[str] = None,
+     ) -> None:
+         """Issue a "create primary key" instruction using the current
+         migration context.
+
+         e.g.::
+
+             from alembic import op
+
+             op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
+
+         This internally generates a :class:`~sqlalchemy.schema.Table` object
+         containing the necessary columns, then generates a new
+         :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
+         object which it then associates with the
+         :class:`~sqlalchemy.schema.Table`.
+         Any event listeners associated with this action will be fired
+         off normally.  The :class:`~sqlalchemy.schema.AddConstraint`
+         construct is ultimately used to generate the ALTER statement.
+
+         :param constraint_name: Name of the primary key constraint.  The name
+          is necessary so that an ALTER statement can be emitted.  For setups
+          that use an automated naming scheme such as that described at
+          :ref:`sqla:constraint_naming_conventions`
+          ``name`` here can be ``None``, as the event listener will
+          apply the name to the constraint object when it is associated
+          with the table.
+         :param table_name: String name of the target table.
+         :param columns: a list of string column names to be applied to the
+          primary key constraint.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+         """
+         op = cls(constraint_name, table_name, columns, schema=schema)
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_primary_key(
+         cls,
+         operations: BatchOperations,
+         constraint_name: Optional[str],
+         columns: List[str],
+     ) -> None:
+         """Issue a "create primary key" instruction using the
+         current batch migration context.
+
+         The batch form of this call omits the ``table_name`` and ``schema``
+         arguments from the call.
+
+         .. seealso::
+
+             :meth:`.Operations.create_primary_key`
+
+         """
+         op = cls(
+             constraint_name,
+             operations.impl.table_name,
+             columns,
+             schema=operations.impl.schema,
+         )
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("create_unique_constraint")
+ @BatchOperations.register_operation(
+     "create_unique_constraint", "batch_create_unique_constraint"
+ )
+ @AddConstraintOp.register_add_constraint("unique_constraint")
+ class CreateUniqueConstraintOp(AddConstraintOp):
+     """Represent a create unique constraint operation."""
+
+     constraint_type = "unique"
+
+     def __init__(
+         self,
+         constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         columns: Sequence[str],
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         self.constraint_name = constraint_name
+         self.table_name = table_name
+         self.columns = columns
+         self.schema = schema
+         self.kw = kw
+
+     @classmethod
+     def from_constraint(
+         cls, constraint: Constraint
+     ) -> CreateUniqueConstraintOp:
+         constraint_table = sqla_compat._table_for_constraint(constraint)
+
+         uq_constraint = cast("UniqueConstraint", constraint)
+
+         kw: Dict[str, Any] = {}
+         if uq_constraint.deferrable:
+             kw["deferrable"] = uq_constraint.deferrable
+         if uq_constraint.initially:
+             kw["initially"] = uq_constraint.initially
+         kw.update(uq_constraint.dialect_kwargs)
+         return cls(
+             sqla_compat.constraint_name_or_none(uq_constraint.name),
+             constraint_table.name,
+             [c.name for c in uq_constraint.columns],
+             schema=constraint_table.schema,
+             **kw,
+         )
+
+     def to_constraint(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> UniqueConstraint:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+         return schema_obj.unique_constraint(
+             self.constraint_name,
+             self.table_name,
+             self.columns,
+             schema=self.schema,
+             **self.kw,
+         )
+
+     @classmethod
+     def create_unique_constraint(
+         cls,
+         operations: Operations,
+         constraint_name: Optional[str],
+         table_name: str,
+         columns: Sequence[str],
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> Any:
+         """Issue a "create unique constraint" instruction using the
+         current migration context.
+
+         e.g.::
+
+             from alembic import op
+             op.create_unique_constraint("uq_user_name", "user", ["name"])
+
+         This internally generates a :class:`~sqlalchemy.schema.Table` object
+         containing the necessary columns, then generates a new
+         :class:`~sqlalchemy.schema.UniqueConstraint`
+         object which it then associates with the
+         :class:`~sqlalchemy.schema.Table`.
+         Any event listeners associated with this action will be fired
+         off normally.  The :class:`~sqlalchemy.schema.AddConstraint`
+         construct is ultimately used to generate the ALTER statement.
+
+         :param name: Name of the unique constraint.  The name is necessary
+          so that an ALTER statement can be emitted.  For setups that
+          use an automated naming scheme such as that described at
+          :ref:`sqla:constraint_naming_conventions`,
+          ``name`` here can be ``None``, as the event listener will
+          apply the name to the constraint object when it is associated
+          with the table.
+         :param table_name: String name of the source table.
+         :param columns: a list of string column names in the
+          source table.
+         :param deferrable: optional bool. If set, emit DEFERRABLE or
+          NOT DEFERRABLE when issuing DDL for this constraint.
+         :param initially: optional string. If set, emit INITIALLY <value>
+          when issuing DDL for this constraint.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+         """
+
+         op = cls(constraint_name, table_name, columns, schema=schema, **kw)
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_unique_constraint(
+         cls,
+         operations: BatchOperations,
+         constraint_name: str,
+         columns: Sequence[str],
+         **kw: Any,
+     ) -> Any:
+         """Issue a "create unique constraint" instruction using the
+         current batch migration context.
+
+         The batch form of this call omits the ``source`` and ``schema``
+         arguments from the call.
+
+         .. seealso::
+
+             :meth:`.Operations.create_unique_constraint`
+
+         """
+         kw["schema"] = operations.impl.schema
+         op = cls(constraint_name, operations.impl.table_name, columns, **kw)
+         return operations.invoke(op)
+
+
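A short usage sketch of the batch form defined above, which pulls ``table_name`` and ``schema`` from the enclosing ``batch_alter_table()`` context (table and constraint names invented for illustration):

```python
from alembic import op


def upgrade():
    # the batch context supplies table_name/schema to the op
    with op.batch_alter_table("user") as batch_op:
        batch_op.create_unique_constraint("uq_user_name", ["name"])
```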
+ @Operations.register_operation("create_foreign_key")
+ @BatchOperations.register_operation(
+     "create_foreign_key", "batch_create_foreign_key"
+ )
+ @AddConstraintOp.register_add_constraint("foreign_key_constraint")
+ class CreateForeignKeyOp(AddConstraintOp):
+     """Represent a create foreign key constraint operation."""
+
+     constraint_type = "foreignkey"
+
+     def __init__(
+         self,
+         constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+         source_table: str,
+         referent_table: str,
+         local_cols: List[str],
+         remote_cols: List[str],
+         **kw: Any,
+     ) -> None:
+         self.constraint_name = constraint_name
+         self.source_table = source_table
+         self.referent_table = referent_table
+         self.local_cols = local_cols
+         self.remote_cols = remote_cols
+         self.kw = kw
+
+     def to_diff_tuple(self) -> Tuple[str, ForeignKeyConstraint]:
+         return ("add_fk", self.to_constraint())
+
+     @classmethod
+     def from_constraint(cls, constraint: Constraint) -> CreateForeignKeyOp:
+         fk_constraint = cast("ForeignKeyConstraint", constraint)
+         kw: Dict[str, Any] = {}
+         if fk_constraint.onupdate:
+             kw["onupdate"] = fk_constraint.onupdate
+         if fk_constraint.ondelete:
+             kw["ondelete"] = fk_constraint.ondelete
+         if fk_constraint.initially:
+             kw["initially"] = fk_constraint.initially
+         if fk_constraint.deferrable:
+             kw["deferrable"] = fk_constraint.deferrable
+         if fk_constraint.use_alter:
+             kw["use_alter"] = fk_constraint.use_alter
+         if fk_constraint.match:
+             kw["match"] = fk_constraint.match
+
+         (
+             source_schema,
+             source_table,
+             source_columns,
+             target_schema,
+             target_table,
+             target_columns,
+             onupdate,
+             ondelete,
+             deferrable,
+             initially,
+         ) = sqla_compat._fk_spec(fk_constraint)
+
+         kw["source_schema"] = source_schema
+         kw["referent_schema"] = target_schema
+         kw.update(fk_constraint.dialect_kwargs)
+         return cls(
+             sqla_compat.constraint_name_or_none(fk_constraint.name),
+             source_table,
+             target_table,
+             source_columns,
+             target_columns,
+             **kw,
+         )
+
+     def to_constraint(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> ForeignKeyConstraint:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+         return schema_obj.foreign_key_constraint(
+             self.constraint_name,
+             self.source_table,
+             self.referent_table,
+             self.local_cols,
+             self.remote_cols,
+             **self.kw,
+         )
+
+     @classmethod
+     def create_foreign_key(
+         cls,
+         operations: Operations,
+         constraint_name: Optional[str],
+         source_table: str,
+         referent_table: str,
+         local_cols: List[str],
+         remote_cols: List[str],
+         *,
+         onupdate: Optional[str] = None,
+         ondelete: Optional[str] = None,
+         deferrable: Optional[bool] = None,
+         initially: Optional[str] = None,
+         match: Optional[str] = None,
+         source_schema: Optional[str] = None,
+         referent_schema: Optional[str] = None,
+         **dialect_kw: Any,
+     ) -> None:
+         """Issue a "create foreign key" instruction using the
+         current migration context.
+
+         e.g.::
+
+             from alembic import op
+
+             op.create_foreign_key(
+                 "fk_user_address",
+                 "address",
+                 "user",
+                 ["user_id"],
+                 ["id"],
+             )
+
+         This internally generates a :class:`~sqlalchemy.schema.Table` object
+         containing the necessary columns, then generates a new
+         :class:`~sqlalchemy.schema.ForeignKeyConstraint`
+         object which it then associates with the
+         :class:`~sqlalchemy.schema.Table`.
+         Any event listeners associated with this action will be fired
+         off normally.  The :class:`~sqlalchemy.schema.AddConstraint`
+         construct is ultimately used to generate the ALTER statement.
+
+         :param constraint_name: Name of the foreign key constraint.  The name
+          is necessary so that an ALTER statement can be emitted.  For setups
+          that use an automated naming scheme such as that described at
+          :ref:`sqla:constraint_naming_conventions`,
+          ``name`` here can be ``None``, as the event listener will
+          apply the name to the constraint object when it is associated
+          with the table.
+         :param source_table: String name of the source table.
+         :param referent_table: String name of the destination table.
+         :param local_cols: a list of string column names in the
+          source table.
+         :param remote_cols: a list of string column names in the
+          remote table.
+         :param onupdate: Optional string. If set, emit ON UPDATE <value> when
+          issuing DDL for this constraint. Typical values include CASCADE,
+          DELETE and RESTRICT.
+         :param ondelete: Optional string. If set, emit ON DELETE <value> when
+          issuing DDL for this constraint. Typical values include CASCADE,
+          DELETE and RESTRICT.
+         :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
+          DEFERRABLE when issuing DDL for this constraint.
+         :param source_schema: Optional schema name of the source table.
+         :param referent_schema: Optional schema name of the destination table.
+
+         """
+
+         op = cls(
+             constraint_name,
+             source_table,
+             referent_table,
+             local_cols,
+             remote_cols,
+             onupdate=onupdate,
+             ondelete=ondelete,
+             deferrable=deferrable,
+             source_schema=source_schema,
+             referent_schema=referent_schema,
+             initially=initially,
+             match=match,
+             **dialect_kw,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_foreign_key(
+         cls,
+         operations: BatchOperations,
+         constraint_name: Optional[str],
+         referent_table: str,
+         local_cols: List[str],
+         remote_cols: List[str],
+         *,
+         referent_schema: Optional[str] = None,
+         onupdate: Optional[str] = None,
+         ondelete: Optional[str] = None,
+         deferrable: Optional[bool] = None,
+         initially: Optional[str] = None,
+         match: Optional[str] = None,
+         **dialect_kw: Any,
+     ) -> None:
+         """Issue a "create foreign key" instruction using the
+         current batch migration context.
+
+         The batch form of this call omits the ``source`` and ``source_schema``
+         arguments from the call.
+
+         e.g.::
+
+             with batch_alter_table("address") as batch_op:
+                 batch_op.create_foreign_key(
+                     "fk_user_address",
+                     "user",
+                     ["user_id"],
+                     ["id"],
+                 )
+
+         .. seealso::
+
+             :meth:`.Operations.create_foreign_key`
+
+         """
+         op = cls(
+             constraint_name,
+             operations.impl.table_name,
+             referent_table,
+             local_cols,
+             remote_cols,
+             onupdate=onupdate,
+             ondelete=ondelete,
+             deferrable=deferrable,
+             source_schema=operations.impl.schema,
+             referent_schema=referent_schema,
+             initially=initially,
+             match=match,
+             **dialect_kw,
+         )
+         return operations.invoke(op)
+
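To make the positional argument order concrete, a hedged sketch of a composite foreign key with a referential action (all names invented for illustration):

```python
from alembic import op


def upgrade():
    op.create_foreign_key(
        "fk_item_order",            # constraint_name
        "item",                     # source_table
        "order",                    # referent_table
        ["order_id", "order_rev"],  # local_cols, in the source table
        ["id", "rev"],              # remote_cols, in the referent table
        ondelete="CASCADE",
    )
```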
+
+ @Operations.register_operation("create_check_constraint")
+ @BatchOperations.register_operation(
+     "create_check_constraint", "batch_create_check_constraint"
+ )
+ @AddConstraintOp.register_add_constraint("check_constraint")
+ @AddConstraintOp.register_add_constraint("table_or_column_check_constraint")
+ @AddConstraintOp.register_add_constraint("column_check_constraint")
+ class CreateCheckConstraintOp(AddConstraintOp):
+     """Represent a create check constraint operation."""
+
+     constraint_type = "check"
+
+     def __init__(
+         self,
+         constraint_name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         condition: Union[str, TextClause, ColumnElement[Any]],
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         self.constraint_name = constraint_name
+         self.table_name = table_name
+         self.condition = condition
+         self.schema = schema
+         self.kw = kw
+
+     @classmethod
+     def from_constraint(
+         cls, constraint: Constraint
+     ) -> CreateCheckConstraintOp:
+         constraint_table = sqla_compat._table_for_constraint(constraint)
+
+         ck_constraint = cast("CheckConstraint", constraint)
+         return cls(
+             sqla_compat.constraint_name_or_none(ck_constraint.name),
+             constraint_table.name,
+             cast("ColumnElement[Any]", ck_constraint.sqltext),
+             schema=constraint_table.schema,
+             **ck_constraint.dialect_kwargs,
+         )
+
+     def to_constraint(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> CheckConstraint:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+         return schema_obj.check_constraint(
+             self.constraint_name,
+             self.table_name,
+             self.condition,
+             schema=self.schema,
+             **self.kw,
+         )
+
+     @classmethod
+     def create_check_constraint(
+         cls,
+         operations: Operations,
+         constraint_name: Optional[str],
+         table_name: str,
+         condition: Union[str, ColumnElement[bool], TextClause],
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         """Issue a "create check constraint" instruction using the
+         current migration context.
+
+         e.g.::
+
+             from alembic import op
+             from sqlalchemy.sql import column, func
+
+             op.create_check_constraint(
+                 "ck_user_name_len",
+                 "user",
+                 func.len(column("name")) > 5,
+             )
+
+         CHECK constraints are usually against a SQL expression, so ad-hoc
+         table metadata is usually needed.  The function will convert the given
+         arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound
+         to an anonymous table in order to emit the CREATE statement.
+
+         :param name: Name of the check constraint.  The name is necessary
+          so that an ALTER statement can be emitted.  For setups that
+          use an automated naming scheme such as that described at
+          :ref:`sqla:constraint_naming_conventions`,
+          ``name`` here can be ``None``, as the event listener will
+          apply the name to the constraint object when it is associated
+          with the table.
+         :param table_name: String name of the source table.
+         :param condition: SQL expression that's the condition of the
+          constraint.  Can be a string or SQLAlchemy expression language
+          structure.
+         :param deferrable: optional bool. If set, emit DEFERRABLE or
+          NOT DEFERRABLE when issuing DDL for this constraint.
+         :param initially: optional string. If set, emit INITIALLY <value>
+          when issuing DDL for this constraint.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+         """
+         op = cls(constraint_name, table_name, condition, schema=schema, **kw)
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_check_constraint(
+         cls,
+         operations: BatchOperations,
+         constraint_name: str,
+         condition: Union[str, ColumnElement[bool], TextClause],
+         **kw: Any,
+     ) -> None:
+         """Issue a "create check constraint" instruction using the
+         current batch migration context.
+
+         The batch form of this call omits the ``source`` and ``schema``
+         arguments from the call.
+
+         .. seealso::
+
+             :meth:`.Operations.create_check_constraint`
+
+         """
+         op = cls(
+             constraint_name,
+             operations.impl.table_name,
+             condition,
+             schema=operations.impl.schema,
+             **kw,
+         )
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("create_index")
+ @BatchOperations.register_operation("create_index", "batch_create_index")
+ class CreateIndexOp(MigrateOperation):
+     """Represent a create index operation."""
+
+     def __init__(
+         self,
+         index_name: Optional[str],
+         table_name: str,
+         columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
+         *,
+         schema: Optional[str] = None,
+         unique: bool = False,
+         if_not_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> None:
+         self.index_name = index_name
+         self.table_name = table_name
+         self.columns = columns
+         self.schema = schema
+         self.unique = unique
+         self.if_not_exists = if_not_exists
+         self.kw = kw
+
+     def reverse(self) -> DropIndexOp:
+         return DropIndexOp.from_index(self.to_index())
+
+     def to_diff_tuple(self) -> Tuple[str, Index]:
+         return ("add_index", self.to_index())
+
+     @classmethod
+     def from_index(cls, index: Index) -> CreateIndexOp:
+         assert index.table is not None
+         return cls(
+             index.name,
+             index.table.name,
+             index.expressions,
+             schema=index.table.schema,
+             unique=index.unique,
+             **index.kwargs,
+         )
+
+     def to_index(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Index:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+
+         idx = schema_obj.index(
+             self.index_name,
+             self.table_name,
+             self.columns,
+             schema=self.schema,
+             unique=self.unique,
+             **self.kw,
+         )
+         return idx
+
+     @classmethod
+     def create_index(
+         cls,
+         operations: Operations,
+         index_name: Optional[str],
+         table_name: str,
+         columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
+         *,
+         schema: Optional[str] = None,
+         unique: bool = False,
+         if_not_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> None:
+         r"""Issue a "create index" instruction using the current
+         migration context.
+
+         e.g.::
+
+             from alembic import op
+
+             op.create_index("ik_test", "t1", ["foo", "bar"])
+
+         Functional indexes can be produced by using the
+         :func:`sqlalchemy.sql.expression.text` construct::
+
+             from alembic import op
+             from sqlalchemy import text
+
+             op.create_index("ik_test", "t1", [text("lower(foo)")])
+
+         :param index_name: name of the index.
+         :param table_name: name of the owning table.
+         :param columns: a list consisting of string column names and/or
+          :func:`~sqlalchemy.sql.expression.text` constructs.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param unique: If True, create a unique index.
+
+         :param quote: Force quoting of this column's name on or off,
+          corresponding to ``True`` or ``False``.  When left at its default
+          of ``None``, the column identifier will be quoted according to
+          whether the name is case sensitive (identifiers with at least one
+          upper case character are treated as case sensitive), or if it's a
+          reserved word.  This flag is only needed to force quoting of a
+          reserved word which is not known by the SQLAlchemy dialect.
+
+         :param if_not_exists: If True, adds IF NOT EXISTS operator when
+          creating the new index.
+
+          .. versionadded:: 1.12.0
+
+         :param \**kw: Additional keyword arguments not mentioned above are
+          dialect specific, and passed in the form
+          ``<dialectname>_<argname>``.
+          See the documentation regarding an individual dialect at
+          :ref:`dialect_toplevel` for detail on documented arguments.
+
+         """
+         op = cls(
+             index_name,
+             table_name,
+             columns,
+             schema=schema,
+             unique=unique,
+             if_not_exists=if_not_exists,
+             **kw,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_index(
+         cls,
+         operations: BatchOperations,
+         index_name: str,
+         columns: List[str],
+         **kw: Any,
+     ) -> None:
+         """Issue a "create index" instruction using the
+         current batch migration context.
+
+         .. seealso::
+
+             :meth:`.Operations.create_index`
+
+         """
+
+         op = cls(
+             index_name,
+             operations.impl.table_name,
+             columns,
+             schema=operations.impl.schema,
+             **kw,
+         )
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("drop_index")
+ @BatchOperations.register_operation("drop_index", "batch_drop_index")
+ class DropIndexOp(MigrateOperation):
+     """Represent a drop index operation."""
+
+     def __init__(
+         self,
+         index_name: Union[quoted_name, str, conv],
+         table_name: Optional[str] = None,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         _reverse: Optional[CreateIndexOp] = None,
+         **kw: Any,
+     ) -> None:
+         self.index_name = index_name
+         self.table_name = table_name
+         self.schema = schema
+         self.if_exists = if_exists
+         self._reverse = _reverse
+         self.kw = kw
+
+     def to_diff_tuple(self) -> Tuple[str, Index]:
+         return ("remove_index", self.to_index())
+
+     def reverse(self) -> CreateIndexOp:
+         return CreateIndexOp.from_index(self.to_index())
+
+     @classmethod
+     def from_index(cls, index: Index) -> DropIndexOp:
+         assert index.table is not None
+         return cls(
+             index.name,  # type: ignore[arg-type]
+             table_name=index.table.name,
+             schema=index.table.schema,
+             _reverse=CreateIndexOp.from_index(index),
+             unique=index.unique,
+             **index.kwargs,
+         )
+
+     def to_index(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Index:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+
+         # need a dummy column name here since SQLAlchemy
+         # 0.7.6 and further raises on Index with no columns
+         return schema_obj.index(
+             self.index_name,
+             self.table_name,
+             self._reverse.columns if self._reverse else ["x"],
+             schema=self.schema,
+             **self.kw,
+         )
+
+     @classmethod
+     def drop_index(
+         cls,
+         operations: Operations,
+         index_name: str,
+         table_name: Optional[str] = None,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> None:
+         r"""Issue a "drop index" instruction using the current
+         migration context.
+
+         e.g.::
+
+             drop_index("accounts")
+
+         :param index_name: name of the index.
+         :param table_name: name of the owning table.  Some
+          backends such as Microsoft SQL Server require this.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+         :param if_exists: If True, adds IF EXISTS operator when
+          dropping the index.
+
+          .. versionadded:: 1.12.0
+
+         :param \**kw: Additional keyword arguments not mentioned above are
+          dialect specific, and passed in the form
+          ``<dialectname>_<argname>``.
+          See the documentation regarding an individual dialect at
+          :ref:`dialect_toplevel` for detail on documented arguments.
+
+         """
+         op = cls(
+             index_name,
+             table_name=table_name,
+             schema=schema,
+             if_exists=if_exists,
+             **kw,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_drop_index(
+         cls, operations: BatchOperations, index_name: str, **kw: Any
+     ) -> None:
+         """Issue a "drop index" instruction using the
+         current batch migration context.
+
+         .. seealso::
+
+             :meth:`.Operations.drop_index`
+
+         """
+
+         op = cls(
+             index_name,
+             table_name=operations.impl.table_name,
+             schema=operations.impl.schema,
+             **kw,
+         )
+         return operations.invoke(op)
+
+
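`CreateIndexOp` and `DropIndexOp` carry enough state (via ``_reverse``) to invert each other, which is what downgrade rendering relies on. A small sketch of that round trip, assuming a plain SQLAlchemy ``Index`` and no migration context (so ``to_index()`` builds ad-hoc metadata):

```python
from sqlalchemy import Column, Index, Integer, MetaData, Table
from alembic.operations import ops

t = Table("t1", MetaData(), Column("foo", Integer), Column("bar", Integer))
ix = Index("ik_test", t.c.foo, t.c.bar)

create = ops.CreateIndexOp.from_index(ix)
drop = create.reverse()            # DropIndexOp; keeps columns via _reverse
assert drop.reverse().index_name == "ik_test"   # round-trips to a create op
```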
+ @Operations.register_operation("create_table")
+ class CreateTableOp(MigrateOperation):
+     """Represent a create table operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         columns: Sequence[SchemaItem],
+         *,
+         schema: Optional[str] = None,
+         if_not_exists: Optional[bool] = None,
+         _namespace_metadata: Optional[MetaData] = None,
+         _constraints_included: bool = False,
+         **kw: Any,
+     ) -> None:
+         self.table_name = table_name
+         self.columns = columns
+         self.schema = schema
+         self.if_not_exists = if_not_exists
+         self.info = kw.pop("info", {})
+         self.comment = kw.pop("comment", None)
+         self.prefixes = kw.pop("prefixes", None)
+         self.kw = kw
+         self._namespace_metadata = _namespace_metadata
+         self._constraints_included = _constraints_included
+
+     def reverse(self) -> DropTableOp:
+         return DropTableOp.from_table(
+             self.to_table(), _namespace_metadata=self._namespace_metadata
+         )
+
+     def to_diff_tuple(self) -> Tuple[str, Table]:
+         return ("add_table", self.to_table())
+
+     @classmethod
+     def from_table(
+         cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None
+     ) -> CreateTableOp:
+         if _namespace_metadata is None:
+             _namespace_metadata = table.metadata
+
+         return cls(
+             table.name,
+             list(table.c) + list(table.constraints),
+             schema=table.schema,
+             _namespace_metadata=_namespace_metadata,
+             # given a Table() object, this Table will contain full Index()
+             # and UniqueConstraint objects already constructed in response to
+             # each unique=True / index=True flag on a Column.  Carry this
+             # state along so that when we re-convert back into a Table, we
+             # skip unique=True/index=True so that these constraints are
+             # not doubled up. see #844 #848
+             _constraints_included=True,
+             comment=table.comment,
+             info=dict(table.info),
+             prefixes=list(table._prefixes),
+             **table.kwargs,
+         )
+
+     def to_table(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Table:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+
+         return schema_obj.table(
+             self.table_name,
+             *self.columns,
+             schema=self.schema,
+             prefixes=list(self.prefixes) if self.prefixes else [],
+             comment=self.comment,
+             info=self.info.copy() if self.info else {},
+             _constraints_included=self._constraints_included,
+             **self.kw,
+         )
+
+     @classmethod
+     def create_table(
+         cls,
+         operations: Operations,
+         table_name: str,
+         *columns: SchemaItem,
+         if_not_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> Table:
+         r"""Issue a "create table" instruction using the current migration
+         context.
+
+         This directive receives an argument list similar to that of the
+         traditional :class:`sqlalchemy.schema.Table` construct, but without the
+         metadata::
+
+             from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+             from alembic import op
+
+             op.create_table(
+                 "account",
+                 Column("id", INTEGER, primary_key=True),
+                 Column("name", VARCHAR(50), nullable=False),
+                 Column("description", NVARCHAR(200)),
+                 Column("timestamp", TIMESTAMP, server_default=func.now()),
+             )
+
+         Note that :meth:`.create_table` accepts
+         :class:`~sqlalchemy.schema.Column`
+         constructs directly from the SQLAlchemy library.  In particular,
+         default values to be created on the database side are
+         specified using the ``server_default`` parameter, and not
+         ``default`` which only specifies Python-side defaults::
+
+             from alembic import op
+             from sqlalchemy import Column, TIMESTAMP, func
+
+             # specify "DEFAULT NOW" along with the "timestamp" column
+             op.create_table(
+                 "account",
+                 Column("id", INTEGER, primary_key=True),
+                 Column("timestamp", TIMESTAMP, server_default=func.now()),
+             )
+
+         The function also returns a newly created
+         :class:`~sqlalchemy.schema.Table` object, corresponding to the table
+         specification given, which is suitable for
+         immediate SQL operations, in particular
+         :meth:`.Operations.bulk_insert`::
+
+             from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column
+             from alembic import op
+
+             account_table = op.create_table(
+                 "account",
+                 Column("id", INTEGER, primary_key=True),
+                 Column("name", VARCHAR(50), nullable=False),
+                 Column("description", NVARCHAR(200)),
+                 Column("timestamp", TIMESTAMP, server_default=func.now()),
+             )
+
+             op.bulk_insert(
+                 account_table,
+                 [
+                     {"name": "A1", "description": "account 1"},
+                     {"name": "A2", "description": "account 2"},
+                 ],
+             )
+
+         :param table_name: Name of the table
+         :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
+          objects within
+          the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
+          objects
+          and :class:`~sqlalchemy.schema.Index` objects.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param if_not_exists: If True, adds IF NOT EXISTS operator when
+          creating the new table.
+
+          .. versionadded:: 1.13.3
+         :param \**kw: Other keyword arguments are passed to the underlying
+          :class:`sqlalchemy.schema.Table` object created for the command.
+
+         :return: the :class:`~sqlalchemy.schema.Table` object corresponding
+          to the parameters given.
+
+         """
+         op = cls(table_name, columns, if_not_exists=if_not_exists, **kw)
+         return operations.invoke(op)
+
+
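Because ``CreateTableOp.from_table()`` snapshots the columns and constraints (and flags them via ``_constraints_included``), reversal round-trips without doubling up constraints. A minimal sketch under the same no-context assumption as above:

```python
from sqlalchemy import Column, Integer, MetaData, Table
from alembic.operations import ops

widget = Table("widget", MetaData(), Column("id", Integer, primary_key=True))

create = ops.CreateTableOp.from_table(widget)
drop = create.reverse()          # DropTableOp carrying a _reverse link
assert isinstance(drop.reverse(), ops.CreateTableOp)
assert drop.reverse().table_name == "widget"
```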
+ @Operations.register_operation("drop_table")
+ class DropTableOp(MigrateOperation):
+     """Represent a drop table operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         table_kw: Optional[MutableMapping[Any, Any]] = None,
+         _reverse: Optional[CreateTableOp] = None,
+     ) -> None:
+         self.table_name = table_name
+         self.schema = schema
+         self.if_exists = if_exists
+         self.table_kw = table_kw or {}
+         self.comment = self.table_kw.pop("comment", None)
+         self.info = self.table_kw.pop("info", None)
+         self.prefixes = self.table_kw.pop("prefixes", None)
+         self._reverse = _reverse
+
+     def to_diff_tuple(self) -> Tuple[str, Table]:
+         return ("remove_table", self.to_table())
+
+     def reverse(self) -> CreateTableOp:
+         return CreateTableOp.from_table(self.to_table())
+
+     @classmethod
+     def from_table(
+         cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None
+     ) -> DropTableOp:
+         return cls(
+             table.name,
+             schema=table.schema,
+             table_kw={
+                 "comment": table.comment,
+                 "info": dict(table.info),
+                 "prefixes": list(table._prefixes),
+                 **table.kwargs,
+             },
+             _reverse=CreateTableOp.from_table(
+                 table, _namespace_metadata=_namespace_metadata
+             ),
+         )
+
+     def to_table(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Table:
+         if self._reverse:
+             cols_and_constraints = self._reverse.columns
+         else:
+             cols_and_constraints = []
+
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+         t = schema_obj.table(
+             self.table_name,
+             *cols_and_constraints,
+             comment=self.comment,
+             info=self.info.copy() if self.info else {},
+             prefixes=list(self.prefixes) if self.prefixes else [],
+             schema=self.schema,
+             _constraints_included=(
+                 self._reverse._constraints_included if self._reverse else False
+             ),
+             **self.table_kw,
+         )
+         return t
+
+     @classmethod
+     def drop_table(
+         cls,
+         operations: Operations,
+         table_name: str,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> None:
+         r"""Issue a "drop table" instruction using the current
+         migration context.
+
+
+         e.g.::
+
+             drop_table("accounts")
+
+         :param table_name: Name of the table
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param if_exists: If True, adds IF EXISTS operator when
+          dropping the table.
+
+          .. versionadded:: 1.13.3
+         :param \**kw: Other keyword arguments are passed to the underlying
+          :class:`sqlalchemy.schema.Table` object created for the command.
+
+         """
+         op = cls(table_name, schema=schema, if_exists=if_exists, table_kw=kw)
+         operations.invoke(op)
+
+
+ class AlterTableOp(MigrateOperation):
+     """Represent an alter table operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         *,
+         schema: Optional[str] = None,
+     ) -> None:
+         self.table_name = table_name
+         self.schema = schema
+
+
+ @Operations.register_operation("rename_table")
+ class RenameTableOp(AlterTableOp):
+     """Represent a rename table operation."""
+
+     def __init__(
+         self,
+         old_table_name: str,
+         new_table_name: str,
+         *,
+         schema: Optional[str] = None,
+     ) -> None:
+         super().__init__(old_table_name, schema=schema)
+         self.new_table_name = new_table_name
+
+     @classmethod
+     def rename_table(
+         cls,
+         operations: Operations,
+         old_table_name: str,
+         new_table_name: str,
+         *,
+         schema: Optional[str] = None,
+     ) -> None:
+         """Emit an ALTER TABLE to rename a table.
+
+         :param old_table_name: old name.
+         :param new_table_name: new name.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+
+         """
+         op = cls(old_table_name, new_table_name, schema=schema)
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("create_table_comment")
+ @BatchOperations.register_operation(
+     "create_table_comment", "batch_create_table_comment"
+ )
+ class CreateTableCommentOp(AlterTableOp):
+     """Represent a COMMENT ON `table` operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         comment: Optional[str],
+         *,
+         schema: Optional[str] = None,
+         existing_comment: Optional[str] = None,
+     ) -> None:
+         self.table_name = table_name
+         self.comment = comment
+         self.existing_comment = existing_comment
+         self.schema = schema
+
+     @classmethod
+     def create_table_comment(
+         cls,
+         operations: Operations,
+         table_name: str,
+         comment: Optional[str],
+         *,
+         existing_comment: Optional[str] = None,
+         schema: Optional[str] = None,
+     ) -> None:
+         """Emit a COMMENT ON operation to set the comment for a table.
+
+         :param table_name: string name of the target table.
+         :param comment: string value of the comment being registered against
+          the specified table.
+         :param existing_comment: String value of a comment
+          already registered on the specified table, used within autogenerate
+          so that the operation is reversible, but not required for direct
+          use.
+
+         .. seealso::
+
+             :meth:`.Operations.drop_table_comment`
+
+             :paramref:`.Operations.alter_column.comment`
+
+         """
+
+         op = cls(
+             table_name,
+             comment,
+             existing_comment=existing_comment,
+             schema=schema,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_create_table_comment(
+         cls,
+         operations: BatchOperations,
+         comment: Optional[str],
+         *,
+         existing_comment: Optional[str] = None,
+     ) -> None:
+         """Emit a COMMENT ON operation to set the comment for a table
+         using the current batch migration context.
+
+         :param comment: string value of the comment being registered against
+          the specified table.
+         :param existing_comment: String value of a comment
+          already registered on the specified table, used within autogenerate
+          so that the operation is reversible, but not required for direct
+          use.
+
+         """
+
+         op = cls(
+             operations.impl.table_name,
+             comment,
+             existing_comment=existing_comment,
+             schema=operations.impl.schema,
+         )
+         return operations.invoke(op)
+
+     def reverse(self) -> Union[CreateTableCommentOp, DropTableCommentOp]:
+         """Reverses the COMMENT ON operation against a table."""
+         if self.existing_comment is None:
+             return DropTableCommentOp(
+                 self.table_name,
+                 existing_comment=self.comment,
+                 schema=self.schema,
+             )
+         else:
+             return CreateTableCommentOp(
+                 self.table_name,
+                 self.existing_comment,
+                 existing_comment=self.comment,
+                 schema=self.schema,
+             )
+
+     def to_table(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Table:
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+
+         return schema_obj.table(
+             self.table_name, schema=self.schema, comment=self.comment
+         )
+
+     def to_diff_tuple(self) -> Tuple[Any, ...]:
+         return ("add_table_comment", self.to_table(), self.existing_comment)
+
+
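Note the asymmetry in ``reverse()`` above: without an ``existing_comment`` the inverse is a drop, while with one it is another ``CreateTableCommentOp`` that restores the prior text. A small sketch (table name and comment strings invented):

```python
from alembic.operations import ops

set_op = ops.CreateTableCommentOp("account", "user accounts")
print(type(set_op.reverse()).__name__)    # DropTableCommentOp

amend = ops.CreateTableCommentOp(
    "account", "new text", existing_comment="user accounts"
)
print(amend.reverse().comment)            # "user accounts" is restored
```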
1602
+ @Operations.register_operation("drop_table_comment")
1603
+ @BatchOperations.register_operation(
1604
+ "drop_table_comment", "batch_drop_table_comment"
1605
+ )
1606
+ class DropTableCommentOp(AlterTableOp):
1607
+ """Represent an operation to remove the comment from a table."""
1608
+
1609
+ def __init__(
1610
+ self,
1611
+ table_name: str,
1612
+ *,
1613
+ schema: Optional[str] = None,
1614
+ existing_comment: Optional[str] = None,
1615
+ ) -> None:
1616
+ self.table_name = table_name
1617
+ self.existing_comment = existing_comment
1618
+ self.schema = schema
1619
+
1620
+ @classmethod
1621
+ def drop_table_comment(
1622
+ cls,
1623
+ operations: Operations,
1624
+ table_name: str,
1625
+ *,
1626
+ existing_comment: Optional[str] = None,
1627
+ schema: Optional[str] = None,
1628
+ ) -> None:
1629
+ """Issue a "drop table comment" operation to
1630
+ remove an existing comment set on a table.
1631
+
1632
+ :param table_name: string name of the target table.
1633
+ :param existing_comment: An optional string value of a comment already
1634
+ registered on the specified table.
1635
+
1636
+ .. seealso::
1637
+
1638
+ :meth:`.Operations.create_table_comment`
1639
+
1640
+ :paramref:`.Operations.alter_column.comment`
1641
+
1642
+ """
1643
+
1644
+ op = cls(table_name, existing_comment=existing_comment, schema=schema)
1645
+ return operations.invoke(op)
1646
+
1647
+ @classmethod
1648
+ def batch_drop_table_comment(
1649
+ cls,
1650
+ operations: BatchOperations,
1651
+ *,
1652
+ existing_comment: Optional[str] = None,
1653
+ ) -> None:
1654
+ """Issue a "drop table comment" operation to
1655
+ remove an existing comment set on a table using the current
1656
+ batch operations context.
1657
+
1658
+ :param existing_comment: An optional string value of a comment already
1659
+ registered on the specified table.
1660
+
1661
+ """
1662
+
1663
+ op = cls(
1664
+ operations.impl.table_name,
1665
+ existing_comment=existing_comment,
1666
+ schema=operations.impl.schema,
1667
+ )
1668
+ return operations.invoke(op)
1669
+
1670
+ def reverse(self) -> CreateTableCommentOp:
1671
+ """Reverses the COMMENT ON operation against a table."""
1672
+ return CreateTableCommentOp(
1673
+ self.table_name, self.existing_comment, schema=self.schema
1674
+ )
1675
+
1676
+ def to_table(
1677
+ self, migration_context: Optional[MigrationContext] = None
1678
+ ) -> Table:
1679
+ schema_obj = schemaobj.SchemaObjects(migration_context)
1680
+
1681
+ return schema_obj.table(self.table_name, schema=self.schema)
1682
+
1683
+ def to_diff_tuple(self) -> Tuple[Any, ...]:
1684
+ return ("remove_table_comment", self.to_table())
1685
+
1686
+
1687
+ @Operations.register_operation("alter_column")
1688
+ @BatchOperations.register_operation("alter_column", "batch_alter_column")
1689
+ class AlterColumnOp(AlterTableOp):
1690
+ """Represent an alter column operation."""
1691
+
1692
+ def __init__(
1693
+ self,
1694
+ table_name: str,
1695
+ column_name: str,
1696
+ *,
1697
+ schema: Optional[str] = None,
1698
+ existing_type: Optional[Any] = None,
1699
+ existing_server_default: Any = False,
1700
+ existing_nullable: Optional[bool] = None,
1701
+ existing_comment: Optional[str] = None,
1702
+ modify_nullable: Optional[bool] = None,
1703
+         modify_comment: Optional[Union[str, Literal[False]]] = False,
+         modify_server_default: Any = False,
+         modify_name: Optional[str] = None,
+         modify_type: Optional[Any] = None,
+         **kw: Any,
+     ) -> None:
+         super().__init__(table_name, schema=schema)
+         self.column_name = column_name
+         self.existing_type = existing_type
+         self.existing_server_default = existing_server_default
+         self.existing_nullable = existing_nullable
+         self.existing_comment = existing_comment
+         self.modify_nullable = modify_nullable
+         self.modify_comment = modify_comment
+         self.modify_server_default = modify_server_default
+         self.modify_name = modify_name
+         self.modify_type = modify_type
+         self.kw = kw
+
+     def to_diff_tuple(self) -> Any:
+         col_diff = []
+         schema, tname, cname = self.schema, self.table_name, self.column_name
+
+         if self.modify_type is not None:
+             col_diff.append(
+                 (
+                     "modify_type",
+                     schema,
+                     tname,
+                     cname,
+                     {
+                         "existing_nullable": self.existing_nullable,
+                         "existing_server_default": (
+                             self.existing_server_default
+                         ),
+                         "existing_comment": self.existing_comment,
+                     },
+                     self.existing_type,
+                     self.modify_type,
+                 )
+             )
+
+         if self.modify_nullable is not None:
+             col_diff.append(
+                 (
+                     "modify_nullable",
+                     schema,
+                     tname,
+                     cname,
+                     {
+                         "existing_type": self.existing_type,
+                         "existing_server_default": (
+                             self.existing_server_default
+                         ),
+                         "existing_comment": self.existing_comment,
+                     },
+                     self.existing_nullable,
+                     self.modify_nullable,
+                 )
+             )
+
+         if self.modify_server_default is not False:
+             col_diff.append(
+                 (
+                     "modify_default",
+                     schema,
+                     tname,
+                     cname,
+                     {
+                         "existing_nullable": self.existing_nullable,
+                         "existing_type": self.existing_type,
+                         "existing_comment": self.existing_comment,
+                     },
+                     self.existing_server_default,
+                     self.modify_server_default,
+                 )
+             )
+
+         if self.modify_comment is not False:
+             col_diff.append(
+                 (
+                     "modify_comment",
+                     schema,
+                     tname,
+                     cname,
+                     {
+                         "existing_nullable": self.existing_nullable,
+                         "existing_type": self.existing_type,
+                         "existing_server_default": (
+                             self.existing_server_default
+                         ),
+                     },
+                     self.existing_comment,
+                     self.modify_comment,
+                 )
+             )
+
+         return col_diff
+
+     def has_changes(self) -> bool:
+         hc1 = (
+             self.modify_nullable is not None
+             or self.modify_server_default is not False
+             or self.modify_type is not None
+             or self.modify_comment is not False
+         )
+         if hc1:
+             return True
+         for kw in self.kw:
+             if kw.startswith("modify_"):
+                 return True
+         else:
+             return False
+
+     def reverse(self) -> AlterColumnOp:
+         kw = self.kw.copy()
+         kw["existing_type"] = self.existing_type
+         kw["existing_nullable"] = self.existing_nullable
+         kw["existing_server_default"] = self.existing_server_default
+         kw["existing_comment"] = self.existing_comment
+         if self.modify_type is not None:
+             kw["modify_type"] = self.modify_type
+         if self.modify_nullable is not None:
+             kw["modify_nullable"] = self.modify_nullable
+         if self.modify_server_default is not False:
+             kw["modify_server_default"] = self.modify_server_default
+         if self.modify_comment is not False:
+             kw["modify_comment"] = self.modify_comment
+
+         # TODO: make this a little simpler
+         all_keys = {
+             m.group(1)
+             for m in [re.match(r"^(?:existing_|modify_)(.+)$", k) for k in kw]
+             if m
+         }
+
+         for k in all_keys:
+             if "modify_%s" % k in kw:
+                 swap = kw["existing_%s" % k]
+                 kw["existing_%s" % k] = kw["modify_%s" % k]
+                 kw["modify_%s" % k] = swap
+
+         return self.__class__(
+             self.table_name, self.column_name, schema=self.schema, **kw
+         )
+
+     @classmethod
+     def alter_column(
+         cls,
+         operations: Operations,
+         table_name: str,
+         column_name: str,
+         *,
+         nullable: Optional[bool] = None,
+         comment: Optional[Union[str, Literal[False]]] = False,
+         server_default: Union[
+             str, bool, Identity, Computed, TextClause, None
+         ] = False,
+         new_column_name: Optional[str] = None,
+         type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None,
+         existing_type: Optional[
+             Union[TypeEngine[Any], Type[TypeEngine[Any]]]
+         ] = None,
+         existing_server_default: Union[
+             str, bool, Identity, Computed, TextClause, None
+         ] = False,
+         existing_nullable: Optional[bool] = None,
+         existing_comment: Optional[str] = None,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         r"""Issue an "alter column" instruction using the
+         current migration context.
+
+         Generally, only that aspect of the column which
+         is being changed, i.e. name, type, nullability,
+         default, needs to be specified.  Multiple changes
+         can also be specified at once and the backend should
+         "do the right thing", emitting each change either
+         separately or together as the backend allows.
+
+         MySQL has special requirements here, since MySQL
+         cannot ALTER a column without a full specification.
+         When producing MySQL-compatible migration files,
+         it is recommended that the ``existing_type``,
+         ``existing_server_default``, and ``existing_nullable``
+         parameters be present, if not being altered.
+
+         Type changes which are against the SQLAlchemy
+         "schema" types :class:`~sqlalchemy.types.Boolean`
+         and :class:`~sqlalchemy.types.Enum` may also
+         add or drop constraints which accompany those
+         types on backends that don't support them natively.
+         The ``existing_type`` argument is
+         used in this case to identify and remove a previous
+         constraint that was bound to the type object.
+
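+         For example, a hypothetical migration might widen a column's type
+         and make it non-nullable in one call (the table, column, and types
+         here are illustrative only)::
+
+             from alembic import op
+             import sqlalchemy as sa
+
+             op.alter_column(
+                 "account",
+                 "name",
+                 existing_type=sa.VARCHAR(30),
+                 type_=sa.VARCHAR(100),
+                 existing_nullable=True,
+                 nullable=False,
+             )
+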
+         :param table_name: string name of the target table.
+         :param column_name: string name of the target column,
+          as it exists before the operation begins.
+         :param nullable: Optional; specify ``True`` or ``False``
+          to alter the column's nullability.
+         :param server_default: Optional; specify a string
+          SQL expression, :func:`~sqlalchemy.sql.expression.text`,
+          or :class:`~sqlalchemy.schema.DefaultClause` to indicate
+          an alteration to the column's default value.
+          Set to ``None`` to have the default removed.
+         :param comment: optional string text of a new comment to add to the
+          column.
+         :param new_column_name: Optional; specify a string name here to
+          indicate the new name within a column rename operation.
+         :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine`
+          type object to specify a change to the column's type.
+          For SQLAlchemy types that also indicate a constraint (i.e.
+          :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`),
+          the constraint is also generated.
+         :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column;
+          currently understood by the MySQL dialect.
+         :param existing_type: Optional; a
+          :class:`~sqlalchemy.types.TypeEngine`
+          type object to specify the previous type.  This
+          is required for all MySQL column alter operations that
+          don't otherwise specify a new type, as well as for
+          when nullability is being changed on a SQL Server
+          column.  It is also used if the type is a so-called
+          SQLAlchemy "schema" type which may define a constraint (i.e.
+          :class:`~sqlalchemy.types.Boolean`,
+          :class:`~sqlalchemy.types.Enum`),
+          so that the constraint can be dropped.
+         :param existing_server_default: Optional; The existing
+          default value of the column.  Required on MySQL if
+          an existing default is not being changed; else MySQL
+          removes the default.
+         :param existing_nullable: Optional; the existing nullability
+          of the column.  Required on MySQL if the existing nullability
+          is not being changed; else MySQL sets this to NULL.
+         :param existing_autoincrement: Optional; the existing autoincrement
+          of the column.  Used for MySQL's system of altering a column
+          that specifies ``AUTO_INCREMENT``.
+         :param existing_comment: string text of the existing comment on the
+          column to be maintained.  Required on MySQL if the existing comment
+          on the column is not being changed.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param postgresql_using: String argument which will indicate a
+          SQL expression to render within the Postgresql-specific USING clause
+          within ALTER COLUMN.  This string is taken directly as raw SQL which
+          must explicitly include any necessary quoting or escaping of tokens
+          within the expression.
+
+         """
+
+         alt = cls(
+             table_name,
+             column_name,
+             schema=schema,
+             existing_type=existing_type,
+             existing_server_default=existing_server_default,
+             existing_nullable=existing_nullable,
+             existing_comment=existing_comment,
+             modify_name=new_column_name,
+             modify_type=type_,
+             modify_server_default=server_default,
+             modify_nullable=nullable,
+             modify_comment=comment,
+             **kw,
+         )
+
+         return operations.invoke(alt)
+
+     @classmethod
+     def batch_alter_column(
+         cls,
+         operations: BatchOperations,
+         column_name: str,
+         *,
+         nullable: Optional[bool] = None,
+         comment: Optional[Union[str, Literal[False]]] = False,
+         server_default: Any = False,
+         new_column_name: Optional[str] = None,
+         type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None,
+         existing_type: Optional[
+             Union[TypeEngine[Any], Type[TypeEngine[Any]]]
+         ] = None,
+         existing_server_default: Optional[
+             Union[str, bool, Identity, Computed]
+         ] = False,
+         existing_nullable: Optional[bool] = None,
+         existing_comment: Optional[str] = None,
+         insert_before: Optional[str] = None,
+         insert_after: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         """Issue an "alter column" instruction using the current
+         batch migration context.
+
+         Parameters are the same as that of :meth:`.Operations.alter_column`,
+         as well as the following option(s):
+
+         :param insert_before: String name of an existing column which this
+          column should be placed before, when creating the new table.
+
+         :param insert_after: String name of an existing column which this
+          column should be placed after, when creating the new table.  If
+          both :paramref:`.BatchOperations.alter_column.insert_before`
+          and :paramref:`.BatchOperations.alter_column.insert_after` are
+          omitted, the column is inserted after the last existing column
+          in the table.
+
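+         A minimal usage sketch, assuming a batch context opened via
+         :meth:`.Operations.batch_alter_table` (table and column names are
+         illustrative)::
+
+             with op.batch_alter_table("account") as batch_op:
+                 batch_op.alter_column("name", nullable=False)
+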
+         .. seealso::
+
+             :meth:`.Operations.alter_column`
+
+
+         """
+         alt = cls(
+             operations.impl.table_name,
+             column_name,
+             schema=operations.impl.schema,
+             existing_type=existing_type,
+             existing_server_default=existing_server_default,
+             existing_nullable=existing_nullable,
+             existing_comment=existing_comment,
+             modify_name=new_column_name,
+             modify_type=type_,
+             modify_server_default=server_default,
+             modify_nullable=nullable,
+             modify_comment=comment,
+             insert_before=insert_before,
+             insert_after=insert_after,
+             **kw,
+         )
+
+         return operations.invoke(alt)
+
+
+ @Operations.register_operation("add_column")
+ @BatchOperations.register_operation("add_column", "batch_add_column")
+ class AddColumnOp(AlterTableOp):
+     """Represent an add column operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         column: Column[Any],
+         *,
+         schema: Optional[str] = None,
+         if_not_exists: Optional[bool] = None,
+         **kw: Any,
+     ) -> None:
+         super().__init__(table_name, schema=schema)
+         self.column = column
+         self.if_not_exists = if_not_exists
+         self.kw = kw
+
+     def reverse(self) -> DropColumnOp:
+         op = DropColumnOp.from_column_and_tablename(
+             self.schema, self.table_name, self.column
+         )
+         op.if_exists = self.if_not_exists
+         return op
+
+     def to_diff_tuple(
+         self,
+     ) -> Tuple[str, Optional[str], str, Column[Any]]:
+         return ("add_column", self.schema, self.table_name, self.column)
+
+     def to_column(self) -> Column[Any]:
+         return self.column
+
+     @classmethod
+     def from_column(cls, col: Column[Any]) -> AddColumnOp:
+         return cls(col.table.name, col, schema=col.table.schema)
+
+     @classmethod
+     def from_column_and_tablename(
+         cls,
+         schema: Optional[str],
+         tname: str,
+         col: Column[Any],
+     ) -> AddColumnOp:
+         return cls(tname, col, schema=schema)
+
+     @classmethod
+     def add_column(
+         cls,
+         operations: Operations,
+         table_name: str,
+         column: Column[Any],
+         *,
+         schema: Optional[str] = None,
+         if_not_exists: Optional[bool] = None,
+     ) -> None:
+         """Issue an "add column" instruction using the current
+         migration context.
+
+         e.g.::
+
+             from alembic import op
+             from sqlalchemy import Column, String
+
+             op.add_column("organization", Column("name", String()))
+
+         The :meth:`.Operations.add_column` method typically corresponds
+         to the SQL command "ALTER TABLE... ADD COLUMN".  Within the scope
+         of this command, the column's name, datatype, nullability,
+         and optional server-generated defaults may be indicated.
+
+         .. note::
+
+             With the exception of NOT NULL constraints or single-column FOREIGN
+             KEY constraints, other kinds of constraints such as PRIMARY KEY,
+             UNIQUE or CHECK constraints **cannot** be generated using this
+             method; for these constraints, refer to operations such as
+             :meth:`.Operations.create_primary_key` and
+             :meth:`.Operations.create_check_constraint`. In particular, the
+             following :class:`~sqlalchemy.schema.Column` parameters are
+             **ignored**:
+
+             * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases
+               typically do not support an ALTER operation that can add
+               individual columns one at a time to an existing primary key
+               constraint, therefore it's less ambiguous to use the
+               :meth:`.Operations.create_primary_key` method, which assumes no
+               existing primary key constraint is present.
+             * :paramref:`~sqlalchemy.schema.Column.unique` - use the
+               :meth:`.Operations.create_unique_constraint` method
+             * :paramref:`~sqlalchemy.schema.Column.index` - use the
+               :meth:`.Operations.create_index` method
+
+
+         The provided :class:`~sqlalchemy.schema.Column` object may include a
+         :class:`~sqlalchemy.schema.ForeignKey` constraint directive,
+         referencing a remote table name.  For this specific type of constraint,
+         Alembic will automatically emit a second ALTER statement in order to
+         add the single-column FOREIGN KEY constraint separately::
+
+             from alembic import op
+             from sqlalchemy import Column, INTEGER, ForeignKey
+
+             op.add_column(
+                 "organization",
+                 Column("account_id", INTEGER, ForeignKey("accounts.id")),
+             )
+
+         The column argument passed to :meth:`.Operations.add_column` is a
+         :class:`~sqlalchemy.schema.Column` construct, used in the same way it's
+         used in SQLAlchemy.  In particular, values or functions to be indicated
+         as producing the column's default value on the database side are
+         specified using the ``server_default`` parameter, and not ``default``
+         which only specifies Python-side defaults::
+
+             from alembic import op
+             from sqlalchemy import Column, TIMESTAMP, func
+
+             # specify "DEFAULT NOW" along with the column add
+             op.add_column(
+                 "account",
+                 Column("timestamp", TIMESTAMP, server_default=func.now()),
+             )
+
+         :param table_name: String name of the parent table.
+         :param column: a :class:`sqlalchemy.schema.Column` object
+          representing the new column.
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param if_not_exists: If True, adds IF NOT EXISTS operator
+          when creating the new column for compatible dialects
+
+          .. versionadded:: 1.16.0
+
+         """
+
+         op = cls(
+             table_name,
+             column,
+             schema=schema,
+             if_not_exists=if_not_exists,
+         )
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_add_column(
+         cls,
+         operations: BatchOperations,
+         column: Column[Any],
+         *,
+         insert_before: Optional[str] = None,
+         insert_after: Optional[str] = None,
+         if_not_exists: Optional[bool] = None,
+     ) -> None:
+         """Issue an "add column" instruction using the current
+         batch migration context.
+
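+         A minimal usage sketch, assuming a batch context opened via
+         :meth:`.Operations.batch_alter_table` (names are illustrative)::
+
+             from sqlalchemy import Column, String
+
+             with op.batch_alter_table("organization") as batch_op:
+                 batch_op.add_column(Column("name", String()))
+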
+         .. seealso::
+
+             :meth:`.Operations.add_column`
+
+         """
+
+         kw = {}
+         if insert_before:
+             kw["insert_before"] = insert_before
+         if insert_after:
+             kw["insert_after"] = insert_after
+
+         op = cls(
+             operations.impl.table_name,
+             column,
+             schema=operations.impl.schema,
+             if_not_exists=if_not_exists,
+             **kw,
+         )
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("drop_column")
+ @BatchOperations.register_operation("drop_column", "batch_drop_column")
+ class DropColumnOp(AlterTableOp):
+     """Represent a drop column operation."""
+
+     def __init__(
+         self,
+         table_name: str,
+         column_name: str,
+         *,
+         schema: Optional[str] = None,
+         if_exists: Optional[bool] = None,
+         _reverse: Optional[AddColumnOp] = None,
+         **kw: Any,
+     ) -> None:
+         super().__init__(table_name, schema=schema)
+         self.column_name = column_name
+         self.kw = kw
+         self.if_exists = if_exists
+         self._reverse = _reverse
+
+     def to_diff_tuple(
+         self,
+     ) -> Tuple[str, Optional[str], str, Column[Any]]:
+         return (
+             "remove_column",
+             self.schema,
+             self.table_name,
+             self.to_column(),
+         )
+
+     def reverse(self) -> AddColumnOp:
+         if self._reverse is None:
+             raise ValueError(
+                 "operation is not reversible; "
+                 "original column is not present"
+             )
+
+         op = AddColumnOp.from_column_and_tablename(
+             self.schema, self.table_name, self._reverse.column
+         )
+         op.if_not_exists = self.if_exists
+         return op
+
+     @classmethod
+     def from_column_and_tablename(
+         cls,
+         schema: Optional[str],
+         tname: str,
+         col: Column[Any],
+     ) -> DropColumnOp:
+         return cls(
+             tname,
+             col.name,
+             schema=schema,
+             _reverse=AddColumnOp.from_column_and_tablename(schema, tname, col),
+         )
+
+     def to_column(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> Column[Any]:
+         if self._reverse is not None:
+             return self._reverse.column
+         schema_obj = schemaobj.SchemaObjects(migration_context)
+         return schema_obj.column(self.column_name, NULLTYPE)
+
+     @classmethod
+     def drop_column(
+         cls,
+         operations: Operations,
+         table_name: str,
+         column_name: str,
+         *,
+         schema: Optional[str] = None,
+         **kw: Any,
+     ) -> None:
+         """Issue a "drop column" instruction using the current
+         migration context.
+
+         e.g.::
+
+             drop_column("organization", "account_id")
+
+         :param table_name: name of table
+         :param column_name: name of column
+         :param schema: Optional schema name to operate within.  To control
+          quoting of the schema outside of the default behavior, use
+          the SQLAlchemy construct
+          :class:`~sqlalchemy.sql.elements.quoted_name`.
+         :param if_exists: If True, adds IF EXISTS operator when
+          dropping the column for compatible dialects
+
+          .. versionadded:: 1.16.0
+
+         :param mssql_drop_check: Optional boolean.  When ``True``, on
+          Microsoft SQL Server only, first
+          drop the CHECK constraint on the column using a
+          SQL-script-compatible
+          block that selects into a @variable from sys.check_constraints,
+          then exec's a separate DROP CONSTRAINT for that constraint.
+         :param mssql_drop_default: Optional boolean.  When ``True``, on
+          Microsoft SQL Server only, first
+          drop the DEFAULT constraint on the column using a
+          SQL-script-compatible
+          block that selects into a @variable from sys.default_constraints,
+          then exec's a separate DROP CONSTRAINT for that default.
+         :param mssql_drop_foreign_key: Optional boolean.  When ``True``, on
+          Microsoft SQL Server only, first
+          drop a single FOREIGN KEY constraint on the column using a
+          SQL-script-compatible
+          block that selects into a @variable from
+          sys.foreign_keys/sys.foreign_key_columns,
+          then exec's a separate DROP CONSTRAINT for that default.  Only
+          works if the column has exactly one FK constraint which refers to
+          it, at the moment.
+         """
+
+         op = cls(table_name, column_name, schema=schema, **kw)
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_drop_column(
+         cls, operations: BatchOperations, column_name: str, **kw: Any
+     ) -> None:
+         """Issue a "drop column" instruction using the current
+         batch migration context.
+
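+         A minimal usage sketch, assuming a batch context opened via
+         :meth:`.Operations.batch_alter_table` (names are illustrative)::
+
+             with op.batch_alter_table("organization") as batch_op:
+                 batch_op.drop_column("account_id")
+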
+         .. seealso::
+
+             :meth:`.Operations.drop_column`
+
+         """
+         op = cls(
+             operations.impl.table_name,
+             column_name,
+             schema=operations.impl.schema,
+             **kw,
+         )
+         return operations.invoke(op)
+
+
+ @Operations.register_operation("bulk_insert")
+ class BulkInsertOp(MigrateOperation):
+     """Represent a bulk insert operation."""
+
+     def __init__(
+         self,
+         table: Union[Table, TableClause],
+         rows: List[Dict[str, Any]],
+         *,
+         multiinsert: bool = True,
+     ) -> None:
+         self.table = table
+         self.rows = rows
+         self.multiinsert = multiinsert
+
+     @classmethod
+     def bulk_insert(
+         cls,
+         operations: Operations,
+         table: Union[Table, TableClause],
+         rows: List[Dict[str, Any]],
+         *,
+         multiinsert: bool = True,
+     ) -> None:
+         """Issue a "bulk insert" operation using the current
+         migration context.
+
+         This provides a means of representing an INSERT of multiple rows
+         which works equally well in the context of executing on a live
+         connection as well as that of generating a SQL script.  In the
+         case of a SQL script, the values are rendered inline into the
+         statement.
+
+         e.g.::
+
+             from alembic import op
+             from datetime import date
+             from sqlalchemy.sql import table, column
+             from sqlalchemy import String, Integer, Date
+
+             # Create an ad-hoc table to use for the insert statement.
+             accounts_table = table(
+                 "account",
+                 column("id", Integer),
+                 column("name", String),
+                 column("create_date", Date),
+             )
+
+             op.bulk_insert(
+                 accounts_table,
+                 [
+                     {
+                         "id": 1,
+                         "name": "John Smith",
+                         "create_date": date(2010, 10, 5),
+                     },
+                     {
+                         "id": 2,
+                         "name": "Ed Williams",
+                         "create_date": date(2007, 5, 27),
+                     },
+                     {
+                         "id": 3,
+                         "name": "Wendy Jones",
+                         "create_date": date(2008, 8, 15),
+                     },
+                 ],
+             )
+
+         When using --sql mode, some datatypes may not render inline
+         automatically, such as dates and other special types.  When this
+         issue is present, :meth:`.Operations.inline_literal` may be used::
+
+             op.bulk_insert(
+                 accounts_table,
+                 [
+                     {
+                         "id": 1,
+                         "name": "John Smith",
+                         "create_date": op.inline_literal("2010-10-05"),
+                     },
+                     {
+                         "id": 2,
+                         "name": "Ed Williams",
+                         "create_date": op.inline_literal("2007-05-27"),
+                     },
+                     {
+                         "id": 3,
+                         "name": "Wendy Jones",
+                         "create_date": op.inline_literal("2008-08-15"),
+                     },
+                 ],
+                 multiinsert=False,
+             )
+
+         When using :meth:`.Operations.inline_literal` in conjunction with
+         :meth:`.Operations.bulk_insert`, in order for the statement to work
+         in "online" (e.g. non --sql) mode, the
+         :paramref:`~.Operations.bulk_insert.multiinsert`
+         flag should be set to ``False``, which will have the effect of
+         individual INSERT statements being emitted to the database, each
+         with a distinct VALUES clause, so that the "inline" values can
+         still be rendered, rather than attempting to pass the values
+         as bound parameters.
+
+         :param table: a table object which represents the target of the INSERT.
+
+         :param rows: a list of dictionaries indicating rows.
+
+         :param multiinsert: when at its default of True and --sql mode is not
+            enabled, the INSERT statement will be executed using
+            "executemany()" style, where all elements in the list of
+            dictionaries are passed as bound parameters in a single
+            list.  Setting this to False results in individual INSERT
+            statements being emitted per parameter set, and is needed
+            in those cases where non-literal values are present in the
+            parameter sets.
+
+         """
+
+         op = cls(table, rows, multiinsert=multiinsert)
+         operations.invoke(op)
+
+
+ @Operations.register_operation("execute")
+ @BatchOperations.register_operation("execute", "batch_execute")
+ class ExecuteSQLOp(MigrateOperation):
+     """Represent an execute SQL operation."""
+
+     def __init__(
+         self,
+         sqltext: Union[Executable, str],
+         *,
+         execution_options: Optional[dict[str, Any]] = None,
+     ) -> None:
+         self.sqltext = sqltext
+         self.execution_options = execution_options
+
+     @classmethod
+     def execute(
+         cls,
+         operations: Operations,
+         sqltext: Union[Executable, str],
+         *,
+         execution_options: Optional[dict[str, Any]] = None,
+     ) -> None:
+         r"""Execute the given SQL using the current migration context.
+
+         The given SQL can be a plain string, e.g.::
+
+             op.execute("INSERT INTO table (foo) VALUES ('some value')")
+
+         Or it can be any kind of Core SQL Expression construct, such as
+         below where we use an update construct::
+
+             from sqlalchemy.sql import table, column
+             from sqlalchemy import String
+             from alembic import op
+
+             account = table("account", column("name", String))
+             op.execute(
+                 account.update()
+                 .where(account.c.name == op.inline_literal("account 1"))
+                 .values({"name": op.inline_literal("account 2")})
+             )
+
+         Above, we made use of the SQLAlchemy
+         :func:`sqlalchemy.sql.expression.table` and
+         :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
+         ad-hoc table construct just for our UPDATE statement.  A full
+         :class:`~sqlalchemy.schema.Table` construct of course works perfectly
+         fine as well, though note it's a recommended practice to at least
+         ensure the definition of a table is self-contained within the migration
+         script, rather than imported from a module that may break compatibility
+         with older migrations.
+
+         In a SQL script context, the statement is emitted directly to the
+         output stream.  There is *no* return result, however, as this
+         function is oriented towards generating a change script
+         that can run in "offline" mode.  Additionally, parameterized
+         statements are discouraged here, as they *will not work* in offline
+         mode.  Above, we use :meth:`.inline_literal` where parameters are
+         to be used.
+
+         For full interaction with a connected database where parameters can
+         also be used normally, use the "bind" available from the context::
+
+             from alembic import op
+
+             connection = op.get_bind()
+
+             connection.execute(
+                 account.update()
+                 .where(account.c.name == "account 1")
+                 .values({"name": "account 2"})
+             )
+
+         Additionally, when passing the statement as a plain string, it is first
+         coerced into a :func:`sqlalchemy.sql.expression.text` construct
+         before being passed along.  In the less likely case that the
+         literal SQL string contains a colon, it must be escaped with a
+         backslash, as::
+
+             op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')")
+
+
+         :param sqltext: Any legal SQLAlchemy expression, including:
+
+         * a string
+         * a :func:`sqlalchemy.sql.expression.text` construct.
+         * a :func:`sqlalchemy.sql.expression.insert` construct.
+         * a :func:`sqlalchemy.sql.expression.update` construct.
+         * a :func:`sqlalchemy.sql.expression.delete` construct.
+         * Any "executable" described in SQLAlchemy Core documentation,
+           noting that no result set is returned.
+
+         .. note::  when passing a plain string, the statement is coerced into
+            a :func:`sqlalchemy.sql.expression.text` construct. This construct
+            considers symbols with colons, e.g. ``:foo`` to be bound parameters.
+            To avoid this, ensure that colon symbols are escaped, e.g.
+            ``\:foo``.
+
+         :param execution_options: Optional dictionary of
+          execution options, will be passed to
+          :meth:`sqlalchemy.engine.Connection.execution_options`.
+         """
+         op = cls(sqltext, execution_options=execution_options)
+         return operations.invoke(op)
+
+     @classmethod
+     def batch_execute(
+         cls,
+         operations: Operations,
+         sqltext: Union[Executable, str],
+         *,
+         execution_options: Optional[dict[str, Any]] = None,
+     ) -> None:
+         """Execute the given SQL using the current migration context.
+
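+         A minimal usage sketch, assuming a batch context opened via
+         :meth:`.Operations.batch_alter_table` (the statement is
+         illustrative)::
+
+             with op.batch_alter_table("account") as batch_op:
+                 batch_op.execute("UPDATE account SET active = 1")
+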
+         .. seealso::
+
+             :meth:`.Operations.execute`
+
+         """
+         return cls.execute(
+             operations, sqltext, execution_options=execution_options
+         )
+
+     def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]:
+         return ("execute", self.sqltext)
+
+
+ class OpContainer(MigrateOperation):
+     """Represent a sequence of operations."""
+
+     def __init__(self, ops: Sequence[MigrateOperation] = ()) -> None:
+         self.ops = list(ops)
+
+     def is_empty(self) -> bool:
+         return not self.ops
+
+     def as_diffs(self) -> Any:
+         return list(OpContainer._ops_as_diffs(self))
+
+     @classmethod
+     def _ops_as_diffs(
+         cls, migrations: OpContainer
+     ) -> Iterator[Tuple[Any, ...]]:
+         for op in migrations.ops:
+             if hasattr(op, "ops"):
+                 yield from cls._ops_as_diffs(cast("OpContainer", op))
+             else:
+                 yield op.to_diff_tuple()
+
+
+ class ModifyTableOps(OpContainer):
+     """Contains a sequence of operations that all apply to a single Table."""
+
+     def __init__(
+         self,
+         table_name: str,
+         ops: Sequence[MigrateOperation],
+         *,
+         schema: Optional[str] = None,
+     ) -> None:
+         super().__init__(ops)
+         self.table_name = table_name
+         self.schema = schema
+
+     def reverse(self) -> ModifyTableOps:
+         return ModifyTableOps(
+             self.table_name,
+             ops=list(reversed([op.reverse() for op in self.ops])),
+             schema=self.schema,
+         )
+
+
+ class UpgradeOps(OpContainer):
+     """contains a sequence of operations that would apply to the
+     'upgrade' stream of a script.
+
+     .. seealso::
+
+         :ref:`customizing_revision`
+
+     """
+
+     def __init__(
+         self,
+         ops: Sequence[MigrateOperation] = (),
+         upgrade_token: str = "upgrades",
+     ) -> None:
+         super().__init__(ops=ops)
+         self.upgrade_token = upgrade_token
+
+     def reverse_into(self, downgrade_ops: DowngradeOps) -> DowngradeOps:
+         downgrade_ops.ops[:] = list(
+             reversed([op.reverse() for op in self.ops])
+         )
+         return downgrade_ops
+
+     def reverse(self) -> DowngradeOps:
+         return self.reverse_into(DowngradeOps(ops=[]))
+
+
+ class DowngradeOps(OpContainer):
+     """contains a sequence of operations that would apply to the
+     'downgrade' stream of a script.
+
+     .. seealso::
+
+         :ref:`customizing_revision`
+
+     """
+
+     def __init__(
+         self,
+         ops: Sequence[MigrateOperation] = (),
+         downgrade_token: str = "downgrades",
+     ) -> None:
+         super().__init__(ops=ops)
+         self.downgrade_token = downgrade_token
+
+     def reverse(self) -> UpgradeOps:
+         return UpgradeOps(
+             ops=list(reversed([op.reverse() for op in self.ops]))
+         )
+
+
+ class MigrationScript(MigrateOperation):
+     """represents a migration script.
+
+     E.g. when autogenerate encounters this object, this corresponds to the
+     production of an actual script file.
+
+     A normal :class:`.MigrationScript` object would contain a single
+     :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive.
+     These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops``
+     attributes.
+
+     In the case of an autogenerate operation that runs multiple times,
+     such as the multiple database example in the "multidb" template,
+     the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled,
+     and instead these objects should be accessed via the ``.upgrade_ops_list``
+     and ``.downgrade_ops_list`` list-based attributes.  These latter
+     attributes are always available at the very least as single-element lists.
+
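+     A sketch of inspecting this object from a ``process_revision_directives``
+     hook during autogenerate (the empty-revision check shown is illustrative,
+     not part of this class)::
+
+         def process_revision_directives(context, revision, directives):
+             migration_script = directives[0]
+             if migration_script.upgrade_ops.is_empty():
+                 # skip producing a new file when nothing was detected
+                 directives[:] = []
+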
+     .. seealso::
+
+         :ref:`customizing_revision`
+
+     """
+
+     _needs_render: Optional[bool]
+     _upgrade_ops: List[UpgradeOps]
+     _downgrade_ops: List[DowngradeOps]
+
+     def __init__(
+         self,
+         rev_id: Optional[str],
+         upgrade_ops: UpgradeOps,
+         downgrade_ops: DowngradeOps,
+         *,
+         message: Optional[str] = None,
+         imports: Set[str] = set(),
+         head: Optional[str] = None,
+         splice: Optional[bool] = None,
+         branch_label: Optional[_RevIdType] = None,
+         version_path: Union[str, os.PathLike[str], None] = None,
+         depends_on: Optional[_RevIdType] = None,
+     ) -> None:
+         self.rev_id = rev_id
+         self.message = message
+         self.imports = imports
+         self.head = head
+         self.splice = splice
+         self.branch_label = branch_label
+         self.version_path = (
+             pathlib.Path(version_path).as_posix() if version_path else None
+         )
+         self.depends_on = depends_on
+         self.upgrade_ops = upgrade_ops
+         self.downgrade_ops = downgrade_ops
+
+     @property
+     def upgrade_ops(self) -> Optional[UpgradeOps]:
+         """An instance of :class:`.UpgradeOps`.
+
+         .. seealso::
+
+             :attr:`.MigrationScript.upgrade_ops_list`
+         """
+         if len(self._upgrade_ops) > 1:
+             raise ValueError(
+                 "This MigrationScript instance has a multiple-entry "
+                 "list for UpgradeOps; please use the "
+                 "upgrade_ops_list attribute."
+             )
+         elif not self._upgrade_ops:
+             return None
+         else:
+             return self._upgrade_ops[0]
+
+     @upgrade_ops.setter
+     def upgrade_ops(
+         self, upgrade_ops: Union[UpgradeOps, List[UpgradeOps]]
+     ) -> None:
+         self._upgrade_ops = util.to_list(upgrade_ops)
+         for elem in self._upgrade_ops:
+             assert isinstance(elem, UpgradeOps)
+
+     @property
+     def downgrade_ops(self) -> Optional[DowngradeOps]:
+         """An instance of :class:`.DowngradeOps`.
+
+         .. seealso::
+
+             :attr:`.MigrationScript.downgrade_ops_list`
+         """
+         if len(self._downgrade_ops) > 1:
+             raise ValueError(
+                 "This MigrationScript instance has a multiple-entry "
+                 "list for DowngradeOps; please use the "
+                 "downgrade_ops_list attribute."
+             )
+         elif not self._downgrade_ops:
+             return None
+         else:
+             return self._downgrade_ops[0]
+
+     @downgrade_ops.setter
+     def downgrade_ops(
+         self, downgrade_ops: Union[DowngradeOps, List[DowngradeOps]]
+     ) -> None:
+         self._downgrade_ops = util.to_list(downgrade_ops)
+         for elem in self._downgrade_ops:
+             assert isinstance(elem, DowngradeOps)
+
+     @property
+     def upgrade_ops_list(self) -> List[UpgradeOps]:
+         """A list of :class:`.UpgradeOps` instances.
+
+         This is used in place of the :attr:`.MigrationScript.upgrade_ops`
+         attribute when dealing with a revision operation that does
+         multiple autogenerate passes.
+
+         """
+         return self._upgrade_ops
+
+     @property
+     def downgrade_ops_list(self) -> List[DowngradeOps]:
+         """A list of :class:`.DowngradeOps` instances.
+
+         This is used in place of the :attr:`.MigrationScript.downgrade_ops`
+         attribute when dealing with a revision operation that does
+         multiple autogenerate passes.
+
+         """
+         return self._downgrade_ops
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/schemaobj.py ADDED
@@ -0,0 +1,290 @@
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+ # mypy: no-warn-return-any, allow-any-generics
+
+ from __future__ import annotations
+
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import Sequence
+ from typing import Tuple
+ from typing import TYPE_CHECKING
+ from typing import Union
+
+ from sqlalchemy import schema as sa_schema
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import Constraint
+ from sqlalchemy.sql.schema import Index
+ from sqlalchemy.types import Integer
+ from sqlalchemy.types import NULLTYPE
+
+ from .. import util
+ from ..util import sqla_compat
+
+ if TYPE_CHECKING:
+     from sqlalchemy.sql.elements import ColumnElement
+     from sqlalchemy.sql.elements import TextClause
+     from sqlalchemy.sql.schema import CheckConstraint
+     from sqlalchemy.sql.schema import ForeignKey
+     from sqlalchemy.sql.schema import ForeignKeyConstraint
+     from sqlalchemy.sql.schema import MetaData
+     from sqlalchemy.sql.schema import PrimaryKeyConstraint
+     from sqlalchemy.sql.schema import Table
+     from sqlalchemy.sql.schema import UniqueConstraint
+     from sqlalchemy.sql.type_api import TypeEngine
+
+     from ..runtime.migration import MigrationContext
+
+
+ class SchemaObjects:
+     def __init__(
+         self, migration_context: Optional[MigrationContext] = None
+     ) -> None:
+         self.migration_context = migration_context
+
+     def primary_key_constraint(
+         self,
+         name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         cols: Sequence[str],
+         schema: Optional[str] = None,
+         **dialect_kw,
+     ) -> PrimaryKeyConstraint:
+         m = self.metadata()
+         columns = [sa_schema.Column(n, NULLTYPE) for n in cols]
+         t = sa_schema.Table(table_name, m, *columns, schema=schema)
+         # SQLAlchemy primary key constraint name arg is wrongly typed on
+         # the SQLAlchemy side through 2.0.5 at least
+         p = sa_schema.PrimaryKeyConstraint(
+             *[t.c[n] for n in cols], name=name, **dialect_kw  # type: ignore
+         )
+         return p
+
+     def foreign_key_constraint(
+         self,
+         name: Optional[sqla_compat._ConstraintNameDefined],
+         source: str,
+         referent: str,
+         local_cols: List[str],
+         remote_cols: List[str],
+         onupdate: Optional[str] = None,
+         ondelete: Optional[str] = None,
+         deferrable: Optional[bool] = None,
+         source_schema: Optional[str] = None,
+         referent_schema: Optional[str] = None,
+         initially: Optional[str] = None,
+         match: Optional[str] = None,
+         **dialect_kw,
+     ) -> ForeignKeyConstraint:
+         m = self.metadata()
+         if source == referent and source_schema == referent_schema:
+             t1_cols = local_cols + remote_cols
+         else:
+             t1_cols = local_cols
+             sa_schema.Table(
+                 referent,
+                 m,
+                 *[sa_schema.Column(n, NULLTYPE) for n in remote_cols],
+                 schema=referent_schema,
+             )
+
+         t1 = sa_schema.Table(
+             source,
+             m,
+             *[
+                 sa_schema.Column(n, NULLTYPE)
+                 for n in util.unique_list(t1_cols)
+             ],
+             schema=source_schema,
+         )
+
+         tname = (
+             "%s.%s" % (referent_schema, referent)
+             if referent_schema
+             else referent
+         )
+
+         dialect_kw["match"] = match
+
+         f = sa_schema.ForeignKeyConstraint(
+             local_cols,
+             ["%s.%s" % (tname, n) for n in remote_cols],
+             name=name,
+             onupdate=onupdate,
+             ondelete=ondelete,
+             deferrable=deferrable,
+             initially=initially,
+             **dialect_kw,
+         )
+         t1.append_constraint(f)
+
+         return f
+
+     def unique_constraint(
+         self,
+         name: Optional[sqla_compat._ConstraintNameDefined],
+         source: str,
+         local_cols: Sequence[str],
+         schema: Optional[str] = None,
+         **kw,
+     ) -> UniqueConstraint:
+         t = sa_schema.Table(
+             source,
+             self.metadata(),
+             *[sa_schema.Column(n, NULLTYPE) for n in local_cols],
+             schema=schema,
+         )
+         kw["name"] = name
+         uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw)
+         # TODO: need event tests to ensure the event
+         # is fired off here
+         t.append_constraint(uq)
+         return uq
+
+     def check_constraint(
+         self,
+         name: Optional[sqla_compat._ConstraintNameDefined],
+         source: str,
+         condition: Union[str, TextClause, ColumnElement[Any]],
+         schema: Optional[str] = None,
+         **kw,
+     ) -> Union[CheckConstraint]:
+         t = sa_schema.Table(
+             source,
+             self.metadata(),
+             sa_schema.Column("x", Integer),
+             schema=schema,
+         )
+         ck = sa_schema.CheckConstraint(condition, name=name, **kw)
+         t.append_constraint(ck)
+         return ck
+
+     def generic_constraint(
+         self,
+         name: Optional[sqla_compat._ConstraintNameDefined],
+         table_name: str,
+         type_: Optional[str],
+         schema: Optional[str] = None,
+         **kw,
+     ) -> Any:
+         t = self.table(table_name, schema=schema)
+         types: Dict[Optional[str], Any] = {
+             "foreignkey": lambda name: sa_schema.ForeignKeyConstraint(
+                 [], [], name=name
+             ),
+             "primary": sa_schema.PrimaryKeyConstraint,
+             "unique": sa_schema.UniqueConstraint,
+             "check": lambda name: sa_schema.CheckConstraint("", name=name),
+             None: sa_schema.Constraint,
+         }
+         try:
+             const = types[type_]
+         except KeyError as ke:
+             raise TypeError(
+                 "'type' can be one of %s"
+                 % ", ".join(sorted(repr(x) for x in types))
+             ) from ke
+         else:
+             const = const(name=name)
+             t.append_constraint(const)
+             return const
+
+     def metadata(self) -> MetaData:
+         kw = {}
+         if (
+             self.migration_context is not None
+             and "target_metadata" in self.migration_context.opts
+         ):
+             mt = self.migration_context.opts["target_metadata"]
+             if hasattr(mt, "naming_convention"):
+                 kw["naming_convention"] = mt.naming_convention
+         return sa_schema.MetaData(**kw)
+
+     def table(self, name: str, *columns, **kw) -> Table:
+         m = self.metadata()
+
+         cols = [
+             sqla_compat._copy(c) if c.table is not None else c
+             for c in columns
+             if isinstance(c, Column)
+         ]
+         # these flags have already added their UniqueConstraint /
+         # Index objects to the table, so flip them off here.
+         # SQLAlchemy tometadata() avoids this instead by preserving the
+         # flags and skipping the constraints that have _type_bound on them,
+         # but for a migration we'd rather list out the constraints
+         # explicitly.
+         _constraints_included = kw.pop("_constraints_included", False)
+         if _constraints_included:
+             for c in cols:
+                 c.unique = c.index = False
+
+         t = sa_schema.Table(name, m, *cols, **kw)
+
+         constraints = [
+             (
+                 sqla_compat._copy(elem, target_table=t)
+                 if getattr(elem, "parent", None) is not t
+                 and getattr(elem, "parent", None) is not None
+                 else elem
+             )
+             for elem in columns
+             if isinstance(elem, (Constraint, Index))
+         ]
+
+         for const in constraints:
+             t.append_constraint(const)
+
+         for f in t.foreign_keys:
+             self._ensure_table_for_fk(m, f)
+         return t
+
+     def column(self, name: str, type_: TypeEngine, **kw) -> Column:
+         return sa_schema.Column(name, type_, **kw)
+
+     def index(
+         self,
+         name: Optional[str],
+         tablename: Optional[str],
+         columns: Sequence[Union[str, TextClause, ColumnElement[Any]]],
+         schema: Optional[str] = None,
+         **kw,
+     ) -> Index:
+         t = sa_schema.Table(
+             tablename or "no_table",
+             self.metadata(),
+             schema=schema,
+         )
+         kw["_table"] = t
+         idx = sa_schema.Index(
+             name,
+             *[util.sqla_compat._textual_index_column(t, n) for n in columns],
+             **kw,
+         )
+         return idx
+
+     def _parse_table_key(self, table_key: str) -> Tuple[Optional[str], str]:
+         if "." in table_key:
+             tokens = table_key.split(".")
+             sname: Optional[str] = ".".join(tokens[0:-1])
+             tname = tokens[-1]
+         else:
+             tname = table_key
+             sname = None
+         return (sname, tname)
+
+     def _ensure_table_for_fk(self, metadata: MetaData, fk: ForeignKey) -> None:
+         """create a placeholder Table object for the referent of a
+         ForeignKey.
+
+         """
+         if isinstance(fk._colspec, str):
+             table_key, cname = fk._colspec.rsplit(".", 1)
+             sname, tname = self._parse_table_key(table_key)
+             if table_key not in metadata.tables:
+                 rel_t = sa_schema.Table(tname, metadata, schema=sname)
+             else:
+                 rel_t = metadata.tables[table_key]
+             if cname not in rel_t.c:
+                 rel_t.append_column(sa_schema.Column(cname, NULLTYPE))
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/operations/toimpl.py ADDED
@@ -0,0 +1,242 @@
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+ # mypy: no-warn-return-any, allow-any-generics
+
+ from typing import TYPE_CHECKING
+
+ from sqlalchemy import schema as sa_schema
+
+ from . import ops
+ from .base import Operations
+ from ..util.sqla_compat import _copy
+ from ..util.sqla_compat import sqla_2
+
+ if TYPE_CHECKING:
+     from sqlalchemy.sql.schema import Table
+
+
+ @Operations.implementation_for(ops.AlterColumnOp)
+ def alter_column(
+     operations: "Operations", operation: "ops.AlterColumnOp"
+ ) -> None:
+     compiler = operations.impl.dialect.statement_compiler(
+         operations.impl.dialect, None
+     )
+
+     existing_type = operation.existing_type
+     existing_nullable = operation.existing_nullable
+     existing_server_default = operation.existing_server_default
+     type_ = operation.modify_type
+     column_name = operation.column_name
+     table_name = operation.table_name
+     schema = operation.schema
+     server_default = operation.modify_server_default
+     new_column_name = operation.modify_name
+     nullable = operation.modify_nullable
+     comment = operation.modify_comment
+     existing_comment = operation.existing_comment
+
+     def _count_constraint(constraint):
+         return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and (
+             not constraint._create_rule or constraint._create_rule(compiler)
+         )
+
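+     # Note (editorial comment): per the alter_column docstring in ops.py,
+     # when both existing_type and a new type_ are given, any constraint
+     # generated by a SQLAlchemy "schema" type (e.g. Boolean or Enum on
+     # backends without native support) is dropped before the ALTER and
+     # re-created for the new type afterwards.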
+     if existing_type and type_:
+         t = operations.schema_obj.table(
+             table_name,
+             sa_schema.Column(column_name, existing_type),
+             schema=schema,
+         )
+         for constraint in t.constraints:
+             if _count_constraint(constraint):
+                 operations.impl.drop_constraint(constraint)
+
+     operations.impl.alter_column(
+         table_name,
+         column_name,
+         nullable=nullable,
+         server_default=server_default,
+         name=new_column_name,
+         type_=type_,
+         schema=schema,
+         existing_type=existing_type,
+         existing_server_default=existing_server_default,
+         existing_nullable=existing_nullable,
+         comment=comment,
+         existing_comment=existing_comment,
+         **operation.kw,
+     )
+
+     if type_:
+         t = operations.schema_obj.table(
+             table_name,
+             operations.schema_obj.column(column_name, type_),
+             schema=schema,
+         )
+         for constraint in t.constraints:
+             if _count_constraint(constraint):
+                 operations.impl.add_constraint(constraint)
+
+
+ @Operations.implementation_for(ops.DropTableOp)
+ def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None:
+     kw = {}
+     if operation.if_exists is not None:
+         kw["if_exists"] = operation.if_exists
+     operations.impl.drop_table(
+         operation.to_table(operations.migration_context), **kw
+     )
+
+
+ @Operations.implementation_for(ops.DropColumnOp)
+ def drop_column(
+     operations: "Operations", operation: "ops.DropColumnOp"
+ ) -> None:
+     column = operation.to_column(operations.migration_context)
+     operations.impl.drop_column(
+         operation.table_name,
+         column,
+         schema=operation.schema,
+         if_exists=operation.if_exists,
+         **operation.kw,
+     )
+
+
+ @Operations.implementation_for(ops.CreateIndexOp)
+ def create_index(
+     operations: "Operations", operation: "ops.CreateIndexOp"
+ ) -> None:
+     idx = operation.to_index(operations.migration_context)
+     kw = {}
+     if operation.if_not_exists is not None:
+         kw["if_not_exists"] = operation.if_not_exists
+     operations.impl.create_index(idx, **kw)
+
+
+ @Operations.implementation_for(ops.DropIndexOp)
+ def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None:
+     kw = {}
+     if operation.if_exists is not None:
+         kw["if_exists"] = operation.if_exists
+
+     operations.impl.drop_index(
+         operation.to_index(operations.migration_context),
+         **kw,
+     )
+
+
+ @Operations.implementation_for(ops.CreateTableOp)
+ def create_table(
+     operations: "Operations", operation: "ops.CreateTableOp"
+ ) -> "Table":
+     kw = {}
+     if operation.if_not_exists is not None:
+         kw["if_not_exists"] = operation.if_not_exists
+     table = operation.to_table(operations.migration_context)
+     operations.impl.create_table(table, **kw)
+     return table
+
+
+ @Operations.implementation_for(ops.RenameTableOp)
+ def rename_table(
+     operations: "Operations", operation: "ops.RenameTableOp"
+ ) -> None:
+     operations.impl.rename_table(
+         operation.table_name, operation.new_table_name, schema=operation.schema
+     )
+
+
+ @Operations.implementation_for(ops.CreateTableCommentOp)
+ def create_table_comment(
+     operations: "Operations", operation: "ops.CreateTableCommentOp"
+ ) -> None:
+     table = operation.to_table(operations.migration_context)
+     operations.impl.create_table_comment(table)
+
+
+ @Operations.implementation_for(ops.DropTableCommentOp)
+ def drop_table_comment(
+     operations: "Operations", operation: "ops.DropTableCommentOp"
+ ) -> None:
+     table = operation.to_table(operations.migration_context)
+     operations.impl.drop_table_comment(table)
+
+
+ @Operations.implementation_for(ops.AddColumnOp)
+ def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None:
+     table_name = operation.table_name
+     column = operation.column
+     schema = operation.schema
+     kw = operation.kw
+
+     if column.table is not None:
+         column = _copy(column)
+
+     t = operations.schema_obj.table(table_name, column, schema=schema)
+     operations.impl.add_column(
+         table_name,
+         column,
+         schema=schema,
+         if_not_exists=operation.if_not_exists,
+         **kw,
+     )
+
+     for constraint in t.constraints:
+         if not isinstance(constraint, sa_schema.PrimaryKeyConstraint):
+             operations.impl.add_constraint(constraint)
+     for index in t.indexes:
+         operations.impl.create_index(index)
+
+     with_comment = (
+         operations.impl.dialect.supports_comments
+         and not operations.impl.dialect.inline_comments
+     )
+     comment = column.comment
+     if comment and with_comment:
+         operations.impl.create_column_comment(column)
+
+
+ @Operations.implementation_for(ops.AddConstraintOp)
+ def create_constraint(
+     operations: "Operations", operation: "ops.AddConstraintOp"
+ ) -> None:
+     operations.impl.add_constraint(
+         operation.to_constraint(operations.migration_context)
+     )
+
+
+ @Operations.implementation_for(ops.DropConstraintOp)
+ def drop_constraint(
+     operations: "Operations", operation: "ops.DropConstraintOp"
+ ) -> None:
+     kw = {}
+     if operation.if_exists is not None:
+         if not sqla_2:
+             raise NotImplementedError("SQLAlchemy 2.0 required")
+         kw["if_exists"] = operation.if_exists
+     operations.impl.drop_constraint(
+         operations.schema_obj.generic_constraint(
+             operation.constraint_name,
+             operation.table_name,
+             operation.constraint_type,
+             schema=operation.schema,
+         ),
+         **kw,
+     )
+
+
+ @Operations.implementation_for(ops.BulkInsertOp)
+ def bulk_insert(
+     operations: "Operations", operation: "ops.BulkInsertOp"
+ ) -> None:
+     operations.impl.bulk_insert(  # type: ignore[union-attr]
+         operation.table, operation.rows, multiinsert=operation.multiinsert
+     )
+
+
+ @Operations.implementation_for(ops.ExecuteSQLOp)
+ def execute_sql(
+     operations: "Operations", operation: "ops.ExecuteSQLOp"
+ ) -> None:
+     operations.migration_context.impl.execute(
+         operation.sqltext, execution_options=operation.execution_options
+     )
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/py.typed ADDED
File without changes
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/__init__.py ADDED
File without changes
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/environment.py ADDED
@@ -0,0 +1,1051 @@
+ from __future__ import annotations
+
+ from typing import Any
+ from typing import Callable
+ from typing import Collection
+ from typing import Dict
+ from typing import List
+ from typing import Mapping
+ from typing import MutableMapping
+ from typing import Optional
+ from typing import overload
+ from typing import Sequence
+ from typing import TextIO
+ from typing import Tuple
+ from typing import TYPE_CHECKING
+ from typing import Union
+
+ from sqlalchemy.sql.schema import Column
+ from sqlalchemy.sql.schema import FetchedValue
+ from typing_extensions import ContextManager
+ from typing_extensions import Literal
+
+ from .migration import _ProxyTransaction
+ from .migration import MigrationContext
+ from .. import util
+ from ..operations import Operations
+ from ..script.revision import _GetRevArg
+
+ if TYPE_CHECKING:
+     from sqlalchemy.engine import URL
+     from sqlalchemy.engine.base import Connection
+     from sqlalchemy.sql import Executable
+     from sqlalchemy.sql.schema import MetaData
+     from sqlalchemy.sql.schema import SchemaItem
+     from sqlalchemy.sql.type_api import TypeEngine
+
+     from .migration import MigrationInfo
+     from ..autogenerate.api import AutogenContext
+     from ..config import Config
+     from ..ddl import DefaultImpl
+     from ..operations.ops import MigrationScript
+     from ..script.base import ScriptDirectory
+
+ _RevNumber = Optional[Union[str, Tuple[str, ...]]]
+
+ ProcessRevisionDirectiveFn = Callable[
+     [MigrationContext, _GetRevArg, List["MigrationScript"]], None
+ ]
+
+ RenderItemFn = Callable[
+     [str, Any, "AutogenContext"], Union[str, Literal[False]]
+ ]
+
+ NameFilterType = Literal[
+     "schema",
+     "table",
+     "column",
+     "index",
+     "unique_constraint",
+     "foreign_key_constraint",
+ ]
+ NameFilterParentNames = MutableMapping[
+     Literal["schema_name", "table_name", "schema_qualified_table_name"],
+     Optional[str],
+ ]
+ IncludeNameFn = Callable[
+     [Optional[str], NameFilterType, NameFilterParentNames], bool
+ ]
+
+ IncludeObjectFn = Callable[
+     [
+         "SchemaItem",
+         Optional[str],
+         NameFilterType,
+         bool,
+         Optional["SchemaItem"],
+     ],
+     bool,
+ ]
+
+ OnVersionApplyFn = Callable[
+     [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]],
+     None,
+ ]
+
+ CompareServerDefault = Callable[
+     [
+         MigrationContext,
+         "Column[Any]",
+         "Column[Any]",
+         Optional[str],
+         Optional[FetchedValue],
+         Optional[str],
+     ],
+     Optional[bool],
+ ]
+
+ CompareType = Callable[
+     [
+         MigrationContext,
+         "Column[Any]",
+         "Column[Any]",
+         "TypeEngine[Any]",
+         "TypeEngine[Any]",
+     ],
+     Optional[bool],
+ ]
+
+
+ class EnvironmentContext(util.ModuleClsProxy):
+     """A configurational facade made available in an ``env.py`` script.
+
+     The :class:`.EnvironmentContext` acts as a *facade* to the more
+     nuts-and-bolts objects of :class:`.MigrationContext` as well as certain
+     aspects of :class:`.Config`,
+     within the context of the ``env.py`` script that is invoked by
+     most Alembic commands.
+
+     :class:`.EnvironmentContext` is normally instantiated
+     when a command in :mod:`alembic.command` is run.  It then makes
+     itself available in the ``alembic.context`` module for the scope
+     of the command.  From within an ``env.py`` script, the current
+     :class:`.EnvironmentContext` is available by importing this module.
+
+     :class:`.EnvironmentContext` also supports programmatic usage.
+     At this level, it acts as a Python context manager, that is, is
+     intended to be used using the
+     ``with:`` statement.  A typical use of :class:`.EnvironmentContext`::
+
+         from alembic.config import Config
+         from alembic.script import ScriptDirectory
+
+         config = Config()
+         config.set_main_option("script_location", "myapp:migrations")
+         script = ScriptDirectory.from_config(config)
+
+
+         def my_function(rev, context):
+             '''do something with revision "rev", which
+             will be the current database revision,
+             and "context", which is the MigrationContext
+             that the env.py will create'''
+
+
+         with EnvironmentContext(
+             config,
+             script,
+             fn=my_function,
+             as_sql=False,
+             starting_rev="base",
+             destination_rev="head",
+             tag="sometag",
+         ):
+             script.run_env()
+
+     The above script will invoke the ``env.py`` script
+     within the migration environment.  If and when ``env.py``
+     calls :meth:`.MigrationContext.run_migrations`, the
+     ``my_function()`` function above will be called
+     by the :class:`.MigrationContext`, given the context
+     itself as well as the current revision in the database.
+
+     .. note::
+
+         For most API usages other than full blown
+         invocation of migration scripts, the :class:`.MigrationContext`
+         and :class:`.ScriptDirectory` objects can be created and
+         used directly.  The :class:`.EnvironmentContext` object
+         is *only* needed when you need to actually invoke the
+         ``env.py`` module present in the migration environment.
+
+     """
+
+     _migration_context: Optional[MigrationContext] = None
+
+     config: Config = None  # type:ignore[assignment]
+     """An instance of :class:`.Config` representing the
+     configuration file contents as well as other variables
+     set programmatically within it."""
+
+     script: ScriptDirectory = None  # type:ignore[assignment]
+     """An instance of :class:`.ScriptDirectory` which provides
+     programmatic access to version files within the ``versions/``
+     directory.
+
+     """
+
+     def __init__(
+         self, config: Config, script: ScriptDirectory, **kw: Any
+     ) -> None:
+         r"""Construct a new :class:`.EnvironmentContext`.
+
+         :param config: a :class:`.Config` instance.
+         :param script: a :class:`.ScriptDirectory` instance.
+         :param \**kw: keyword options that will be ultimately
+          passed along to the :class:`.MigrationContext` when
+          :meth:`.EnvironmentContext.configure` is called.
+
+         """
+         self.config = config
+         self.script = script
+         self.context_opts = kw
+
+     def __enter__(self) -> EnvironmentContext:
+         """Establish a context which provides a
+         :class:`.EnvironmentContext` object to
+         env.py scripts.
+
+         The :class:`.EnvironmentContext` will
+         be made available as ``from alembic import context``.
+
+         """
+         self._install_proxy()
+         return self
+
+     def __exit__(self, *arg: Any, **kw: Any) -> None:
+         self._remove_proxy()
+
+     def is_offline_mode(self) -> bool:
+         """Return True if the current migrations environment
+         is running in "offline mode".
+
+         This is ``True`` or ``False`` depending
+         on the ``--sql`` flag passed.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         """
+         return self.context_opts.get("as_sql", False)  # type: ignore[no-any-return]  # noqa: E501
+
+     def is_transactional_ddl(self) -> bool:
+         """Return True if the context is configured to expect a
+         transactional DDL capable backend.
+
+         This defaults to the type of database in use, and
+         can be overridden by the ``transactional_ddl`` argument
+         to :meth:`.configure`
+
+         This function requires that a :class:`.MigrationContext`
+         has first been made available via :meth:`.configure`.
+
+         """
+         return self.get_context().impl.transactional_ddl
+
+     def requires_connection(self) -> bool:
+         return not self.is_offline_mode()
+
+     def get_head_revision(self) -> _RevNumber:
+         """Return the hex identifier of the 'head' script revision.
+
+         If the script directory has multiple heads, this
+         method raises a :class:`.CommandError`;
+         :meth:`.EnvironmentContext.get_head_revisions` should be preferred.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         .. seealso:: :meth:`.EnvironmentContext.get_head_revisions`
+
+         """
+         return self.script.as_revision_number("head")
+
+     def get_head_revisions(self) -> _RevNumber:
+         """Return the hex identifier of the 'heads' script revision(s).
+
+         This returns a tuple containing the version number of all
+         heads in the script directory.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         """
+         return self.script.as_revision_number("heads")
+
+     def get_starting_revision_argument(self) -> _RevNumber:
+         """Return the 'starting revision' argument,
+         if the revision was passed using ``start:end``.
+
+         This is only meaningful in "offline" mode.
+         Returns ``None`` if no value is available
+         or was configured.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         """
+         if self._migration_context is not None:
+             return self.script.as_revision_number(
+                 self.get_context()._start_from_rev
+             )
+         elif "starting_rev" in self.context_opts:
+             return self.script.as_revision_number(
+                 self.context_opts["starting_rev"]
+             )
+         else:
+             # this should raise only in the case that a command
+             # is being run where the "starting rev" is never applicable;
+             # this is to catch scripts which rely upon this in
+             # non-sql mode or similar
+             raise util.CommandError(
+                 "No starting revision argument is available."
+             )
+
+     def get_revision_argument(self) -> _RevNumber:
+         """Get the 'destination' revision argument.
+
+         This is typically the argument passed to the
+         ``upgrade`` or ``downgrade`` command.
+
+         If it was specified as ``head``, the actual
+         version number is returned; if specified
+         as ``base``, ``None`` is returned.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         """
+         return self.script.as_revision_number(
+             self.context_opts["destination_rev"]
+         )
+
+     def get_tag_argument(self) -> Optional[str]:
+         """Return the value passed for the ``--tag`` argument, if any.
+
+         The ``--tag`` argument is not used directly by Alembic,
+         but is available for custom ``env.py`` configurations that
+         wish to use it; particularly for offline generation scripts
+         that wish to generate tagged filenames.
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         .. seealso::
+
+             :meth:`.EnvironmentContext.get_x_argument` - a newer and more
+             open ended system of extending ``env.py`` scripts via the command
+             line.
+
+         """
+         return self.context_opts.get("tag", None)
+
+     @overload
+     def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: ...
+
+     @overload
+     def get_x_argument(
+         self, as_dictionary: Literal[True]
+     ) -> Dict[str, str]: ...
+
+     @overload
+     def get_x_argument(
+         self, as_dictionary: bool = ...
+     ) -> Union[List[str], Dict[str, str]]: ...
+
+     def get_x_argument(
+         self, as_dictionary: bool = False
+     ) -> Union[List[str], Dict[str, str]]:
+         """Return the value(s) passed for the ``-x`` argument, if any.
+
+         The ``-x`` argument is an open ended flag that allows any user-defined
+         value or values to be passed on the command line, then available
+         here for consumption by a custom ``env.py`` script.
+
+         The return value is a list, returned directly from the ``argparse``
+         structure.  If ``as_dictionary=True`` is passed, the ``x`` arguments
+         are parsed using ``key=value`` format into a dictionary that is
+         then returned.  If there is no ``=`` in the argument, value is an empty
+         string.
+
+         .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when
+            arguments are passed without the ``=`` symbol.
+
+         For example, to support passing a database URL on the command line,
+         the standard ``env.py`` script can be modified like this::
+
+             cmd_line_url = context.get_x_argument(
+                 as_dictionary=True).get('dbname')
+             if cmd_line_url:
+                 engine = create_engine(cmd_line_url)
+             else:
+                 engine = engine_from_config(
+                     config.get_section(config.config_ini_section),
+                     prefix='sqlalchemy.',
+                     poolclass=pool.NullPool)
+
+         This then takes effect by running the ``alembic`` script as::
+
+             alembic -x dbname=postgresql://user:pass@host/dbname upgrade head
+
+         This function does not require that the :class:`.MigrationContext`
+         has been configured.
+
+         .. seealso::
+
+             :meth:`.EnvironmentContext.get_tag_argument`
+
+             :attr:`.Config.cmd_opts`
+
+         """
+         if self.config.cmd_opts is not None:
+             value = self.config.cmd_opts.x or []
+         else:
+             value = []
+         if as_dictionary:
+             dict_value = {}
+             for arg in value:
+                 x_key, _, x_value = arg.partition("=")
+                 dict_value[x_key] = x_value
+             value = dict_value
+
+         return value
+
+     def configure(
+         self,
+         connection: Optional[Connection] = None,
+         url: Optional[Union[str, URL]] = None,
+         dialect_name: Optional[str] = None,
+         dialect_opts: Optional[Dict[str, Any]] = None,
+         transactional_ddl: Optional[bool] = None,
+         transaction_per_migration: bool = False,
+         output_buffer: Optional[TextIO] = None,
+         starting_rev: Optional[str] = None,
+         tag: Optional[str] = None,
+         template_args: Optional[Dict[str, Any]] = None,
+         render_as_batch: bool = False,
+         target_metadata: Union[MetaData, Sequence[MetaData], None] = None,
+         include_name: Optional[IncludeNameFn] = None,
+         include_object: Optional[IncludeObjectFn] = None,
+         include_schemas: bool = False,
+         process_revision_directives: Optional[
+             ProcessRevisionDirectiveFn
+         ] = None,
+         compare_type: Union[bool, CompareType] = True,
+         compare_server_default: Union[bool, CompareServerDefault] = False,
+         render_item: Optional[RenderItemFn] = None,
+         literal_binds: bool = False,
+         upgrade_token: str = "upgrades",
+         downgrade_token: str = "downgrades",
+         alembic_module_prefix: str = "op.",
+         sqlalchemy_module_prefix: str = "sa.",
+         user_module_prefix: Optional[str] = None,
+         on_version_apply: Optional[OnVersionApplyFn] = None,
+         **kw: Any,
+     ) -> None:
+         """Configure a :class:`.MigrationContext` within this
+         :class:`.EnvironmentContext` which will provide database
+         connectivity and other configuration to a series of
+         migration scripts.
+
+         Many methods on :class:`.EnvironmentContext` require that
+         this method has been called in order to function, as they
+         ultimately need to have database access or at least access
+         to the dialect in use.  Those which do are documented as such.
+
+         The important thing needed by :meth:`.configure` is a
+         means to determine what kind of database dialect is in use.
+         An actual connection to that database is needed only if
+         the :class:`.MigrationContext` is to be used in
+         "online" mode.
+
+         If the :meth:`.is_offline_mode` function returns ``True``,
+         then no connection is needed here.  Otherwise, the
+         ``connection`` parameter should be present as an
+         instance of :class:`sqlalchemy.engine.Connection`.
+
+         This function is typically called from the ``env.py``
+         script within a migration environment.  It can be called
+         multiple times for an invocation.  The most recent
+         :class:`~sqlalchemy.engine.Connection`
+         for which it was called is the one that will be operated upon
+         by the next call to :meth:`.run_migrations`.
+
+         General parameters:
+
+         :param connection: a :class:`~sqlalchemy.engine.Connection`
+          to use
+          for SQL execution in "online" mode.  When present, is also
+          used to determine the type of dialect in use.
+         :param url: a string database url, or a
+          :class:`sqlalchemy.engine.url.URL` object.
+          The type of dialect to be used will be derived from this if
+          ``connection`` is not passed.
+         :param dialect_name: string name of a dialect, such as
+          "postgresql", "mssql", etc.
+          The type of dialect to be used will be derived from this if
+          ``connection`` and ``url`` are not passed.
+         :param dialect_opts: dictionary of options to be passed to dialect
+          constructor.
+         :param transactional_ddl: Force the usage of "transactional"
+          DDL on or off;
+          this otherwise defaults to whether or not the dialect in
+          use supports it.
+         :param transaction_per_migration: if True, nest each migration script
+          in a transaction rather than the full series of migrations to
+          run.
+         :param output_buffer: a file-like object that will be used
+          for textual output
+          when the ``--sql`` option is used to generate SQL scripts.
+          Defaults to
+          ``sys.stdout`` if not passed here and also not present on
+          the :class:`.Config`
+          object.  The value here overrides that of the :class:`.Config`
+          object.
+         :param output_encoding: when using ``--sql`` to generate SQL
+          scripts, apply this encoding to the string output.
+         :param literal_binds: when using ``--sql`` to generate SQL
+          scripts, pass through the ``literal_binds`` flag to the compiler
+          so that any literal values that would ordinarily be bound
+          parameters are converted to plain strings.
+
+          .. warning:: Dialects can typically only handle simple datatypes
+             like strings and numbers for auto-literal generation.  Datatypes
+             like dates, intervals, and others may still require manual
+             formatting, typically using :meth:`.Operations.inline_literal`.
+
+          .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy
+             versions prior to 0.8 where this feature is not supported.
+
+          .. seealso::
+
+             :meth:`.Operations.inline_literal`
+
+         :param starting_rev: Override the "starting revision" argument
+          when using ``--sql`` mode.
+         :param tag: a string tag for usage by custom ``env.py`` scripts.
+          Set via the ``--tag`` option, can be overridden here.
+         :param template_args: dictionary of template arguments which
+          will be added to the template argument environment when
+          running the "revision" command.  Note that the script environment
+          is only run within the "revision" command if the --autogenerate
+          option is used, or if the option "revision_environment=true"
+          is present in the alembic.ini file.
+
+         :param version_table: The name of the Alembic version table.
+          The default is ``'alembic_version'``.
+         :param version_table_schema: Optional schema to place version
+          table within.
+         :param version_table_pk: boolean, whether the Alembic version table
+          should use a primary key constraint for the "value" column; this
+          only takes effect when the table is first created.
+          Defaults to True; setting to False should not be necessary and is
+          here for backwards compatibility reasons.
+         :param on_version_apply: a callable or collection of callables to be
+          run for each migration step.
+          The callables will be run in the order they are given, once for
+          each migration step, after the respective operation has been
+          applied but before its transaction is finalized.
+          Each callable accepts no positional arguments and the following
+          keyword arguments:
+
+          * ``ctx``: the :class:`.MigrationContext` running the migration,
+          * ``step``: a :class:`.MigrationInfo` representing the
+            step currently being applied,
+          * ``heads``: a collection of version strings representing the
+            current heads,
+          * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`.
+
+         Parameters specific to the autogenerate feature, when
+         ``alembic revision`` is run with the ``--autogenerate`` feature:
+
+         :param target_metadata: a :class:`sqlalchemy.schema.MetaData`
+          object, or a sequence of :class:`~sqlalchemy.schema.MetaData`
+          objects, that will be consulted during autogeneration.
+          The tables present in each :class:`~sqlalchemy.schema.MetaData`
+          will be compared against
+          what is locally available on the target
+          :class:`~sqlalchemy.engine.Connection`
+          to produce candidate upgrade/downgrade operations.
+         :param compare_type: Indicates type comparison behavior during
+          an autogenerate
+          operation.  Defaults to ``True`` turning on type comparison, which
+          has good accuracy on most backends.  See :ref:`compare_types`
+          for an example as well as information on other type
+          comparison options.  Set to ``False`` which disables type
+          comparison.  A callable can also be passed to provide custom type
+          comparison, see :ref:`compare_types` for additional details.
+
+          .. versionchanged:: 1.12.0 The default value of
+             :paramref:`.EnvironmentContext.configure.compare_type` has been
+             changed to ``True``.
+
+          .. seealso::
+
+             :ref:`compare_types`
+
+             :paramref:`.EnvironmentContext.configure.compare_server_default`
+
+         :param compare_server_default: Indicates server default comparison
+          behavior during
+          an autogenerate operation.  Defaults to ``False`` which disables
+          server default
+          comparison.  Set to ``True`` to turn on server default comparison,
+          which has
+          varied accuracy depending on backend.
+
+          To customize server default comparison behavior, a callable may
+          be specified which can filter server default comparisons during
+          an autogenerate operation.  The format of this
+          callable is::
+
+             def my_compare_server_default(context, inspected_column,
+                         metadata_column, inspected_default, metadata_default,
+                         rendered_metadata_default):
+                 # return True if the defaults are different,
+                 # False if not, or None to allow the default implementation
+                 # to compare these defaults
+                 return None
+
+             context.configure(
+                 # ...
+                 compare_server_default = my_compare_server_default
+             )
+
+          ``inspected_column`` is a dictionary structure as returned by
+          :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas
+          ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+          the local model environment.
+
+          A return value of ``None`` indicates to allow default server default
+          comparison
+          to proceed.  Note that some backends such as Postgresql actually
+          execute
+          the two defaults on the database side to compare for equivalence.
+
+          .. seealso::
+
+             :paramref:`.EnvironmentContext.configure.compare_type`
+
+         :param include_name: A callable function which is given
+          the chance to return ``True`` or ``False`` for any database reflected
+          object based on its name, including database schema names when
+          the :paramref:`.EnvironmentContext.configure.include_schemas` flag
+          is set to ``True``.
+
+          The function accepts the following positional arguments:
+
+          * ``name``: the name of the object, such as schema name or table name.
+            Will be ``None`` when indicating the default schema name of the
+            database connection.
+          * ``type``: a string describing the type of object; currently
+            ``"schema"``, ``"table"``, ``"column"``, ``"index"``,
+            ``"unique_constraint"``, or ``"foreign_key_constraint"``
+          * ``parent_names``: a dictionary of "parent" object names, that are
+            relative to the name being given.  Keys in this dictionary may
+            include: ``"schema_name"``, ``"table_name"`` or
+            ``"schema_qualified_table_name"``.
+
+          E.g.::
+
+             def include_name(name, type_, parent_names):
+                 if type_ == "schema":
+                     return name in ["schema_one", "schema_two"]
+                 else:
+                     return True
+
+             context.configure(
+                 # ...
+                 include_schemas = True,
+                 include_name = include_name
+             )
+
+          .. seealso::
+
+             :ref:`autogenerate_include_hooks`
+
+             :paramref:`.EnvironmentContext.configure.include_object`
+
+             :paramref:`.EnvironmentContext.configure.include_schemas`
+
+
+         :param include_object: A callable function which is given
+          the chance to return ``True`` or ``False`` for any object,
+          indicating if the given object should be considered in the
+          autogenerate sweep.
+
+          The function accepts the following positional arguments:
+
+          * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such
+            as a :class:`~sqlalchemy.schema.Table`,
+            :class:`~sqlalchemy.schema.Column`,
+            :class:`~sqlalchemy.schema.Index`
+            :class:`~sqlalchemy.schema.UniqueConstraint`,
+            or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object
+          * ``name``: the name of the object.  This is typically available
+            via ``object.name``.
+          * ``type``: a string describing the type of object; currently
+            ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``,
+            or ``"foreign_key_constraint"``
+          * ``reflected``: ``True`` if the given object was produced based on
+            table reflection, ``False`` if it's from a local :class:`.MetaData`
+            object.
+          * ``compare_to``: the object being compared against, if available,
+            else ``None``.
+
+          E.g.::
+
+             def include_object(object, name, type_, reflected, compare_to):
+                 if (type_ == "column" and
+                         not reflected and
+                         object.info.get("skip_autogenerate", False)):
+                     return False
+                 else:
+                     return True
+
+             context.configure(
+                 # ...
+                 include_object = include_object
+             )
+
+          For the use case of omitting specific schemas from a target database
+          when :paramref:`.EnvironmentContext.configure.include_schemas` is
+          set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema`
+          attribute can be checked for each :class:`~sqlalchemy.schema.Table`
+          object passed to the hook, however it is much more efficient
+          to filter on schemas before reflection of objects takes place
+          using the :paramref:`.EnvironmentContext.configure.include_name`
+          hook.
+
+          .. seealso::
+
+             :ref:`autogenerate_include_hooks`
+
+             :paramref:`.EnvironmentContext.configure.include_name`
+
+             :paramref:`.EnvironmentContext.configure.include_schemas`
+
+         :param render_as_batch: if True, commands which alter elements
+          within a table will be placed under a ``with batch_alter_table():``
+          directive, so that batch migrations will take place.
+
+          .. seealso::
+
+             :ref:`batch_migrations`
+
+         :param include_schemas: If True, autogenerate will scan across
+          all schemas located by the SQLAlchemy
+          :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+          method, and include all differences in tables found across all
+          those schemas.  When using this option, you may want to also
+          use the :paramref:`.EnvironmentContext.configure.include_name`
+          parameter to specify a callable which
+          can filter the tables/schemas that get included.
+
+          .. seealso::
+
+             :ref:`autogenerate_include_hooks`
+
+             :paramref:`.EnvironmentContext.configure.include_name`
+
+             :paramref:`.EnvironmentContext.configure.include_object`
+
+         :param render_item: Callable that can be used to override how
+          any schema item, i.e. column, constraint, type,
+          etc., is rendered for autogenerate.  The callable receives a
+          string describing the type of object, the object, and
+          the autogen context.  If it returns False, the
+          default rendering method will be used.  If it returns None,
+          the item will not be rendered in the context of a Table
+          construct, that is, can be used to skip columns or constraints
+          within op.create_table()::
+
+             def my_render_column(type_, col, autogen_context):
+                 if type_ == "column" and isinstance(col, MySpecialCol):
+                     return repr(col)
+                 else:
+                     return False
+
+             context.configure(
+                 # ...
+                 render_item = my_render_column
+             )
+
+          Available values for the type string include: ``"column"``,
+          ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``,
+          ``"type"``, ``"server_default"``.
+
+          .. seealso::
+
+             :ref:`autogen_render_types`
+
+         :param upgrade_token: When autogenerate completes, the text of the
+          candidate upgrade operations will be present in this template
+          variable when ``script.py.mako`` is rendered.  Defaults to
+          ``upgrades``.
+         :param downgrade_token: When autogenerate completes, the text of the
+          candidate downgrade operations will be present in this
+          template variable when ``script.py.mako`` is rendered.  Defaults to
+          ``downgrades``.
+
+         :param alembic_module_prefix: When autogenerate refers to Alembic
+          :mod:`alembic.operations` constructs, this prefix will be used
+          (i.e. ``op.create_table``)  Defaults to "``op.``".
+          Can be ``None`` to indicate no prefix.
+
+         :param sqlalchemy_module_prefix: When autogenerate refers to
+          SQLAlchemy
+          :class:`~sqlalchemy.schema.Column` or type classes, this prefix
+          will be used
+          (i.e. ``sa.Column("somename", sa.Integer)``)  Defaults to "``sa.``".
+          Can be ``None`` to indicate no prefix.
+          Note that when dialect-specific types are rendered, autogenerate
+          will render them using the dialect module name, i.e. ``mssql.BIT()``,
+          ``postgresql.UUID()``.
+
+         :param user_module_prefix: When autogenerate refers to a SQLAlchemy
+          type (e.g. :class:`.TypeEngine`) where the module name is not
+          under the ``sqlalchemy`` namespace, this prefix will be used
+          within autogenerate.  If left at its default of
+          ``None``, the ``__module__`` attribute of the type is used to
+          render the import module.  It's a good practice to set this
+          and to have all custom types be available from a fixed module space,
+          in order to future-proof migration files against reorganizations
+          in modules.
+
+          .. seealso::
+
+             :ref:`autogen_module_prefix`
+
+         :param process_revision_directives: a callable function that will
+          be passed a structure representing the end result of an autogenerate
+          or plain "revision" operation, which can be manipulated to affect
+          how the ``alembic revision`` command ultimately outputs new
+          revision scripts.  The structure of the callable is::
+
+             def process_revision_directives(context, revision, directives):
+                 pass
+
+          The ``directives`` parameter is a Python list containing
+          a single :class:`.MigrationScript` directive, which represents
+          the revision file to be generated.  This list as well as its
+          contents may be freely modified to produce any set of commands.
+          The section :ref:`customizing_revision` shows an example of
+          doing this.  The ``context`` parameter is the
+          :class:`.MigrationContext` in use,
+          and ``revision`` is a tuple of revision identifiers representing the
+          current revision of the database.
+
+          The callable is invoked at all times when the ``--autogenerate``
+          option is passed to ``alembic revision``.  If ``--autogenerate``
+          is not passed, the callable is invoked only if the
+          ``revision_environment`` variable is set to True in the Alembic
+          configuration, in which case the given ``directives`` collection
+          will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps`
+          collections for ``.upgrade_ops`` and ``.downgrade_ops``.  The
+          ``--autogenerate`` option itself can be inferred by inspecting
+          ``context.config.cmd_opts.autogenerate``.
+
+          The callable function may optionally be an instance of
+          a :class:`.Rewriter` object.  This is a helper object that
+          assists in the production of autogenerate-stream rewriter functions.
+
+          .. seealso::
+
+             :ref:`customizing_revision`
+
+             :ref:`autogen_rewriter`
+
+             :paramref:`.command.revision.process_revision_directives`
+
+         Parameters specific to individual backends:
+
+         :param mssql_batch_separator: The "batch separator" which will
+          be placed between each statement when generating offline SQL Server
+          migrations.  Defaults to ``GO``.  Note this is in addition to the
+          customary semicolon ``;`` at the end of each statement; SQL Server
+          considers the "batch separator" to denote the end of an
+          individual statement execution, and cannot group certain
+          dependent operations in one step.
+         :param oracle_batch_separator: The "batch separator" which will
+          be placed between each statement when generating offline
+          Oracle migrations.  Defaults to ``/``.  Oracle doesn't add a
+          semicolon between statements like most other backends.
+
+         """
+         opts = self.context_opts
+         if transactional_ddl is not None:
+             opts["transactional_ddl"] = transactional_ddl
+         if output_buffer is not None:
+             opts["output_buffer"] = output_buffer
+         elif self.config.output_buffer is not None:
+             opts["output_buffer"] = self.config.output_buffer
+         if starting_rev:
+             opts["starting_rev"] = starting_rev
+         if tag:
+             opts["tag"] = tag
+         if template_args and "template_args" in opts:
+             opts["template_args"].update(template_args)
+         opts["transaction_per_migration"] = transaction_per_migration
+         opts["target_metadata"] = target_metadata
+         opts["include_name"] = include_name
+         opts["include_object"] = include_object
+         opts["include_schemas"] = include_schemas
+         opts["render_as_batch"] = render_as_batch
+         opts["upgrade_token"] = upgrade_token
+         opts["downgrade_token"] = downgrade_token
+         opts["sqlalchemy_module_prefix"] = sqlalchemy_module_prefix
+         opts["alembic_module_prefix"] = alembic_module_prefix
+         opts["user_module_prefix"] = user_module_prefix
+         opts["literal_binds"] = literal_binds
+         opts["process_revision_directives"] = process_revision_directives
+         opts["on_version_apply"] = util.to_tuple(on_version_apply, default=())
+
+         if render_item is not None:
+             opts["render_item"] = render_item
+         opts["compare_type"] = compare_type
+         if compare_server_default is not None:
+             opts["compare_server_default"] = compare_server_default
+         opts["script"] = self.script
+
+         opts.update(kw)
+
+         self._migration_context = MigrationContext.configure(
+             connection=connection,
+             url=url,
+             dialect_name=dialect_name,
+             environment_context=self,
+             dialect_opts=dialect_opts,
+             opts=opts,
+         )
+
+     def run_migrations(self, **kw: Any) -> None:
+         """Run migrations as determined by the current command line
+         configuration
+         as well as versioning information present (or not) in the current
+         database connection (if one is present).
+
+         The function accepts optional ``**kw`` arguments.  If these are
+         passed, they are sent directly to the ``upgrade()`` and
+         ``downgrade()``
+         functions within each target revision file.  By modifying the
+         ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()``
+         functions accept arguments, parameters can be passed here so that
+         contextual information, usually information to identify a particular
+         database in use, can be passed from a custom ``env.py`` script
+         to the migration functions.
+
+         This function requires that a :class:`.MigrationContext` has
+         first been made available via :meth:`.configure`.
+
+         """
+         assert self._migration_context is not None
+         with Operations.context(self._migration_context):
+             self.get_context().run_migrations(**kw)
+
+     def execute(
+         self,
+         sql: Union[Executable, str],
+         execution_options: Optional[Dict[str, Any]] = None,
+     ) -> None:
+         """Execute the given SQL using the current change context.
+
+         The behavior of :meth:`.execute` is the same
+         as that of :meth:`.Operations.execute`.  Please see that
+         function's documentation for full detail including
+         caveats and limitations.
+
+         This function requires that a :class:`.MigrationContext` has
+         first been made available via :meth:`.configure`.
+
+         """
+         self.get_context().execute(sql, execution_options=execution_options)
+
+     def static_output(self, text: str) -> None:
+         """Emit text directly to the "offline" SQL stream.
+
+         Typically this is for emitting comments that
+         start with --.  The statement is not treated
+         as a SQL execution, no ; or batch separator
+         is added, etc.
+
+         """
+         self.get_context().impl.static_output(text)
+
+     def begin_transaction(
+         self,
+     ) -> Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]:
+         """Return a context manager that will
+         enclose an operation within a "transaction",
+         as defined by the environment's offline
+         and transactional DDL settings.
+
+         e.g.::
+
+             with context.begin_transaction():
+                 context.run_migrations()
+
+         :meth:`.begin_transaction` is intended to
+         "do the right thing" regardless of
+         calling context:
+
+         * If :meth:`.is_transactional_ddl` is ``False``,
+           returns a "do nothing" context manager
+           which otherwise produces no transactional
+           state or directives.
+         * If :meth:`.is_offline_mode` is ``True``,
+           returns a context manager that will
+           invoke the :meth:`.DefaultImpl.emit_begin`
+           and :meth:`.DefaultImpl.emit_commit`
+           methods, which will produce the string
+           directives ``BEGIN`` and ``COMMIT`` on
+           the output stream, as rendered by the
+           target backend (e.g. SQL Server would
+           emit ``BEGIN TRANSACTION``).
+         * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin`
+           on the current online connection, which
+           returns a :class:`sqlalchemy.engine.Transaction`
+           object.  This object demarcates a real
+           transaction and is itself a context manager,
+           which will roll back if an exception
+           is raised.
+
+         Note that a custom ``env.py`` script which
+         has more specific transactional needs can of course
+         manipulate the :class:`~sqlalchemy.engine.Connection`
+         directly to produce transactional state in "online"
+         mode.
+
+         """
+
+         return self.get_context().begin_transaction()
+
+     def get_context(self) -> MigrationContext:
+         """Return the current :class:`.MigrationContext` object.
+
+         If :meth:`.EnvironmentContext.configure` has not been
+         called yet, raises an exception.
+
+         """
+
+         if self._migration_context is None:
+             raise Exception("No context has been configured yet.")
+         return self._migration_context
+
+     def get_bind(self) -> Connection:
+         """Return the current 'bind'.
+
+         In "online" mode, this is the
+         :class:`sqlalchemy.engine.Connection` currently being used
+         to emit SQL to the database.
+
+         This function requires that a :class:`.MigrationContext`
+         has first been made available via :meth:`.configure`.
+
+         """
+         return self.get_context().bind  # type: ignore[return-value]
+
+     def get_impl(self) -> DefaultImpl:
+         return self.get_context().impl
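
As a point of reference for the runtime modules in this diff, the following is a minimal sketch (editorial, not part of the committed files) of how an ``env.py`` script might drive the EnvironmentContext API added above. The database URL, metadata object, and callback name are illustrative placeholders; the ``on_version_apply`` callback follows the keyword signature described in the ``configure()`` docstring.

    from sqlalchemy import create_engine
    from alembic import context

    target_metadata = None  # replace with your MetaData() when using autogenerate


    def log_step(ctx, step, heads, run_args, **kw):
        # called once per migration step with the keyword arguments
        # documented for on_version_apply: ctx, step, heads, run_args
        print("applying %s; current heads: %s" % (step, heads))


    def run_migrations_online():
        # placeholder URL; a real env.py usually reads this from the Config
        engine = create_engine("postgresql://scott:tiger@localhost/mydb")
        with engine.connect() as connection:
            context.configure(
                connection=connection,
                target_metadata=target_metadata,
                transaction_per_migration=True,  # one transaction per script
                on_version_apply=log_step,
            )
            with context.begin_transaction():
                context.run_migrations()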
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/runtime/migration.py ADDED
@@ -0,0 +1,1395 @@
1
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
2
+ # mypy: no-warn-return-any, allow-any-generics
3
+
4
+ from __future__ import annotations
5
+
6
+ from contextlib import contextmanager
7
+ from contextlib import nullcontext
8
+ import logging
9
+ import sys
10
+ from typing import Any
11
+ from typing import Callable
12
+ from typing import cast
13
+ from typing import Collection
14
+ from typing import Dict
15
+ from typing import Iterable
16
+ from typing import Iterator
17
+ from typing import List
18
+ from typing import Optional
19
+ from typing import Set
20
+ from typing import Tuple
21
+ from typing import TYPE_CHECKING
22
+ from typing import Union
23
+
24
+ from sqlalchemy import Column
25
+ from sqlalchemy import literal_column
26
+ from sqlalchemy import select
27
+ from sqlalchemy.engine import Engine
28
+ from sqlalchemy.engine import url as sqla_url
29
+ from sqlalchemy.engine.strategies import MockEngineStrategy
30
+ from typing_extensions import ContextManager
31
+
32
+ from .. import ddl
33
+ from .. import util
34
+ from ..util import sqla_compat
35
+ from ..util.compat import EncodedIO
36
+
37
+ if TYPE_CHECKING:
38
+ from sqlalchemy.engine import Dialect
39
+ from sqlalchemy.engine import URL
40
+ from sqlalchemy.engine.base import Connection
41
+ from sqlalchemy.engine.base import Transaction
42
+ from sqlalchemy.engine.mock import MockConnection
43
+ from sqlalchemy.sql import Executable
44
+
45
+ from .environment import EnvironmentContext
46
+ from ..config import Config
47
+ from ..script.base import Script
48
+ from ..script.base import ScriptDirectory
49
+ from ..script.revision import _RevisionOrBase
50
+ from ..script.revision import Revision
51
+ from ..script.revision import RevisionMap
52
+
53
+ log = logging.getLogger(__name__)
54
+
55
+
56
+ class _ProxyTransaction:
57
+ def __init__(self, migration_context: MigrationContext) -> None:
58
+ self.migration_context = migration_context
59
+
60
+ @property
61
+ def _proxied_transaction(self) -> Optional[Transaction]:
62
+ return self.migration_context._transaction
63
+
64
+ def rollback(self) -> None:
65
+ t = self._proxied_transaction
66
+ assert t is not None
67
+ t.rollback()
68
+ self.migration_context._transaction = None
69
+
70
+ def commit(self) -> None:
71
+ t = self._proxied_transaction
72
+ assert t is not None
73
+ t.commit()
74
+ self.migration_context._transaction = None
75
+
76
+ def __enter__(self) -> _ProxyTransaction:
77
+ return self
78
+
79
+ def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
80
+ if self._proxied_transaction is not None:
81
+ self._proxied_transaction.__exit__(type_, value, traceback)
82
+ self.migration_context._transaction = None
83
+
84
+
85
+ class MigrationContext:
86
+ """Represent the database state made available to a migration
87
+ script.
88
+
89
+ :class:`.MigrationContext` is the front end to an actual
90
+ database connection, or alternatively a string output
91
+ stream given a particular database dialect,
92
+ from an Alembic perspective.
93
+
94
+ When inside the ``env.py`` script, the :class:`.MigrationContext`
95
+ is available via the
96
+ :meth:`.EnvironmentContext.get_context` method,
97
+ which is available at ``alembic.context``::
98
+
99
+ # from within env.py script
100
+ from alembic import context
101
+
102
+ migration_context = context.get_context()
103
+
104
+ For usage outside of an ``env.py`` script, such as for
105
+ utility routines that want to check the current version
106
+ in the database, the :meth:`.MigrationContext.configure`
107
+ method to create new :class:`.MigrationContext` objects.
108
+ For example, to get at the current revision in the
109
+ database using :meth:`.MigrationContext.get_current_revision`::
110
+
111
+ # in any application, outside of an env.py script
112
+ from alembic.migration import MigrationContext
113
+ from sqlalchemy import create_engine
114
+
115
+ engine = create_engine("postgresql://mydatabase")
116
+ conn = engine.connect()
117
+
118
+ context = MigrationContext.configure(conn)
119
+ current_rev = context.get_current_revision()
120
+
121
+ The above context can also be used to produce
122
+ Alembic migration operations with an :class:`.Operations`
123
+ instance::
124
+
125
+ # in any application, outside of the normal Alembic environment
126
+ from alembic.operations import Operations
127
+
128
+ op = Operations(context)
129
+ op.alter_column("mytable", "somecolumn", nullable=True)
130
+
131
+ """
132
+
133
+ def __init__(
134
+ self,
135
+ dialect: Dialect,
136
+ connection: Optional[Connection],
137
+ opts: Dict[str, Any],
138
+ environment_context: Optional[EnvironmentContext] = None,
139
+ ) -> None:
140
+ self.environment_context = environment_context
141
+ self.opts = opts
142
+ self.dialect = dialect
143
+ self.script: Optional[ScriptDirectory] = opts.get("script")
144
+ as_sql: bool = opts.get("as_sql", False)
145
+ transactional_ddl = opts.get("transactional_ddl")
146
+ self._transaction_per_migration = opts.get(
147
+ "transaction_per_migration", False
148
+ )
149
+ self.on_version_apply_callbacks = opts.get("on_version_apply", ())
150
+ self._transaction: Optional[Transaction] = None
151
+
152
+ if as_sql:
153
+ self.connection = cast(
154
+ Optional["Connection"], self._stdout_connection(connection)
155
+ )
156
+ assert self.connection is not None
157
+ self._in_external_transaction = False
158
+ else:
159
+ self.connection = connection
160
+ self._in_external_transaction = (
161
+ sqla_compat._get_connection_in_transaction(connection)
162
+ )
163
+
164
+ self._migrations_fn: Optional[
165
+ Callable[..., Iterable[RevisionStep]]
166
+ ] = opts.get("fn")
167
+ self.as_sql = as_sql
168
+
169
+ self.purge = opts.get("purge", False)
170
+
171
+ if "output_encoding" in opts:
172
+ self.output_buffer = EncodedIO(
173
+ opts.get("output_buffer")
174
+ or sys.stdout, # type:ignore[arg-type]
175
+ opts["output_encoding"],
176
+ )
177
+ else:
178
+ self.output_buffer = opts.get(
179
+ "output_buffer", sys.stdout
180
+ ) # type:ignore[assignment] # noqa: E501
181
+
182
+ self.transactional_ddl = transactional_ddl
183
+
184
+ self._user_compare_type = opts.get("compare_type", True)
185
+ self._user_compare_server_default = opts.get(
186
+ "compare_server_default", False
187
+ )
188
+ self.version_table = version_table = opts.get(
189
+ "version_table", "alembic_version"
190
+ )
191
+ self.version_table_schema = version_table_schema = opts.get(
192
+ "version_table_schema", None
193
+ )
194
+
195
+ self._start_from_rev: Optional[str] = opts.get("starting_rev")
196
+ self.impl = ddl.DefaultImpl.get_by_dialect(dialect)(
197
+ dialect,
198
+ self.connection,
199
+ self.as_sql,
200
+ transactional_ddl,
201
+ self.output_buffer,
202
+ opts,
203
+ )
204
+
205
+ self._version = self.impl.version_table_impl(
206
+ version_table=version_table,
207
+ version_table_schema=version_table_schema,
208
+ version_table_pk=opts.get("version_table_pk", True),
209
+ )
210
+
211
+ log.info("Context impl %s.", self.impl.__class__.__name__)
212
+ if self.as_sql:
213
+ log.info("Generating static SQL")
214
+ log.info(
215
+ "Will assume %s DDL.",
216
+ (
217
+ "transactional"
218
+ if self.impl.transactional_ddl
219
+ else "non-transactional"
220
+ ),
221
+ )
222
+
223
+ @classmethod
224
+ def configure(
225
+ cls,
226
+ connection: Optional[Connection] = None,
227
+ url: Optional[Union[str, URL]] = None,
228
+ dialect_name: Optional[str] = None,
229
+ dialect: Optional[Dialect] = None,
230
+ environment_context: Optional[EnvironmentContext] = None,
231
+ dialect_opts: Optional[Dict[str, str]] = None,
232
+ opts: Optional[Any] = None,
233
+ ) -> MigrationContext:
234
+ """Create a new :class:`.MigrationContext`.
235
+
236
+ This is a factory method usually called
237
+ by :meth:`.EnvironmentContext.configure`.
238
+
239
+ :param connection: a :class:`~sqlalchemy.engine.Connection`
240
+ to use for SQL execution in "online" mode. When present,
241
+ is also used to determine the type of dialect in use.
242
+ :param url: a string database url, or a
243
+ :class:`sqlalchemy.engine.url.URL` object.
244
+ The type of dialect to be used will be derived from this if
245
+ ``connection`` is not passed.
246
+ :param dialect_name: string name of a dialect, such as
247
+ "postgresql", "mssql", etc. The type of dialect to be used will be
248
+ derived from this if ``connection`` and ``url`` are not passed.
249
+ :param opts: dictionary of options. Most other options
250
+ accepted by :meth:`.EnvironmentContext.configure` are passed via
251
+ this dictionary.
252
+
253
+ """
254
+ if opts is None:
255
+ opts = {}
256
+ if dialect_opts is None:
257
+ dialect_opts = {}
258
+
259
+ if connection:
260
+ if isinstance(connection, Engine):
261
+ raise util.CommandError(
262
+ "'connection' argument to configure() is expected "
263
+ "to be a sqlalchemy.engine.Connection instance, "
264
+ "got %r" % connection,
265
+ )
266
+
267
+ dialect = connection.dialect
268
+ elif url:
269
+ url_obj = sqla_url.make_url(url)
270
+ dialect = url_obj.get_dialect()(**dialect_opts)
271
+ elif dialect_name:
272
+ url_obj = sqla_url.make_url("%s://" % dialect_name)
273
+ dialect = url_obj.get_dialect()(**dialect_opts)
274
+ elif not dialect:
275
+ raise Exception("Connection, url, or dialect_name is required.")
276
+ assert dialect is not None
277
+ return MigrationContext(dialect, connection, opts, environment_context)
278
+
279
+ @contextmanager
280
+ def autocommit_block(self) -> Iterator[None]:
281
+ """Enter an "autocommit" block, for databases that support AUTOCOMMIT
282
+ isolation levels.
283
+
284
+ This special directive is intended to support the occasional database
285
+ DDL or system operation that specifically has to be run outside of
286
+ any kind of transaction block. The PostgreSQL database platform
287
+ is the most common target for this style of operation, as many
288
+ of its DDL operations must be run outside of transaction blocks, even
289
+ though the database overall supports transactional DDL.
290
+
291
+ The method is used as a context manager within a migration script, by
292
+ calling on :meth:`.Operations.get_context` to retrieve the
293
+ :class:`.MigrationContext`, then invoking
294
+ :meth:`.MigrationContext.autocommit_block` using the ``with:``
295
+ statement::
296
+
297
+ def upgrade():
298
+ with op.get_context().autocommit_block():
299
+ op.execute("ALTER TYPE mood ADD VALUE 'soso'")
300
+
301
+ Above, a PostgreSQL "ALTER TYPE..ADD VALUE" directive is emitted,
302
+ which must be run outside of a transaction block at the database level.
303
+ The :meth:`.MigrationContext.autocommit_block` method makes use of the
304
+ SQLAlchemy ``AUTOCOMMIT`` isolation level setting, which against the
305
+ psycogp2 DBAPI corresponds to the ``connection.autocommit`` setting,
306
+ to ensure that the database driver is not inside of a DBAPI level
307
+ transaction block.
308
+
309
+ .. warning::
310
+
311
+ As is necessary, **the database transaction preceding the block is
312
+ unconditionally committed**. This means that the run of migrations
313
+ preceding the operation will be committed, before the overall
314
+ migration operation is complete.
315
+
316
+ It is recommended that when an application includes migrations with
317
+ "autocommit" blocks, that
318
+ :paramref:`.EnvironmentContext.transaction_per_migration` be used
319
+ so that the calling environment is tuned to expect short per-file
320
+ migrations whether or not one of them has an autocommit block.
321
+
322
+
323
+ """
324
+ _in_connection_transaction = self._in_connection_transaction()
325
+
326
+ if self.impl.transactional_ddl and self.as_sql:
327
+ self.impl.emit_commit()
328
+
329
+ elif _in_connection_transaction:
330
+ assert self._transaction is not None
331
+
332
+ self._transaction.commit()
333
+ self._transaction = None
334
+
335
+ if not self.as_sql:
336
+ assert self.connection is not None
337
+ current_level = self.connection.get_isolation_level()
338
+ base_connection = self.connection
339
+
340
+ # in 1.3 and 1.4 non-future mode, the connection gets switched
341
+ # out. we can use the base connection with the new mode
342
+ # except that it will not know it's in "autocommit" and will
343
+ # emit deprecation warnings when an autocommit action takes
344
+ # place.
345
+ self.connection = self.impl.connection = (
346
+ base_connection.execution_options(isolation_level="AUTOCOMMIT")
347
+ )
348
+
349
+ # sqlalchemy future mode will "autobegin" in any case, so take
350
+ # control of that "transaction" here
351
+ fake_trans: Optional[Transaction] = self.connection.begin()
352
+ else:
353
+ fake_trans = None
354
+ try:
355
+ yield
356
+ finally:
357
+ if not self.as_sql:
358
+ assert self.connection is not None
359
+ if fake_trans is not None:
360
+ fake_trans.commit()
361
+ self.connection.execution_options(
362
+ isolation_level=current_level
363
+ )
364
+ self.connection = self.impl.connection = base_connection
365
+
366
+ if self.impl.transactional_ddl and self.as_sql:
367
+ self.impl.emit_begin()
368
+
369
+ elif _in_connection_transaction:
370
+ assert self.connection is not None
371
+ self._transaction = self.connection.begin()
372
+
373
+ def begin_transaction(
374
+ self, _per_migration: bool = False
375
+ ) -> Union[_ProxyTransaction, ContextManager[None, Optional[bool]]]:
376
+ """Begin a logical transaction for migration operations.
377
+
378
+ This method is used within an ``env.py`` script to demarcate where
379
+ the outer "transaction" for a series of migrations begins. Example::
380
+
381
+ def run_migrations_online():
382
+ connectable = create_engine(...)
383
+
384
+ with connectable.connect() as connection:
385
+ context.configure(
386
+ connection=connection, target_metadata=target_metadata
387
+ )
388
+
389
+ with context.begin_transaction():
390
+ context.run_migrations()
391
+
392
+ Above, :meth:`.MigrationContext.begin_transaction` is used to demarcate
393
+ where the outer logical transaction occurs around the
394
+ :meth:`.MigrationContext.run_migrations` operation.
395
+
396
+ A "Logical" transaction means that the operation may or may not
397
+ correspond to a real database transaction. If the target database
398
+ supports transactional DDL (or
399
+ :paramref:`.EnvironmentContext.configure.transactional_ddl` is true),
400
+ the :paramref:`.EnvironmentContext.configure.transaction_per_migration`
401
+ flag is not set, and the migration is against a real database
402
+ connection (as opposed to using "offline" ``--sql`` mode), a real
403
+ transaction will be started. If ``--sql`` mode is in effect, the
404
+ operation would instead correspond to a string such as "BEGIN" being
405
+ emitted to the string output.
406
+
407
+ The returned object is a Python context manager that should only be
408
+ used in the context of a ``with:`` statement as indicated above.
409
+ The object has no other guaranteed API features present.
410
+
411
+ .. seealso::
412
+
413
+ :meth:`.MigrationContext.autocommit_block`
414
+
415
+ """
416
+
417
+ if self._in_external_transaction:
418
+ return nullcontext()
419
+
420
+ if self.impl.transactional_ddl:
421
+ transaction_now = _per_migration == self._transaction_per_migration
422
+ else:
423
+ transaction_now = _per_migration is True
424
+
425
+ if not transaction_now:
426
+ return nullcontext()
427
+
428
+ elif not self.impl.transactional_ddl:
429
+ assert _per_migration
430
+
431
+ if self.as_sql:
432
+ return nullcontext()
433
+ else:
434
+ # track our own notion of a "transaction block", which must be
435
+ # committed when complete. Don't rely upon whether or not the
436
+ # SQLAlchemy connection reports as "in transaction"; this
437
+ # because SQLAlchemy future connection features autobegin
438
+ # behavior, so it may already be in a transaction from our
439
+ # emitting of queries like "has_version_table", etc. While we
440
+ # could track these operations as well, that leaves open the
441
+ # possibility of new operations or other things happening in
442
+ # the user environment that still may be triggering
443
+ # "autobegin".
444
+
445
+ in_transaction = self._transaction is not None
446
+
447
+ if in_transaction:
448
+ return nullcontext()
449
+ else:
450
+ assert self.connection is not None
451
+ self._transaction = (
452
+ sqla_compat._safe_begin_connection_transaction(
453
+ self.connection
454
+ )
455
+ )
456
+ return _ProxyTransaction(self)
457
+ elif self.as_sql:
458
+
459
+ @contextmanager
460
+ def begin_commit():
461
+ self.impl.emit_begin()
462
+ yield
463
+ self.impl.emit_commit()
464
+
465
+ return begin_commit()
466
+ else:
467
+ assert self.connection is not None
468
+ self._transaction = sqla_compat._safe_begin_connection_transaction(
469
+ self.connection
470
+ )
471
+ return _ProxyTransaction(self)
472
+
473
+ def get_current_revision(self) -> Optional[str]:
474
+ """Return the current revision, usually that which is present
475
+ in the ``alembic_version`` table in the database.
476
+
477
+ This method is intended to be used only for a migration stream that
+ does not contain unmerged branches in the target database;
+ if multiple branches are present, an exception is raised.
+ The :meth:`.MigrationContext.get_current_heads` method should be
+ preferred going forward, as it is compatible with branch
+ migration support.
483
+
484
+ If this :class:`.MigrationContext` was configured in "offline"
485
+ mode, that is with ``as_sql=True``, the ``starting_rev``
486
+ parameter is returned instead, if any.
487
+
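+ E.g., a minimal usage sketch, assuming ``connection`` is an
+ existing :class:`~sqlalchemy.engine.Connection`::
+
+     from alembic.migration import MigrationContext
+
+     context = MigrationContext.configure(connection)
+     current_rev = context.get_current_revision()
+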
488
+ """
489
+ heads = self.get_current_heads()
490
+ if len(heads) == 0:
491
+ return None
492
+ elif len(heads) > 1:
493
+ raise util.CommandError(
494
+ "Version table '%s' has more than one head present; "
495
+ "please use get_current_heads()" % self.version_table
496
+ )
497
+ else:
498
+ return heads[0]
499
+
500
+ def get_current_heads(self) -> Tuple[str, ...]:
501
+ """Return a tuple of the current 'head versions' that are represented
502
+ in the target database.
503
+
504
+ For a migration stream without branches, this will be a single
505
+ value, synonymous with that of
506
+ :meth:`.MigrationContext.get_current_revision`. However when multiple
507
+ unmerged branches exist within the target database, the returned tuple
508
+ will contain a value for each head.
509
+
510
+ If this :class:`.MigrationContext` was configured in "offline"
511
+ mode, that is with ``as_sql=True``, the ``starting_rev``
512
+ parameter is returned in a one-length tuple.
513
+
514
+ If no version table is present, or if there are no revisions
515
+ present, an empty tuple is returned.
516
+
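+ E.g., a minimal sketch; the revision strings shown are illustrative
+ only::
+
+     heads = context.get_current_heads()
+     # e.g. () for an empty database, or
+     # ("ae1027a6acf", "27c6a30d7c24") when two unmerged
+     # branch heads are present
+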
517
+ """
518
+ if self.as_sql:
519
+ start_from_rev: Any = self._start_from_rev
520
+ if start_from_rev == "base":
521
+ start_from_rev = None
522
+ elif start_from_rev is not None and self.script:
523
+ start_from_rev = [
524
+ self.script.get_revision(sfr).revision
525
+ for sfr in util.to_list(start_from_rev)
526
+ if sfr not in (None, "base")
527
+ ]
528
+ return util.to_tuple(start_from_rev, default=())
529
+ else:
530
+ if self._start_from_rev:
531
+ raise util.CommandError(
532
+ "Can't specify current_rev to context "
533
+ "when using a database connection"
534
+ )
535
+ if not self._has_version_table():
536
+ return ()
537
+ assert self.connection is not None
538
+ return tuple(
539
+ row[0]
540
+ for row in self.connection.execute(
541
+ select(self._version.c.version_num)
542
+ )
543
+ )
544
+
545
+ def _ensure_version_table(self, purge: bool = False) -> None:
546
+ with sqla_compat._ensure_scope_for_ddl(self.connection):
547
+ assert self.connection is not None
548
+ self._version.create(self.connection, checkfirst=True)
549
+ if purge:
550
+ assert self.connection is not None
551
+ self.connection.execute(self._version.delete())
552
+
553
+ def _has_version_table(self) -> bool:
554
+ assert self.connection is not None
555
+ return sqla_compat._connectable_has_table(
556
+ self.connection, self.version_table, self.version_table_schema
557
+ )
558
+
559
+ def stamp(self, script_directory: ScriptDirectory, revision: str) -> None:
560
+ """Stamp the version table with a specific revision.
561
+
562
+ This method calculates those branches to which the given revision
563
+ can apply, and updates those branches as though they were migrated
564
+ towards that revision (either up or down). If no current branches
565
+ include the revision, it is added as a new branch head.
566
+
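+ A minimal sketch, assuming a :class:`.Config` named ``config`` is
+ already available::
+
+     from alembic.script import ScriptDirectory
+
+     script = ScriptDirectory.from_config(config)
+     context.stamp(script, "head")
+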
567
+ """
568
+ heads = self.get_current_heads()
569
+ if not self.as_sql and not heads:
570
+ self._ensure_version_table()
571
+ head_maintainer = HeadMaintainer(self, heads)
572
+ for step in script_directory._stamp_revs(revision, heads):
573
+ head_maintainer.update_to_step(step)
574
+
575
+ def run_migrations(self, **kw: Any) -> None:
576
+ r"""Run the migration scripts established for this
577
+ :class:`.MigrationContext`, if any.
578
+
579
+ The commands in :mod:`alembic.command` will set up a function
580
+ that is ultimately passed to the :class:`.MigrationContext`
581
+ as the ``fn`` argument. This function represents the "work"
582
+ that will be done when :meth:`.MigrationContext.run_migrations`
583
+ is called, typically from within the ``env.py`` script of the
584
+ migration environment. The "work function" then provides an iterable
585
+ of version callables and other version information which
586
+ in the case of the ``upgrade`` or ``downgrade`` commands are the
587
+ list of version scripts to invoke. Other commands yield nothing,
588
+ in the case that a command wants to run some other operation
589
+ against the database such as the ``current`` or ``stamp`` commands.
590
+
591
+ :param \**kw: keyword arguments here will be passed to each
592
+ migration callable, that is the ``upgrade()`` or ``downgrade()``
593
+ method within revision scripts.
594
+
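+ E.g., a hypothetical sketch within ``env.py`` passing an extra
+ ``engine_name`` argument, which each ``upgrade()`` / ``downgrade()``
+ would then need to accept::
+
+     with context.begin_transaction():
+         context.run_migrations(engine_name="default")
+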
595
+ """
596
+ self.impl.start_migrations()
597
+
598
+ heads: Tuple[str, ...]
599
+ if self.purge:
600
+ if self.as_sql:
601
+ raise util.CommandError("Can't use --purge with --sql mode")
602
+ self._ensure_version_table(purge=True)
603
+ heads = ()
604
+ else:
605
+ heads = self.get_current_heads()
606
+
607
+ dont_mutate = self.opts.get("dont_mutate", False)
608
+
609
+ if not self.as_sql and not heads and not dont_mutate:
610
+ self._ensure_version_table()
611
+
612
+ head_maintainer = HeadMaintainer(self, heads)
613
+
614
+ assert self._migrations_fn is not None
615
+ for step in self._migrations_fn(heads, self):
616
+ with self.begin_transaction(_per_migration=True):
617
+ if self.as_sql and not head_maintainer.heads:
618
+ # for offline mode, include a CREATE TABLE from
619
+ # the base
620
+ assert self.connection is not None
621
+ self._version.create(self.connection)
622
+ log.info("Running %s", step)
623
+ if self.as_sql:
624
+ self.impl.static_output(
625
+ "-- Running %s" % (step.short_log,)
626
+ )
627
+ step.migration_fn(**kw)
628
+
629
+ # previously, we wouldn't stamp per migration
630
+ # if we were in a transaction, however given the more
631
+ # complex model that involves any number of inserts
632
+ # and row-targeted updates and deletes, it's simpler for now
633
+ # just to run the operations on every version
634
+ head_maintainer.update_to_step(step)
635
+ for callback in self.on_version_apply_callbacks:
636
+ callback(
637
+ ctx=self,
638
+ step=step.info,
639
+ heads=set(head_maintainer.heads),
640
+ run_args=kw,
641
+ )
642
+
643
+ if self.as_sql and not head_maintainer.heads:
644
+ assert self.connection is not None
645
+ self._version.drop(self.connection)
646
+
647
+ def _in_connection_transaction(self) -> bool:
648
+ try:
649
+ meth = self.connection.in_transaction # type:ignore[union-attr]
650
+ except AttributeError:
651
+ return False
652
+ else:
653
+ return meth()
654
+
655
+ def execute(
656
+ self,
657
+ sql: Union[Executable, str],
658
+ execution_options: Optional[Dict[str, Any]] = None,
659
+ ) -> None:
660
+ """Execute a SQL construct or string statement.
661
+
662
+ The underlying execution mechanics are used, that is
663
+ if this is "offline mode" the SQL is written to the
664
+ output buffer, otherwise the SQL is emitted on
665
+ the current SQLAlchemy connection.
666
+
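+ E.g., a minimal sketch; the table and column names are illustrative
+ only::
+
+     context.execute("UPDATE some_table SET flag=1")
+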
667
+ """
668
+ self.impl._exec(sql, execution_options)
669
+
670
+ def _stdout_connection(
671
+ self, connection: Optional[Connection]
672
+ ) -> MockConnection:
673
+ def dump(construct, *multiparams, **params):
674
+ self.impl._exec(construct)
675
+
676
+ return MockEngineStrategy.MockConnection(self.dialect, dump)
677
+
678
+ @property
679
+ def bind(self) -> Optional[Connection]:
680
+ """Return the current "bind".
681
+
682
+ In online mode, this is an instance of
683
+ :class:`sqlalchemy.engine.Connection`, and is suitable
684
+ for ad-hoc execution of any kind of usage described
685
+ in SQLAlchemy Core documentation as well as
686
+ for usage with the :meth:`sqlalchemy.schema.Table.create`
687
+ and :meth:`sqlalchemy.schema.MetaData.create_all` methods
688
+ of :class:`~sqlalchemy.schema.Table`,
689
+ :class:`~sqlalchemy.schema.MetaData`.
690
+
691
+ Note that when "standard output" mode is enabled,
692
+ this bind will be a "mock" connection handler that cannot
693
+ return results and is only appropriate for a very limited
694
+ subset of commands.
695
+
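+ A hypothetical ad-hoc usage sketch, where ``my_table`` is an
+ illustrative :class:`~sqlalchemy.schema.Table`::
+
+     connection = context.bind
+     if connection is not None:
+         my_table.create(connection, checkfirst=True)
+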
696
+ """
697
+ return self.connection
698
+
699
+ @property
700
+ def config(self) -> Optional[Config]:
701
+ """Return the :class:`.Config` used by the current environment,
702
+ if any."""
703
+
704
+ if self.environment_context:
705
+ return self.environment_context.config
706
+ else:
707
+ return None
708
+
709
+ def _compare_type(
710
+ self, inspector_column: Column[Any], metadata_column: Column
711
+ ) -> bool:
712
+ if self._user_compare_type is False:
713
+ return False
714
+
715
+ if callable(self._user_compare_type):
716
+ user_value = self._user_compare_type(
717
+ self,
718
+ inspector_column,
719
+ metadata_column,
720
+ inspector_column.type,
721
+ metadata_column.type,
722
+ )
723
+ if user_value is not None:
724
+ return user_value
725
+
726
+ return self.impl.compare_type(inspector_column, metadata_column)
727
+
728
+ def _compare_server_default(
729
+ self,
730
+ inspector_column: Column[Any],
731
+ metadata_column: Column[Any],
732
+ rendered_metadata_default: Optional[str],
733
+ rendered_column_default: Optional[str],
734
+ ) -> bool:
735
+ if self._user_compare_server_default is False:
736
+ return False
737
+
738
+ if callable(self._user_compare_server_default):
739
+ user_value = self._user_compare_server_default(
740
+ self,
741
+ inspector_column,
742
+ metadata_column,
743
+ rendered_column_default,
744
+ metadata_column.server_default,
745
+ rendered_metadata_default,
746
+ )
747
+ if user_value is not None:
748
+ return user_value
749
+
750
+ return self.impl.compare_server_default(
751
+ inspector_column,
752
+ metadata_column,
753
+ rendered_metadata_default,
754
+ rendered_column_default,
755
+ )
756
+
757
+
758
+ class HeadMaintainer:
759
+ def __init__(self, context: MigrationContext, heads: Any) -> None:
760
+ self.context = context
761
+ self.heads = set(heads)
762
+
763
+ def _insert_version(self, version: str) -> None:
764
+ assert version not in self.heads
765
+ self.heads.add(version)
766
+
767
+ self.context.impl._exec(
768
+ self.context._version.insert().values(
769
+ version_num=literal_column("'%s'" % version)
770
+ )
771
+ )
772
+
773
+ def _delete_version(self, version: str) -> None:
774
+ self.heads.remove(version)
775
+
776
+ ret = self.context.impl._exec(
777
+ self.context._version.delete().where(
778
+ self.context._version.c.version_num
779
+ == literal_column("'%s'" % version)
780
+ )
781
+ )
782
+
783
+ if (
784
+ not self.context.as_sql
785
+ and self.context.dialect.supports_sane_rowcount
786
+ and ret is not None
787
+ and ret.rowcount != 1
788
+ ):
789
+ raise util.CommandError(
790
+ "Online migration expected to match one "
791
+ "row when deleting '%s' in '%s'; "
792
+ "%d found"
793
+ % (version, self.context.version_table, ret.rowcount)
794
+ )
795
+
796
+ def _update_version(self, from_: str, to_: str) -> None:
797
+ assert to_ not in self.heads
798
+ self.heads.remove(from_)
799
+ self.heads.add(to_)
800
+
801
+ ret = self.context.impl._exec(
802
+ self.context._version.update()
803
+ .values(version_num=literal_column("'%s'" % to_))
804
+ .where(
805
+ self.context._version.c.version_num
806
+ == literal_column("'%s'" % from_)
807
+ )
808
+ )
809
+
810
+ if (
811
+ not self.context.as_sql
812
+ and self.context.dialect.supports_sane_rowcount
813
+ and ret is not None
814
+ and ret.rowcount != 1
815
+ ):
816
+ raise util.CommandError(
817
+ "Online migration expected to match one "
818
+ "row when updating '%s' to '%s' in '%s'; "
819
+ "%d found"
820
+ % (from_, to_, self.context.version_table, ret.rowcount)
821
+ )
822
+
823
+ def update_to_step(self, step: Union[RevisionStep, StampStep]) -> None:
824
+ if step.should_delete_branch(self.heads):
825
+ vers = step.delete_version_num
826
+ log.debug("branch delete %s", vers)
827
+ self._delete_version(vers)
828
+ elif step.should_create_branch(self.heads):
829
+ vers = step.insert_version_num
830
+ log.debug("new branch insert %s", vers)
831
+ self._insert_version(vers)
832
+ elif step.should_merge_branches(self.heads):
833
+ # delete revs, update from rev, update to rev
834
+ (
835
+ delete_revs,
836
+ update_from_rev,
837
+ update_to_rev,
838
+ ) = step.merge_branch_idents(self.heads)
839
+ log.debug(
840
+ "merge, delete %s, update %s to %s",
841
+ delete_revs,
842
+ update_from_rev,
843
+ update_to_rev,
844
+ )
845
+ for delrev in delete_revs:
846
+ self._delete_version(delrev)
847
+ self._update_version(update_from_rev, update_to_rev)
848
+ elif step.should_unmerge_branches(self.heads):
849
+ (
850
+ update_from_rev,
851
+ update_to_rev,
852
+ insert_revs,
853
+ ) = step.unmerge_branch_idents(self.heads)
854
+ log.debug(
855
+ "unmerge, insert %s, update %s to %s",
856
+ insert_revs,
857
+ update_from_rev,
858
+ update_to_rev,
859
+ )
860
+ for insrev in insert_revs:
861
+ self._insert_version(insrev)
862
+ self._update_version(update_from_rev, update_to_rev)
863
+ else:
864
+ from_, to_ = step.update_version_num(self.heads)
865
+ log.debug("update %s to %s", from_, to_)
866
+ self._update_version(from_, to_)
867
+
868
+
869
+ class MigrationInfo:
870
+ """Exposes information about a migration step to a callback listener.
871
+
872
+ The :class:`.MigrationInfo` object is available exclusively for the
873
+ benefit of the :paramref:`.EnvironmentContext.on_version_apply`
874
+ callback hook.
875
+
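+ E.g., a sketch of an ``env.py`` callback receiving this object;
+ the logging performed is illustrative only::
+
+     import logging
+
+     log = logging.getLogger(__name__)
+
+
+     def on_version_apply(*, ctx, step, heads, run_args):
+         # ``step`` is this MigrationInfo object
+         log.info(
+             "applied %s (upgrade=%s)",
+             step.up_revision_id,
+             step.is_upgrade,
+         )
+
+
+     context.configure(
+         connection=connection, on_version_apply=on_version_apply
+     )
+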
876
+ """
877
+
878
+ is_upgrade: bool
879
+ """True/False: indicates whether this operation ascends or descends the
880
+ version tree."""
881
+
882
+ is_stamp: bool
883
+ """True/False: indicates whether this operation is a stamp (i.e. whether
884
+ it results in any actual database operations)."""
885
+
886
+ up_revision_id: Optional[str]
887
+ """Version string corresponding to :attr:`.Revision.revision`.
888
+
889
+ In the case of a stamp operation, it is advised to use the
890
+ :attr:`.MigrationInfo.up_revision_ids` tuple, as a stamp operation can
891
+ make a single movement from one or more branches down to a single
892
+ branchpoint, in which case there will be multiple "up" revisions.
893
+
894
+ .. seealso::
895
+
896
+ :attr:`.MigrationInfo.up_revision_ids`
897
+
898
+ """
899
+
900
+ up_revision_ids: Tuple[str, ...]
901
+ """Tuple of version strings corresponding to :attr:`.Revision.revision`.
902
+
903
+ In the majority of cases, this tuple will be a single value, synonymous
904
+ with the scalar value of :attr:`.MigrationInfo.up_revision_id`.
905
+ It can be multiple revision identifiers only in the case of an
906
+ ``alembic stamp`` operation which is moving downwards from multiple
907
+ branches down to their common branch point.
908
+
909
+ """
910
+
911
+ down_revision_ids: Tuple[str, ...]
912
+ """Tuple of strings representing the base revisions of this migration step.
913
+
914
+ If empty, this represents a root revision; otherwise, the first item
915
+ corresponds to :attr:`.Revision.down_revision`, and the rest are inferred
916
+ from dependencies.
917
+ """
918
+
919
+ revision_map: RevisionMap
920
+ """The revision map inside of which this operation occurs."""
921
+
922
+ def __init__(
923
+ self,
924
+ revision_map: RevisionMap,
925
+ is_upgrade: bool,
926
+ is_stamp: bool,
927
+ up_revisions: Union[str, Tuple[str, ...]],
928
+ down_revisions: Union[str, Tuple[str, ...]],
929
+ ) -> None:
930
+ self.revision_map = revision_map
931
+ self.is_upgrade = is_upgrade
932
+ self.is_stamp = is_stamp
933
+ self.up_revision_ids = util.to_tuple(up_revisions, default=())
934
+ if self.up_revision_ids:
935
+ self.up_revision_id = self.up_revision_ids[0]
936
+ else:
937
+ # this should never be the case with
938
+ # "upgrade", "downgrade", or "stamp" as we are always
939
+ # measuring movement in terms of at least one upgrade version
940
+ self.up_revision_id = None
941
+ self.down_revision_ids = util.to_tuple(down_revisions, default=())
942
+
943
+ @property
944
+ def is_migration(self) -> bool:
945
+ """True/False: indicates whether this operation is a migration.
946
+
947
+ At present this is true if and only if the migration is not a stamp.
+ If other operation types are added in the future, both this attribute
+ and :attr:`~.MigrationInfo.is_stamp` will be false for them.
950
+ """
951
+ return not self.is_stamp
952
+
953
+ @property
954
+ def source_revision_ids(self) -> Tuple[str, ...]:
955
+ """Active revisions before this migration step is applied."""
956
+ return (
957
+ self.down_revision_ids if self.is_upgrade else self.up_revision_ids
958
+ )
959
+
960
+ @property
961
+ def destination_revision_ids(self) -> Tuple[str, ...]:
962
+ """Active revisions after this migration step is applied."""
963
+ return (
964
+ self.up_revision_ids if self.is_upgrade else self.down_revision_ids
965
+ )
966
+
967
+ @property
968
+ def up_revision(self) -> Optional[Revision]:
969
+ """Get :attr:`~.MigrationInfo.up_revision_id` as
970
+ a :class:`.Revision`.
971
+
972
+ """
973
+ return self.revision_map.get_revision(self.up_revision_id)
974
+
975
+ @property
976
+ def up_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
977
+ """Get :attr:`~.MigrationInfo.up_revision_ids` as a
978
+ :class:`.Revision`."""
979
+ return self.revision_map.get_revisions(self.up_revision_ids)
980
+
981
+ @property
982
+ def down_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
983
+ """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of
984
+ :class:`Revisions <.Revision>`."""
985
+ return self.revision_map.get_revisions(self.down_revision_ids)
986
+
987
+ @property
988
+ def source_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
989
+ """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of
990
+ :class:`Revisions <.Revision>`."""
991
+ return self.revision_map.get_revisions(self.source_revision_ids)
992
+
993
+ @property
994
+ def destination_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]:
995
+ """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of
996
+ :class:`Revisions <.Revision>`."""
997
+ return self.revision_map.get_revisions(self.destination_revision_ids)
998
+
999
+
1000
+ class MigrationStep:
1001
+ from_revisions_no_deps: Tuple[str, ...]
1002
+ to_revisions_no_deps: Tuple[str, ...]
1003
+ is_upgrade: bool
1004
+ migration_fn: Any
1005
+
1006
+ if TYPE_CHECKING:
1007
+
1008
+ @property
1009
+ def doc(self) -> Optional[str]: ...
1010
+
1011
+ @property
1012
+ def name(self) -> str:
1013
+ return self.migration_fn.__name__
1014
+
1015
+ @classmethod
1016
+ def upgrade_from_script(
1017
+ cls, revision_map: RevisionMap, script: Script
1018
+ ) -> RevisionStep:
1019
+ return RevisionStep(revision_map, script, True)
1020
+
1021
+ @classmethod
1022
+ def downgrade_from_script(
1023
+ cls, revision_map: RevisionMap, script: Script
1024
+ ) -> RevisionStep:
1025
+ return RevisionStep(revision_map, script, False)
1026
+
1027
+ @property
1028
+ def is_downgrade(self) -> bool:
1029
+ return not self.is_upgrade
1030
+
1031
+ @property
1032
+ def short_log(self) -> str:
1033
+ return "%s %s -> %s" % (
1034
+ self.name,
1035
+ util.format_as_comma(self.from_revisions_no_deps),
1036
+ util.format_as_comma(self.to_revisions_no_deps),
1037
+ )
1038
+
1039
+ def __str__(self):
1040
+ if self.doc:
1041
+ return "%s %s -> %s, %s" % (
1042
+ self.name,
1043
+ util.format_as_comma(self.from_revisions_no_deps),
1044
+ util.format_as_comma(self.to_revisions_no_deps),
1045
+ self.doc,
1046
+ )
1047
+ else:
1048
+ return self.short_log
1049
+
1050
+
1051
+ class RevisionStep(MigrationStep):
1052
+ def __init__(
1053
+ self, revision_map: RevisionMap, revision: Script, is_upgrade: bool
1054
+ ) -> None:
1055
+ self.revision_map = revision_map
1056
+ self.revision = revision
1057
+ self.is_upgrade = is_upgrade
1058
+ if is_upgrade:
1059
+ self.migration_fn = revision.module.upgrade
1060
+ else:
1061
+ self.migration_fn = revision.module.downgrade
1062
+
1063
+ def __repr__(self):
1064
+ return "RevisionStep(%r, is_upgrade=%r)" % (
1065
+ self.revision.revision,
1066
+ self.is_upgrade,
1067
+ )
1068
+
1069
+ def __eq__(self, other: object) -> bool:
1070
+ return (
1071
+ isinstance(other, RevisionStep)
1072
+ and other.revision == self.revision
1073
+ and self.is_upgrade == other.is_upgrade
1074
+ )
1075
+
1076
+ @property
1077
+ def doc(self) -> Optional[str]:
1078
+ return self.revision.doc
1079
+
1080
+ @property
1081
+ def from_revisions(self) -> Tuple[str, ...]:
1082
+ if self.is_upgrade:
1083
+ return self.revision._normalized_down_revisions
1084
+ else:
1085
+ return (self.revision.revision,)
1086
+
1087
+ @property
1088
+ def from_revisions_no_deps( # type:ignore[override]
1089
+ self,
1090
+ ) -> Tuple[str, ...]:
1091
+ if self.is_upgrade:
1092
+ return self.revision._versioned_down_revisions
1093
+ else:
1094
+ return (self.revision.revision,)
1095
+
1096
+ @property
1097
+ def to_revisions(self) -> Tuple[str, ...]:
1098
+ if self.is_upgrade:
1099
+ return (self.revision.revision,)
1100
+ else:
1101
+ return self.revision._normalized_down_revisions
1102
+
1103
+ @property
1104
+ def to_revisions_no_deps( # type:ignore[override]
1105
+ self,
1106
+ ) -> Tuple[str, ...]:
1107
+ if self.is_upgrade:
1108
+ return (self.revision.revision,)
1109
+ else:
1110
+ return self.revision._versioned_down_revisions
1111
+
1112
+ @property
1113
+ def _has_scalar_down_revision(self) -> bool:
1114
+ return len(self.revision._normalized_down_revisions) == 1
1115
+
1116
+ def should_delete_branch(self, heads: Set[str]) -> bool:
1117
+ """A delete is when we are a. in a downgrade and b.
1118
+ we are going to the "base" or we are going to a version that
1119
+ is implied as a dependency on another version that is remaining.
1120
+
1121
+ """
1122
+ if not self.is_downgrade:
1123
+ return False
1124
+
1125
+ if self.revision.revision not in heads:
1126
+ return False
1127
+
1128
+ downrevs = self.revision._normalized_down_revisions
1129
+
1130
+ if not downrevs:
1131
+ # is a base
1132
+ return True
1133
+ else:
1134
+ # determine what the ultimate "to_revisions" for an
1135
+ # unmerge would be. If there are none, then we're a delete.
1136
+ to_revisions = self._unmerge_to_revisions(heads)
1137
+ return not to_revisions
1138
+
1139
+ def merge_branch_idents(
1140
+ self, heads: Set[str]
1141
+ ) -> Tuple[List[str], str, str]:
1142
+ other_heads = set(heads).difference(self.from_revisions)
1143
+
1144
+ if other_heads:
1145
+ ancestors = {
1146
+ r.revision
1147
+ for r in self.revision_map._get_ancestor_nodes(
1148
+ self.revision_map.get_revisions(other_heads), check=False
1149
+ )
1150
+ }
1151
+ from_revisions = list(
1152
+ set(self.from_revisions).difference(ancestors)
1153
+ )
1154
+ else:
1155
+ from_revisions = list(self.from_revisions)
1156
+
1157
+ return (
1158
+ # delete revs, update from rev, update to rev
1159
+ list(from_revisions[0:-1]),
1160
+ from_revisions[-1],
1161
+ self.to_revisions[0],
1162
+ )
1163
+
1164
+ def _unmerge_to_revisions(self, heads: Set[str]) -> Tuple[str, ...]:
1165
+ other_heads = set(heads).difference([self.revision.revision])
1166
+ if other_heads:
1167
+ ancestors = {
1168
+ r.revision
1169
+ for r in self.revision_map._get_ancestor_nodes(
1170
+ self.revision_map.get_revisions(other_heads), check=False
1171
+ )
1172
+ }
1173
+ return tuple(set(self.to_revisions).difference(ancestors))
1174
+ else:
1175
+ # for each revision we plan to return, compute its ancestors
1176
+ # (excluding self), and remove those from the final output since
1177
+ # they are already accounted for.
1178
+ ancestors = {
1179
+ r.revision
1180
+ for to_revision in self.to_revisions
1181
+ for r in self.revision_map._get_ancestor_nodes(
1182
+ self.revision_map.get_revisions(to_revision), check=False
1183
+ )
1184
+ if r.revision != to_revision
1185
+ }
1186
+ return tuple(set(self.to_revisions).difference(ancestors))
1187
+
1188
+ def unmerge_branch_idents(
1189
+ self, heads: Set[str]
1190
+ ) -> Tuple[str, str, Tuple[str, ...]]:
1191
+ to_revisions = self._unmerge_to_revisions(heads)
1192
+
1193
+ return (
1194
+ # update from rev, update to rev, insert revs
1195
+ self.from_revisions[0],
1196
+ to_revisions[-1],
1197
+ to_revisions[0:-1],
1198
+ )
1199
+
1200
+ def should_create_branch(self, heads: Set[str]) -> bool:
1201
+ if not self.is_upgrade:
1202
+ return False
1203
+
1204
+ downrevs = self.revision._normalized_down_revisions
1205
+
1206
+ if not downrevs:
1207
+ # is a base
1208
+ return True
1209
+ else:
1210
+ # none of our downrevs are present, so...
1211
+ # we have to insert our version. This is true whether
1212
+ # or not there is only one downrev, or multiple (in the latter
1213
+ # case, we're a merge point.)
1214
+ if not heads.intersection(downrevs):
1215
+ return True
1216
+ else:
1217
+ return False
1218
+
1219
+ def should_merge_branches(self, heads: Set[str]) -> bool:
1220
+ if not self.is_upgrade:
1221
+ return False
1222
+
1223
+ downrevs = self.revision._normalized_down_revisions
1224
+
1225
+ if len(downrevs) > 1 and len(heads.intersection(downrevs)) > 1:
1226
+ return True
1227
+
1228
+ return False
1229
+
1230
+ def should_unmerge_branches(self, heads: Set[str]) -> bool:
1231
+ if not self.is_downgrade:
1232
+ return False
1233
+
1234
+ downrevs = self.revision._normalized_down_revisions
1235
+
1236
+ if self.revision.revision in heads and len(downrevs) > 1:
1237
+ return True
1238
+
1239
+ return False
1240
+
1241
+ def update_version_num(self, heads: Set[str]) -> Tuple[str, str]:
1242
+ if not self._has_scalar_down_revision:
1243
+ downrev = heads.intersection(
1244
+ self.revision._normalized_down_revisions
1245
+ )
1246
+ assert (
1247
+ len(downrev) == 1
1248
+ ), "Can't do an UPDATE because downrevision is ambiguous"
1249
+ down_revision = list(downrev)[0]
1250
+ else:
1251
+ down_revision = self.revision._normalized_down_revisions[0]
1252
+
1253
+ if self.is_upgrade:
1254
+ return down_revision, self.revision.revision
1255
+ else:
1256
+ return self.revision.revision, down_revision
1257
+
1258
+ @property
1259
+ def delete_version_num(self) -> str:
1260
+ return self.revision.revision
1261
+
1262
+ @property
1263
+ def insert_version_num(self) -> str:
1264
+ return self.revision.revision
1265
+
1266
+ @property
1267
+ def info(self) -> MigrationInfo:
1268
+ return MigrationInfo(
1269
+ revision_map=self.revision_map,
1270
+ up_revisions=self.revision.revision,
1271
+ down_revisions=self.revision._normalized_down_revisions,
1272
+ is_upgrade=self.is_upgrade,
1273
+ is_stamp=False,
1274
+ )
1275
+
1276
+
1277
+ class StampStep(MigrationStep):
1278
+ def __init__(
1279
+ self,
1280
+ from_: Optional[Union[str, Collection[str]]],
1281
+ to_: Optional[Union[str, Collection[str]]],
1282
+ is_upgrade: bool,
1283
+ branch_move: bool,
1284
+ revision_map: Optional[RevisionMap] = None,
1285
+ ) -> None:
1286
+ self.from_: Tuple[str, ...] = util.to_tuple(from_, default=())
1287
+ self.to_: Tuple[str, ...] = util.to_tuple(to_, default=())
1288
+ self.is_upgrade = is_upgrade
1289
+ self.branch_move = branch_move
1290
+ self.migration_fn = self.stamp_revision
1291
+ self.revision_map = revision_map
1292
+
1293
+ doc: Optional[str] = None
1294
+
1295
+ def stamp_revision(self, **kw: Any) -> None:
1296
+ return None
1297
+
1298
+ def __eq__(self, other):
1299
+ return (
1300
+ isinstance(other, StampStep)
1301
+ and other.from_revisions == self.from_revisions
1302
+ and other.to_revisions == self.to_revisions
1303
+ and other.branch_move == self.branch_move
1304
+ and self.is_upgrade == other.is_upgrade
1305
+ )
1306
+
1307
+ @property
1308
+ def from_revisions(self):
1309
+ return self.from_
1310
+
1311
+ @property
1312
+ def to_revisions(self) -> Tuple[str, ...]:
1313
+ return self.to_
1314
+
1315
+ @property
1316
+ def from_revisions_no_deps( # type:ignore[override]
1317
+ self,
1318
+ ) -> Tuple[str, ...]:
1319
+ return self.from_
1320
+
1321
+ @property
1322
+ def to_revisions_no_deps( # type:ignore[override]
1323
+ self,
1324
+ ) -> Tuple[str, ...]:
1325
+ return self.to_
1326
+
1327
+ @property
1328
+ def delete_version_num(self) -> str:
1329
+ assert len(self.from_) == 1
1330
+ return self.from_[0]
1331
+
1332
+ @property
1333
+ def insert_version_num(self) -> str:
1334
+ assert len(self.to_) == 1
1335
+ return self.to_[0]
1336
+
1337
+ def update_version_num(self, heads: Set[str]) -> Tuple[str, str]:
1338
+ assert len(self.from_) == 1
1339
+ assert len(self.to_) == 1
1340
+ return self.from_[0], self.to_[0]
1341
+
1342
+ def merge_branch_idents(
1343
+ self, heads: Union[Set[str], List[str]]
1344
+ ) -> Union[Tuple[List[Any], str, str], Tuple[List[str], str, str]]:
1345
+ return (
1346
+ # delete revs, update from rev, update to rev
1347
+ list(self.from_[0:-1]),
1348
+ self.from_[-1],
1349
+ self.to_[0],
1350
+ )
1351
+
1352
+ def unmerge_branch_idents(
1353
+ self, heads: Set[str]
1354
+ ) -> Tuple[str, str, List[str]]:
1355
+ return (
1356
+ # update from rev, update to rev, insert revs
1357
+ self.from_[0],
1358
+ self.to_[-1],
1359
+ list(self.to_[0:-1]),
1360
+ )
1361
+
1362
+ def should_delete_branch(self, heads: Set[str]) -> bool:
1363
+ # TODO: we probably need to look for self.to_ inside of heads,
1364
+ # in a similar manner as should_create_branch, however we have
1365
+ # no tests for this yet (stamp downgrades w/ branches)
1366
+ return self.is_downgrade and self.branch_move
1367
+
1368
+ def should_create_branch(self, heads: Set[str]) -> Union[Set[str], bool]:
1369
+ return (
1370
+ self.is_upgrade
1371
+ and (self.branch_move or set(self.from_).difference(heads))
1372
+ and set(self.to_).difference(heads)
1373
+ )
1374
+
1375
+ def should_merge_branches(self, heads: Set[str]) -> bool:
1376
+ return len(self.from_) > 1
1377
+
1378
+ def should_unmerge_branches(self, heads: Set[str]) -> bool:
1379
+ return len(self.to_) > 1
1380
+
1381
+ @property
1382
+ def info(self) -> MigrationInfo:
1383
+ up, down = (
1384
+ (self.to_, self.from_)
1385
+ if self.is_upgrade
1386
+ else (self.from_, self.to_)
1387
+ )
1388
+ assert self.revision_map is not None
1389
+ return MigrationInfo(
1390
+ revision_map=self.revision_map,
1391
+ up_revisions=up,
1392
+ down_revisions=down,
1393
+ is_upgrade=self.is_upgrade,
1394
+ is_stamp=True,
1395
+ )
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from .base import Script
+ from .base import ScriptDirectory
+
+ __all__ = ["ScriptDirectory", "Script"]
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/base.py ADDED
@@ -0,0 +1,1055 @@
1
+ from __future__ import annotations
2
+
3
+ from contextlib import contextmanager
4
+ import datetime
5
+ import os
6
+ from pathlib import Path
7
+ import re
8
+ import shutil
9
+ import sys
10
+ from types import ModuleType
11
+ from typing import Any
12
+ from typing import cast
13
+ from typing import Iterator
14
+ from typing import List
15
+ from typing import Optional
16
+ from typing import Sequence
17
+ from typing import Set
18
+ from typing import Tuple
19
+ from typing import TYPE_CHECKING
20
+ from typing import Union
21
+
22
+ from . import revision
23
+ from . import write_hooks
24
+ from .. import util
25
+ from ..runtime import migration
26
+ from ..util import compat
27
+ from ..util import not_none
28
+ from ..util.pyfiles import _preserving_path_as_str
29
+
30
+ if TYPE_CHECKING:
31
+ from .revision import _GetRevArg
32
+ from .revision import _RevIdType
33
+ from .revision import Revision
34
+ from ..config import Config
35
+ from ..config import MessagingOptions
36
+ from ..config import PostWriteHookConfig
37
+ from ..runtime.migration import RevisionStep
38
+ from ..runtime.migration import StampStep
39
+
40
+ try:
41
+ if compat.py39:
42
+ from zoneinfo import ZoneInfo
43
+ from zoneinfo import ZoneInfoNotFoundError
44
+ else:
45
+ from backports.zoneinfo import ZoneInfo # type: ignore[import-not-found,no-redef] # noqa: E501
46
+ from backports.zoneinfo import ZoneInfoNotFoundError # type: ignore[no-redef] # noqa: E501
47
+ except ImportError:
48
+ ZoneInfo = None # type: ignore[assignment, misc]
49
+
50
+ _sourceless_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)(c|o)?$")
51
+ _only_source_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)$")
52
+ _legacy_rev = re.compile(r"([a-f0-9]+)\.py$")
53
+ _slug_re = re.compile(r"\w+")
54
+ _default_file_template = "%(rev)s_%(slug)s"
55
+
56
+
57
+ class ScriptDirectory:
58
+ """Provides operations upon an Alembic script directory.
59
+
60
+ This object is useful to get information as to current revisions,
61
+ most notably being able to get at the "head" revision, for schemes
62
+ that want to test if the current revision in the database is the most
63
+ recent::
64
+
65
+ from alembic.script import ScriptDirectory
66
+ from alembic.config import Config
67
+ config = Config()
68
+ config.set_main_option("script_location", "myapp:migrations")
69
+ script = ScriptDirectory.from_config(config)
70
+
71
+ head_revision = script.get_current_head()
72
+
73
+
74
+
75
+ """
76
+
77
+ def __init__(
78
+ self,
79
+ dir: Union[str, os.PathLike[str]], # noqa: A002
80
+ file_template: str = _default_file_template,
81
+ truncate_slug_length: Optional[int] = 40,
82
+ version_locations: Optional[
83
+ Sequence[Union[str, os.PathLike[str]]]
84
+ ] = None,
85
+ sourceless: bool = False,
86
+ output_encoding: str = "utf-8",
87
+ timezone: Optional[str] = None,
88
+ hooks: list[PostWriteHookConfig] = [],
89
+ recursive_version_locations: bool = False,
90
+ messaging_opts: MessagingOptions = cast(
91
+ "MessagingOptions", util.EMPTY_DICT
92
+ ),
93
+ ) -> None:
94
+ self.dir = _preserving_path_as_str(dir)
95
+ self.version_locations = [
96
+ _preserving_path_as_str(p) for p in version_locations or ()
97
+ ]
98
+ self.file_template = file_template
99
+ self.truncate_slug_length = truncate_slug_length or 40
100
+ self.sourceless = sourceless
101
+ self.output_encoding = output_encoding
102
+ self.revision_map = revision.RevisionMap(self._load_revisions)
103
+ self.timezone = timezone
104
+ self.hooks = hooks
105
+ self.recursive_version_locations = recursive_version_locations
106
+ self.messaging_opts = messaging_opts
107
+
108
+ if not os.access(dir, os.F_OK):
109
+ raise util.CommandError(
110
+ f"Path doesn't exist: {dir}. Please use "
111
+ "the 'init' command to create a new "
112
+ "scripts folder."
113
+ )
114
+
115
+ @property
116
+ def versions(self) -> str:
117
+ """return a single version location based on the sole path passed
118
+ within version_locations.
119
+
120
+ If multiple version locations are configured, an error is raised.
121
+
122
+
123
+ """
124
+ return str(self._singular_version_location)
125
+
126
+ @util.memoized_property
127
+ def _singular_version_location(self) -> Path:
128
+ loc = self._version_locations
129
+ if len(loc) > 1:
130
+ raise util.CommandError("Multiple version_locations present")
131
+ else:
132
+ return loc[0]
133
+
134
+ @util.memoized_property
135
+ def _version_locations(self) -> Sequence[Path]:
136
+ if self.version_locations:
137
+ return [
138
+ util.coerce_resource_to_filename(location).absolute()
139
+ for location in self.version_locations
140
+ ]
141
+ else:
142
+ return [Path(self.dir, "versions").absolute()]
143
+
144
+ def _load_revisions(self) -> Iterator[Script]:
145
+ paths = [vers for vers in self._version_locations if vers.exists()]
146
+
147
+ dupes = set()
148
+ for vers in paths:
149
+ for file_path in Script._list_py_dir(self, vers):
150
+ real_path = file_path.resolve()
151
+ if real_path in dupes:
152
+ util.warn(
153
+ f"File {real_path} loaded twice! ignoring. "
154
+ "Please ensure version_locations is unique."
155
+ )
156
+ continue
157
+ dupes.add(real_path)
158
+
159
+ script = Script._from_path(self, real_path)
160
+ if script is None:
161
+ continue
162
+ yield script
163
+
164
+ @classmethod
165
+ def from_config(cls, config: Config) -> ScriptDirectory:
166
+ """Produce a new :class:`.ScriptDirectory` given a :class:`.Config`
167
+ instance.
168
+
169
+ The :class:`.Config` need only have the ``script_location`` key
170
+ present.
171
+
172
+ """
173
+ script_location = config.get_alembic_option("script_location")
174
+ if script_location is None:
175
+ raise util.CommandError(
176
+ "No 'script_location' key found in configuration."
177
+ )
178
+ truncate_slug_length: Optional[int]
179
+ tsl = config.get_alembic_option("truncate_slug_length")
180
+ if tsl is not None:
181
+ truncate_slug_length = int(tsl)
182
+ else:
183
+ truncate_slug_length = None
184
+
185
+ prepend_sys_path = config.get_prepend_sys_paths_list()
186
+ if prepend_sys_path:
187
+ sys.path[:0] = prepend_sys_path
188
+
189
+ rvl = config.get_alembic_boolean_option("recursive_version_locations")
190
+ return ScriptDirectory(
191
+ util.coerce_resource_to_filename(script_location),
192
+ file_template=config.get_alembic_option(
193
+ "file_template", _default_file_template
194
+ ),
195
+ truncate_slug_length=truncate_slug_length,
196
+ sourceless=config.get_alembic_boolean_option("sourceless"),
197
+ output_encoding=config.get_alembic_option(
198
+ "output_encoding", "utf-8"
199
+ ),
200
+ version_locations=config.get_version_locations_list(),
201
+ timezone=config.get_alembic_option("timezone"),
202
+ hooks=config.get_hooks_list(),
203
+ recursive_version_locations=rvl,
204
+ messaging_opts=config.messaging_opts,
205
+ )
206
+
207
+ @contextmanager
208
+ def _catch_revision_errors(
209
+ self,
210
+ ancestor: Optional[str] = None,
211
+ multiple_heads: Optional[str] = None,
212
+ start: Optional[str] = None,
213
+ end: Optional[str] = None,
214
+ resolution: Optional[str] = None,
215
+ ) -> Iterator[None]:
216
+ try:
217
+ yield
218
+ except revision.RangeNotAncestorError as rna:
219
+ if start is None:
220
+ start = cast(Any, rna.lower)
221
+ if end is None:
222
+ end = cast(Any, rna.upper)
223
+ if not ancestor:
224
+ ancestor = (
225
+ "Requested range %(start)s:%(end)s does not refer to "
226
+ "ancestor/descendant revisions along the same branch"
227
+ )
228
+ ancestor = ancestor % {"start": start, "end": end}
229
+ raise util.CommandError(ancestor) from rna
230
+ except revision.MultipleHeads as mh:
231
+ if not multiple_heads:
232
+ multiple_heads = (
233
+ "Multiple head revisions are present for given "
234
+ "argument '%(head_arg)s'; please "
235
+ "specify a specific target revision, "
236
+ "'<branchname>@%(head_arg)s' to "
237
+ "narrow to a specific head, or 'heads' for all heads"
238
+ )
239
+ multiple_heads = multiple_heads % {
240
+ "head_arg": end or mh.argument,
241
+ "heads": util.format_as_comma(mh.heads),
242
+ }
243
+ raise util.CommandError(multiple_heads) from mh
244
+ except revision.ResolutionError as re:
245
+ if resolution is None:
246
+ resolution = "Can't locate revision identified by '%s'" % (
247
+ re.argument
248
+ )
249
+ raise util.CommandError(resolution) from re
250
+ except revision.RevisionError as err:
251
+ raise util.CommandError(err.args[0]) from err
252
+
253
+ def walk_revisions(
254
+ self, base: str = "base", head: str = "heads"
255
+ ) -> Iterator[Script]:
256
+ """Iterate through all revisions.
257
+
258
+ :param base: the base revision, or "base" to start from the
259
+ empty revision.
260
+
261
+ :param head: the head revision; defaults to "heads" to indicate
262
+ all head revisions. May also be "head" to indicate a single
263
+ head revision.
264
+
265
+ """
266
+ with self._catch_revision_errors(start=base, end=head):
267
+ for rev in self.revision_map.iterate_revisions(
268
+ head, base, inclusive=True, assert_relative_length=False
269
+ ):
270
+ yield cast(Script, rev)
271
+
272
+ def get_revisions(self, id_: _GetRevArg) -> Tuple[Script, ...]:
273
+ """Return the :class:`.Script` instance with the given rev identifier,
274
+ symbolic name, or sequence of identifiers.
275
+
276
+ """
277
+ with self._catch_revision_errors():
278
+ return cast(
279
+ Tuple[Script, ...],
280
+ self.revision_map.get_revisions(id_),
281
+ )
282
+
283
+ def get_all_current(self, id_: Tuple[str, ...]) -> Set[Script]:
284
+ with self._catch_revision_errors():
285
+ return cast(Set[Script], self.revision_map._get_all_current(id_))
286
+
287
+ def get_revision(self, id_: str) -> Script:
288
+ """Return the :class:`.Script` instance with the given rev id.
289
+
290
+ .. seealso::
291
+
292
+ :meth:`.ScriptDirectory.get_revisions`
293
+
294
+ """
295
+
296
+ with self._catch_revision_errors():
297
+ return cast(Script, self.revision_map.get_revision(id_))
298
+
299
+ def as_revision_number(
300
+ self, id_: Optional[str]
301
+ ) -> Optional[Union[str, Tuple[str, ...]]]:
302
+ """Convert a symbolic revision, i.e. 'head' or 'base', into
303
+ an actual revision number."""
304
+
305
+ with self._catch_revision_errors():
306
+ rev, branch_name = self.revision_map._resolve_revision_number(id_)
307
+
308
+ if not rev:
309
+ # convert () to None
310
+ return None
311
+ elif id_ == "heads":
312
+ return rev
313
+ else:
314
+ return rev[0]
315
+
316
+ def iterate_revisions(
317
+ self,
318
+ upper: Union[str, Tuple[str, ...], None],
319
+ lower: Union[str, Tuple[str, ...], None],
320
+ **kw: Any,
321
+ ) -> Iterator[Script]:
322
+ """Iterate through script revisions, starting at the given
323
+ upper revision identifier and ending at the lower.
324
+
325
+ The traversal uses strictly the `down_revision`
326
+ marker inside each migration script, so
327
+ it is a requirement that upper >= lower,
328
+ else you'll get nothing back.
329
+
330
+ The iterator yields :class:`.Script` objects.
331
+
332
+ .. seealso::
333
+
334
+ :meth:`.RevisionMap.iterate_revisions`
335
+
336
+ """
337
+ return cast(
338
+ Iterator[Script],
339
+ self.revision_map.iterate_revisions(upper, lower, **kw),
340
+ )
341
+
342
+ def get_current_head(self) -> Optional[str]:
343
+ """Return the current head revision.
344
+
345
+ If the script directory has multiple heads
346
+ due to branching, an error is raised;
347
+ :meth:`.ScriptDirectory.get_heads` should be
348
+ preferred.
349
+
350
+ :return: a string revision number.
351
+
352
+ .. seealso::
353
+
354
+ :meth:`.ScriptDirectory.get_heads`
355
+
356
+ """
357
+ with self._catch_revision_errors(
358
+ multiple_heads=(
359
+ "The script directory has multiple heads (due to branching)."
360
+ "Please use get_heads(), or merge the branches using "
361
+ "alembic merge."
362
+ )
363
+ ):
364
+ return self.revision_map.get_current_head()
365
+
366
+ def get_heads(self) -> List[str]:
367
+ """Return all "versioned head" revisions as strings.
368
+
369
+ This is normally a list of length one,
370
+ unless branches are present. The
371
+ :meth:`.ScriptDirectory.get_current_head()` method
372
+ can be used normally when a script directory
373
+ has only one head.
374
+
375
+ :return: a tuple of string revision numbers.
376
+ """
377
+ return list(self.revision_map.heads)
378
+
379
+ def get_base(self) -> Optional[str]:
380
+ """Return the "base" revision as a string.
381
+
382
+ This is the revision number of the script that
383
+ has a ``down_revision`` of None.
384
+
385
+ If the script directory has multiple bases, an error is raised;
386
+ :meth:`.ScriptDirectory.get_bases` should be
387
+ preferred.
388
+
389
+ """
390
+ bases = self.get_bases()
391
+ if len(bases) > 1:
392
+ raise util.CommandError(
393
+ "The script directory has multiple bases. "
394
+ "Please use get_bases()."
395
+ )
396
+ elif bases:
397
+ return bases[0]
398
+ else:
399
+ return None
400
+
401
+ def get_bases(self) -> List[str]:
402
+ """return all "base" revisions as strings.
403
+
404
+ This is the revision number of all scripts that
405
+ have a ``down_revision`` of None.
406
+
407
+ """
408
+ return list(self.revision_map.bases)
409
+
410
+ def _upgrade_revs(
411
+ self, destination: str, current_rev: str
412
+ ) -> List[RevisionStep]:
413
+ with self._catch_revision_errors(
414
+ ancestor="Destination %(end)s is not a valid upgrade "
415
+ "target from current head(s)",
416
+ end=destination,
417
+ ):
418
+ revs = self.iterate_revisions(
419
+ destination, current_rev, implicit_base=True
420
+ )
421
+ return [
422
+ migration.MigrationStep.upgrade_from_script(
423
+ self.revision_map, script
424
+ )
425
+ for script in reversed(list(revs))
426
+ ]
427
+
428
+ def _downgrade_revs(
429
+ self, destination: str, current_rev: Optional[str]
430
+ ) -> List[RevisionStep]:
431
+ with self._catch_revision_errors(
432
+ ancestor="Destination %(end)s is not a valid downgrade "
433
+ "target from current head(s)",
434
+ end=destination,
435
+ ):
436
+ revs = self.iterate_revisions(
437
+ current_rev, destination, select_for_downgrade=True
438
+ )
439
+ return [
440
+ migration.MigrationStep.downgrade_from_script(
441
+ self.revision_map, script
442
+ )
443
+ for script in revs
444
+ ]
445
+
446
+ def _stamp_revs(
447
+ self, revision: _RevIdType, heads: _RevIdType
448
+ ) -> List[StampStep]:
449
+ with self._catch_revision_errors(
450
+ multiple_heads="Multiple heads are present; please specify a "
451
+ "single target revision"
452
+ ):
453
+ heads_revs = self.get_revisions(heads)
454
+
455
+ steps = []
456
+
457
+ if not revision:
458
+ revision = "base"
459
+
460
+ filtered_heads: List[Script] = []
461
+ for rev in util.to_tuple(revision):
462
+ if rev:
463
+ filtered_heads.extend(
464
+ self.revision_map.filter_for_lineage(
465
+ cast(Sequence[Script], heads_revs),
466
+ rev,
467
+ include_dependencies=True,
468
+ )
469
+ )
470
+ filtered_heads = util.unique_list(filtered_heads)
471
+
472
+ dests = self.get_revisions(revision) or [None]
473
+
474
+ for dest in dests:
475
+ if dest is None:
476
+ # dest is 'base'. Return a "delete branch" migration
477
+ # for all applicable heads.
478
+ steps.extend(
479
+ [
480
+ migration.StampStep(
481
+ head.revision,
482
+ None,
483
+ False,
484
+ True,
485
+ self.revision_map,
486
+ )
487
+ for head in filtered_heads
488
+ ]
489
+ )
490
+ continue
491
+ elif dest in filtered_heads:
492
+ # the dest is already in the version table, do nothing.
493
+ continue
494
+
495
+ # figure out if the dest is a descendant or an
496
+ # ancestor of the selected nodes
497
+ descendants = set(
498
+ self.revision_map._get_descendant_nodes([dest])
499
+ )
500
+ ancestors = set(self.revision_map._get_ancestor_nodes([dest]))
501
+
502
+ if descendants.intersection(filtered_heads):
503
+ # heads are above the target, so this is a downgrade.
504
+ # we can treat them as a "merge", single step.
505
+ assert not ancestors.intersection(filtered_heads)
506
+ todo_heads = [head.revision for head in filtered_heads]
507
+ step = migration.StampStep(
508
+ todo_heads,
509
+ dest.revision,
510
+ False,
511
+ False,
512
+ self.revision_map,
513
+ )
514
+ steps.append(step)
515
+ continue
516
+ elif ancestors.intersection(filtered_heads):
517
+ # heads are below the target, so this is an upgrade.
518
+ # we can treat them as a "merge", single step.
519
+ todo_heads = [head.revision for head in filtered_heads]
520
+ step = migration.StampStep(
521
+ todo_heads,
522
+ dest.revision,
523
+ True,
524
+ False,
525
+ self.revision_map,
526
+ )
527
+ steps.append(step)
528
+ continue
529
+ else:
530
+ # destination is in a branch not represented,
531
+ # treat it as new branch
532
+ step = migration.StampStep(
533
+ (), dest.revision, True, True, self.revision_map
534
+ )
535
+ steps.append(step)
536
+ continue
537
+
538
+ return steps
539
+
540
+ def run_env(self) -> None:
541
+ """Run the script environment.
542
+
543
+ This basically runs the ``env.py`` script present
544
+ in the migration environment. It is called exclusively
545
+ by the command functions in :mod:`alembic.command`.
546
+
547
+
548
+ """
549
+ util.load_python_file(self.dir, "env.py")
550
+
551
+ @property
552
+ def env_py_location(self) -> str:
553
+ return str(Path(self.dir, "env.py"))
554
+
555
+ def _append_template(self, src: Path, dest: Path, **kw: Any) -> None:
556
+ with util.status(
557
+ f"Appending to existing {dest.absolute()}",
558
+ **self.messaging_opts,
559
+ ):
560
+ util.template_to_file(
561
+ src,
562
+ dest,
563
+ self.output_encoding,
564
+ append_with_newlines=True,
565
+ **kw,
566
+ )
567
+
568
+ def _generate_template(self, src: Path, dest: Path, **kw: Any) -> None:
569
+ with util.status(
570
+ f"Generating {dest.absolute()}", **self.messaging_opts
571
+ ):
572
+ util.template_to_file(src, dest, self.output_encoding, **kw)
573
+
574
+ def _copy_file(self, src: Path, dest: Path) -> None:
575
+ with util.status(
576
+ f"Generating {dest.absolute()}", **self.messaging_opts
577
+ ):
578
+ shutil.copy(src, dest)
579
+
580
+ def _ensure_directory(self, path: Path) -> None:
581
+ path = path.absolute()
582
+ if not path.exists():
583
+ with util.status(
584
+ f"Creating directory {path}", **self.messaging_opts
585
+ ):
586
+ os.makedirs(path)
587
+
588
+ def _generate_create_date(self) -> datetime.datetime:
589
+ if self.timezone is not None:
590
+ if ZoneInfo is None:
591
+ raise util.CommandError(
592
+ "Python >= 3.9 is required for timezone support or "
593
+ "the 'backports.zoneinfo' package must be installed."
594
+ )
595
+ # First, assume correct capitalization
596
+ try:
597
+ tzinfo = ZoneInfo(self.timezone)
598
+ except ZoneInfoNotFoundError:
599
+ tzinfo = None
600
+ if tzinfo is None:
601
+ try:
602
+ tzinfo = ZoneInfo(self.timezone.upper())
603
+ except ZoneInfoNotFoundError:
604
+ raise util.CommandError(
605
+ "Can't locate timezone: %s" % self.timezone
606
+ ) from None
607
+
608
+ create_date = datetime.datetime.now(
609
+ tz=datetime.timezone.utc
610
+ ).astimezone(tzinfo)
611
+ else:
612
+ create_date = datetime.datetime.now()
613
+ return create_date
614
+
615
+ def generate_revision(
616
+ self,
617
+ revid: str,
618
+ message: Optional[str],
619
+ head: Optional[_RevIdType] = None,
620
+ splice: Optional[bool] = False,
621
+ branch_labels: Optional[_RevIdType] = None,
622
+ version_path: Union[str, os.PathLike[str], None] = None,
623
+ file_template: Optional[str] = None,
624
+ depends_on: Optional[_RevIdType] = None,
625
+ **kw: Any,
626
+ ) -> Optional[Script]:
627
+ """Generate a new revision file.
628
+
629
+ This runs the ``script.py.mako`` template, given
630
+ template arguments, and creates a new file.
631
+
632
+ :param revid: String revision id. Typically this
633
+ comes from ``alembic.util.rev_id()``.
634
+ :param message: the revision message, the one passed
635
+ by the -m argument to the ``revision`` command.
636
+ :param head: the head revision to generate against. Defaults
637
+ to the current "head" if no branches are present, else raises
638
+ an exception.
639
+ :param splice: if True, allow the "head" version to not be an
640
+ actual head; otherwise, the selected head must be a head
641
+ (e.g. endpoint) revision.
642
+
643
+ """
644
+ if head is None:
645
+ head = "head"
646
+
647
+ try:
648
+ Script.verify_rev_id(revid)
649
+ except revision.RevisionError as err:
650
+ raise util.CommandError(err.args[0]) from err
651
+
652
+ with self._catch_revision_errors(
653
+ multiple_heads=(
654
+ "Multiple heads are present; please specify the head "
655
+ "revision on which the new revision should be based, "
656
+ "or perform a merge."
657
+ )
658
+ ):
659
+ heads = cast(
660
+ Tuple[Optional["Revision"], ...],
661
+ self.revision_map.get_revisions(head),
662
+ )
663
+ for h in heads:
664
+ assert h != "base" # type: ignore[comparison-overlap]
665
+
666
+ if len(set(heads)) != len(heads):
667
+ raise util.CommandError("Duplicate head revisions specified")
668
+
669
+ create_date = self._generate_create_date()
670
+
671
+ if version_path is None:
672
+ if len(self._version_locations) > 1:
673
+ for head_ in heads:
674
+ if head_ is not None:
675
+ assert isinstance(head_, Script)
676
+ version_path = head_._script_path.parent
677
+ break
678
+ else:
679
+ raise util.CommandError(
680
+ "Multiple version locations present, "
681
+ "please specify --version-path"
682
+ )
683
+ else:
684
+ version_path = self._singular_version_location
685
+ else:
686
+ version_path = Path(version_path)
687
+
688
+ assert isinstance(version_path, Path)
689
+ norm_path = version_path.absolute()
690
+ for vers_path in self._version_locations:
691
+ if vers_path.absolute() == norm_path:
692
+ break
693
+ else:
694
+ raise util.CommandError(
695
+ f"Path {version_path} is not represented in current "
696
+ "version locations"
697
+ )
698
+
699
+ if self.version_locations:
700
+ self._ensure_directory(version_path)
701
+
702
+ path = self._rev_path(version_path, revid, message, create_date)
703
+
704
+ if not splice:
705
+ for head_ in heads:
706
+ if head_ is not None and not head_.is_head:
707
+ raise util.CommandError(
708
+ "Revision %s is not a head revision; please specify "
709
+ "--splice to create a new branch from this revision"
710
+ % head_.revision
711
+ )
712
+
713
+ resolved_depends_on: Optional[List[str]]
714
+ if depends_on:
715
+ with self._catch_revision_errors():
716
+ resolved_depends_on = [
717
+ (
718
+ dep
719
+ if dep in rev.branch_labels # maintain branch labels
720
+ else rev.revision
721
+ ) # resolve partial revision identifiers
722
+ for rev, dep in [
723
+ (not_none(self.revision_map.get_revision(dep)), dep)
724
+ for dep in util.to_list(depends_on)
725
+ ]
726
+ ]
727
+ else:
728
+ resolved_depends_on = None
729
+
730
+ self._generate_template(
731
+ Path(self.dir, "script.py.mako"),
732
+ path,
733
+ up_revision=str(revid),
734
+ down_revision=revision.tuple_rev_as_scalar(
735
+ tuple(h.revision if h is not None else None for h in heads)
736
+ ),
737
+ branch_labels=util.to_tuple(branch_labels),
738
+ depends_on=revision.tuple_rev_as_scalar(resolved_depends_on),
739
+ create_date=create_date,
740
+ comma=util.format_as_comma,
741
+ message=message if message is not None else ("empty message"),
742
+ **kw,
743
+ )
744
+
745
+ post_write_hooks = self.hooks
746
+ if post_write_hooks:
747
+ write_hooks._run_hooks(path, post_write_hooks)
748
+
749
+ try:
750
+ script = Script._from_path(self, path)
751
+ except revision.RevisionError as err:
752
+ raise util.CommandError(err.args[0]) from err
753
+ if script is None:
754
+ return None
755
+ if branch_labels and not script.branch_labels:
756
+ raise util.CommandError(
757
+ "Version %s specified branch_labels %s, however the "
758
+ "migration file %s does not have them; have you upgraded "
759
+ "your script.py.mako to include the "
760
+ "'branch_labels' section?"
761
+ % (script.revision, branch_labels, script.path)
762
+ )
763
+ self.revision_map.add_revision(script)
764
+ return script
765
+
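+ # Illustrative sketch (not part of the upstream source): a typical
+ # call, roughly what the ``alembic revision`` command performs,
+ # assuming ``script_dir`` is a configured ScriptDirectory:
+ #
+ #     from alembic.util import rev_id
+ #     new_script = script_dir.generate_revision(
+ #         rev_id(), "add account table", head="head"
+ #     )
+ #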
766
+ def _rev_path(
767
+ self,
768
+ path: Union[str, os.PathLike[str]],
769
+ rev_id: str,
770
+ message: Optional[str],
771
+ create_date: datetime.datetime,
772
+ ) -> Path:
773
+ epoch = int(create_date.timestamp())
774
+ slug = "_".join(_slug_re.findall(message or "")).lower()
775
+ if len(slug) > self.truncate_slug_length:
776
+ slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_"
777
+ filename = "%s.py" % (
778
+ self.file_template
779
+ % {
780
+ "rev": rev_id,
781
+ "slug": slug,
782
+ "epoch": epoch,
783
+ "year": create_date.year,
784
+ "month": create_date.month,
785
+ "day": create_date.day,
786
+ "hour": create_date.hour,
787
+ "minute": create_date.minute,
788
+ "second": create_date.second,
789
+ }
790
+ )
791
+ return Path(path) / filename
792
+
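+ # Illustrative sketch (not part of the upstream source): with the
+ # default file_template of "%(rev)s_%(slug)s", a call like
+ # _rev_path(versions_dir, "ae1027a6acf", "add a column", now) yields
+ # a path ending in "ae1027a6acf_add_a_column.py".
+ #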
793
+
794
+ class Script(revision.Revision):
795
+ """Represent a single revision file in a ``versions/`` directory.
796
+
797
+ The :class:`.Script` instance is returned by methods
798
+ such as :meth:`.ScriptDirectory.iterate_revisions`.
799
+
800
+ """
801
+
802
+ def __init__(
803
+ self,
804
+ module: ModuleType,
805
+ rev_id: str,
806
+ path: Union[str, os.PathLike[str]],
807
+ ):
808
+ self.module = module
809
+ self.path = _preserving_path_as_str(path)
810
+ super().__init__(
811
+ rev_id,
812
+ module.down_revision,
813
+ branch_labels=util.to_tuple(
814
+ getattr(module, "branch_labels", None), default=()
815
+ ),
816
+ dependencies=util.to_tuple(
817
+ getattr(module, "depends_on", None), default=()
818
+ ),
819
+ )
820
+
821
+ module: ModuleType
822
+ """The Python module representing the actual script itself."""
823
+
824
+ path: str
825
+ """Filesystem path of the script."""
826
+
827
+ @property
828
+ def _script_path(self) -> Path:
829
+ return Path(self.path)
830
+
831
+ _db_current_indicator: Optional[bool] = None
832
+ """Utility variable which when set will cause string output to indicate
833
+ this is a "current" version in some database"""
834
+
835
+ @property
836
+ def doc(self) -> str:
837
+ """Return the docstring given in the script."""
838
+
839
+ return re.split("\n\n", self.longdoc)[0]
840
+
841
+ @property
842
+ def longdoc(self) -> str:
843
+ """Return the docstring given in the script."""
844
+
845
+ doc = self.module.__doc__
846
+ if doc:
847
+ if hasattr(self.module, "_alembic_source_encoding"):
848
+ doc = doc.decode( # type: ignore[attr-defined]
849
+ self.module._alembic_source_encoding
850
+ )
851
+ return doc.strip()
852
+ else:
853
+ return ""
854
+
855
+ @property
856
+ def log_entry(self) -> str:
857
+ entry = "Rev: %s%s%s%s%s\n" % (
858
+ self.revision,
859
+ " (head)" if self.is_head else "",
860
+ " (branchpoint)" if self.is_branch_point else "",
861
+ " (mergepoint)" if self.is_merge_point else "",
862
+ " (current)" if self._db_current_indicator else "",
863
+ )
864
+ if self.is_merge_point:
865
+ entry += "Merges: %s\n" % (self._format_down_revision(),)
866
+ else:
867
+ entry += "Parent: %s\n" % (self._format_down_revision(),)
868
+
869
+ if self.dependencies:
870
+ entry += "Also depends on: %s\n" % (
871
+ util.format_as_comma(self.dependencies)
872
+ )
873
+
874
+ if self.is_branch_point:
875
+ entry += "Branches into: %s\n" % (
876
+ util.format_as_comma(self.nextrev)
877
+ )
878
+
879
+ if self.branch_labels:
880
+ entry += "Branch names: %s\n" % (
881
+ util.format_as_comma(self.branch_labels),
882
+ )
883
+
884
+ entry += "Path: %s\n" % (self.path,)
885
+
886
+ entry += "\n%s\n" % (
887
+ "\n".join(" %s" % para for para in self.longdoc.splitlines())
888
+ )
889
+ return entry
890
+
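+ # Illustrative sketch (not part of the upstream source): a
+ # representative rendering for a head revision with one parent:
+ #
+ #     Rev: ae1027a6acf (head)
+ #     Parent: 1975ea83b712
+ #     Path: versions/ae1027a6acf_add_a_column.py
+ #
+ #         add a column
+ #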
891
+ def __str__(self) -> str:
892
+ return "%s -> %s%s%s%s, %s" % (
893
+ self._format_down_revision(),
894
+ self.revision,
895
+ " (head)" if self.is_head else "",
896
+ " (branchpoint)" if self.is_branch_point else "",
897
+ " (mergepoint)" if self.is_merge_point else "",
898
+ self.doc,
899
+ )
900
+
901
+ def _head_only(
902
+ self,
903
+ include_branches: bool = False,
904
+ include_doc: bool = False,
905
+ include_parents: bool = False,
906
+ tree_indicators: bool = True,
907
+ head_indicators: bool = True,
908
+ ) -> str:
909
+ text = self.revision
910
+ if include_parents:
911
+ if self.dependencies:
912
+ text = "%s (%s) -> %s" % (
913
+ self._format_down_revision(),
914
+ util.format_as_comma(self.dependencies),
915
+ text,
916
+ )
917
+ else:
918
+ text = "%s -> %s" % (self._format_down_revision(), text)
919
+ assert text is not None
920
+ if include_branches and self.branch_labels:
921
+ text += " (%s)" % util.format_as_comma(self.branch_labels)
922
+ if head_indicators or tree_indicators:
923
+ text += "%s%s%s" % (
924
+ " (head)" if self._is_real_head else "",
925
+ (
926
+ " (effective head)"
927
+ if self.is_head and not self._is_real_head
928
+ else ""
929
+ ),
930
+ " (current)" if self._db_current_indicator else "",
931
+ )
932
+ if tree_indicators:
933
+ text += "%s%s" % (
934
+ " (branchpoint)" if self.is_branch_point else "",
935
+ " (mergepoint)" if self.is_merge_point else "",
936
+ )
937
+ if include_doc:
938
+ text += ", %s" % self.doc
939
+ return text
940
+
941
+ def cmd_format(
942
+ self,
943
+ verbose: bool,
944
+ include_branches: bool = False,
945
+ include_doc: bool = False,
946
+ include_parents: bool = False,
947
+ tree_indicators: bool = True,
948
+ ) -> str:
949
+ if verbose:
950
+ return self.log_entry
951
+ else:
952
+ return self._head_only(
953
+ include_branches, include_doc, include_parents, tree_indicators
954
+ )
955
+
956
+ def _format_down_revision(self) -> str:
957
+ if not self.down_revision:
958
+ return "<base>"
959
+ else:
960
+ return util.format_as_comma(self._versioned_down_revisions)
961
+
962
+ @classmethod
963
+ def _list_py_dir(
964
+ cls, scriptdir: ScriptDirectory, path: Path
965
+ ) -> List[Path]:
966
+ paths = []
967
+ for root, dirs, files in compat.path_walk(path, top_down=True):
968
+ if root.name.endswith("__pycache__"):
969
+ # a special case - we may include these files
970
+ # if a `sourceless` option is specified
971
+ continue
972
+
973
+ for filename in sorted(files):
974
+ paths.append(root / filename)
975
+
976
+ if scriptdir.sourceless:
977
+ # look for __pycache__
978
+ py_cache_path = root / "__pycache__"
979
+ if py_cache_path.exists():
980
+ # add all files from __pycache__ whose filename is not
981
+ # already in the names we got from the version directory.
982
+ # add as relative paths including __pycache__ token
983
+ names = {
984
+ Path(filename).name.split(".")[0] for filename in files
985
+ }
986
+ paths.extend(
987
+ py_cache_path / pyc
988
+ for pyc in py_cache_path.iterdir()
989
+ if pyc.name.split(".")[0] not in names
990
+ )
991
+
992
+ if not scriptdir.recursive_version_locations:
993
+ break
994
+
995
+ # the real script order is defined by revision, but it may be
+ # undefined if many files share the same `down_revision`; for a
+ # better user experience (e.g. when debugging), we use a
+ # deterministic order
999
+ dirs.sort()
1000
+
1001
+ return paths
1002
+
1003
+ @classmethod
1004
+ def _from_path(
1005
+ cls, scriptdir: ScriptDirectory, path: Union[str, os.PathLike[str]]
1006
+ ) -> Optional[Script]:
1007
+
1008
+ path = Path(path)
1009
+ dir_, filename = path.parent, path.name
1010
+
1011
+ if scriptdir.sourceless:
1012
+ py_match = _sourceless_rev_file.match(filename)
1013
+ else:
1014
+ py_match = _only_source_rev_file.match(filename)
1015
+
1016
+ if not py_match:
1017
+ return None
1018
+
1019
+ py_filename = py_match.group(1)
1020
+
1021
+ if scriptdir.sourceless:
1022
+ is_c = py_match.group(2) == "c"
1023
+ is_o = py_match.group(2) == "o"
1024
+ else:
1025
+ is_c = is_o = False
1026
+
1027
+ if is_o or is_c:
1028
+ py_exists = (dir_ / py_filename).exists()
1029
+ pyc_exists = (dir_ / (py_filename + "c")).exists()
1030
+
1031
+ # prefer .py over .pyc because we'd like to get the
1032
+ # source encoding; prefer .pyc over .pyo because we'd like to
1033
+ # have the docstrings which a -OO file would not have
1034
+ if py_exists or is_o and pyc_exists:
1035
+ return None
1036
+
1037
+ module = util.load_python_file(dir_, filename)
1038
+
1039
+ if not hasattr(module, "revision"):
1040
+ # attempt to get the revision id from the script name,
1041
+ # this is for legacy only
1042
+ m = _legacy_rev.match(filename)
1043
+ if not m:
1044
+ raise util.CommandError(
1045
+ "Could not determine revision id from "
1046
+ f"filename {filename}. "
1047
+ "Be sure the 'revision' variable is "
1048
+ "declared inside the script (please see 'Upgrading "
1049
+ "from Alembic 0.1 to 0.2' in the documentation)."
1050
+ )
1051
+ else:
1052
+ revision = m.group(1)
1053
+ else:
1054
+ revision = module.revision
1055
+ return Script(module, revision, dir_ / filename)
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/revision.py ADDED
@@ -0,0 +1,1728 @@
1
+ from __future__ import annotations
2
+
3
+ import collections
4
+ import re
5
+ from typing import Any
6
+ from typing import Callable
7
+ from typing import cast
8
+ from typing import Collection
9
+ from typing import Deque
10
+ from typing import Dict
11
+ from typing import FrozenSet
12
+ from typing import Iterable
13
+ from typing import Iterator
14
+ from typing import List
15
+ from typing import Optional
16
+ from typing import overload
17
+ from typing import Protocol
18
+ from typing import Sequence
19
+ from typing import Set
20
+ from typing import Tuple
21
+ from typing import TYPE_CHECKING
22
+ from typing import TypeVar
23
+ from typing import Union
24
+
25
+ from sqlalchemy import util as sqlautil
26
+
27
+ from .. import util
28
+ from ..util import not_none
29
+
30
+ if TYPE_CHECKING:
31
+ from typing import Literal
32
+
33
+ _RevIdType = Union[str, List[str], Tuple[str, ...]]
34
+ _GetRevArg = Union[
35
+ str,
36
+ Iterable[Optional[str]],
37
+ Iterable[str],
38
+ ]
39
+ _RevisionIdentifierType = Union[str, Tuple[str, ...], None]
40
+ _RevisionOrStr = Union["Revision", str]
41
+ _RevisionOrBase = Union["Revision", "Literal['base']"]
42
+ _InterimRevisionMapType = Dict[str, "Revision"]
43
+ _RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]]
44
+ _T = TypeVar("_T")
45
+ _TR = TypeVar("_TR", bound=Optional[_RevisionOrStr])
46
+
47
+ _relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)")
48
+ _revision_illegal_chars = ["@", "-", "+"]
49
+
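+ # Illustrative sketch (not part of the upstream source): strings
+ # matched by _relative_destination above; the three groups are
+ # (branch_label, symbol, relative_steps):
+ #
+ #     _relative_destination.match("mybranch@head+2").groups()
+ #     # -> ("mybranch", "head", "+2")
+ #     _relative_destination.match("ae1027a6acf-1").groups()
+ #     # -> (None, "ae1027a6acf", "-1")
+ #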
50
+
51
+ class _CollectRevisionsProtocol(Protocol):
52
+ def __call__(
53
+ self,
54
+ upper: _RevisionIdentifierType,
55
+ lower: _RevisionIdentifierType,
56
+ inclusive: bool,
57
+ implicit_base: bool,
58
+ assert_relative_length: bool,
59
+ ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: ...
60
+
61
+
62
+ class RevisionError(Exception):
63
+ pass
64
+
65
+
66
+ class RangeNotAncestorError(RevisionError):
67
+ def __init__(
68
+ self, lower: _RevisionIdentifierType, upper: _RevisionIdentifierType
69
+ ) -> None:
70
+ self.lower = lower
71
+ self.upper = upper
72
+ super().__init__(
73
+ "Revision %s is not an ancestor of revision %s"
74
+ % (lower or "base", upper or "base")
75
+ )
76
+
77
+
78
+ class MultipleHeads(RevisionError):
79
+ def __init__(self, heads: Sequence[str], argument: Optional[str]) -> None:
80
+ self.heads = heads
81
+ self.argument = argument
82
+ super().__init__(
83
+ "Multiple heads are present for given argument '%s'; "
84
+ "%s" % (argument, ", ".join(heads))
85
+ )
86
+
87
+
88
+ class ResolutionError(RevisionError):
89
+ def __init__(self, message: str, argument: str) -> None:
90
+ super().__init__(message)
91
+ self.argument = argument
92
+
93
+
94
+ class CycleDetected(RevisionError):
95
+ kind = "Cycle"
96
+
97
+ def __init__(self, revisions: Sequence[str]) -> None:
98
+ self.revisions = revisions
99
+ super().__init__(
100
+ "%s is detected in revisions (%s)"
101
+ % (self.kind, ", ".join(revisions))
102
+ )
103
+
104
+
105
+ class DependencyCycleDetected(CycleDetected):
106
+ kind = "Dependency cycle"
107
+
108
+ def __init__(self, revisions: Sequence[str]) -> None:
109
+ super().__init__(revisions)
110
+
111
+
112
+ class LoopDetected(CycleDetected):
113
+ kind = "Self-loop"
114
+
115
+ def __init__(self, revision: str) -> None:
116
+ super().__init__([revision])
117
+
118
+
119
+ class DependencyLoopDetected(DependencyCycleDetected, LoopDetected):
120
+ kind = "Dependency self-loop"
121
+
122
+ def __init__(self, revision: Sequence[str]) -> None:
123
+ super().__init__(revision)
124
+
125
+
126
+ class RevisionMap:
127
+ """Maintains a map of :class:`.Revision` objects.
128
+
129
+ :class:`.RevisionMap` is used by :class:`.ScriptDirectory` to maintain
130
+ and traverse the collection of :class:`.Script` objects, which are
131
+ themselves instances of :class:`.Revision`.
132
+
133
+ """
134
+
135
+ def __init__(self, generator: Callable[[], Iterable[Revision]]) -> None:
136
+ """Construct a new :class:`.RevisionMap`.
137
+
138
+ :param generator: a zero-arg callable that will generate an iterable
139
+ of :class:`.Revision` instances to be used. These are typically
140
+ :class:`.Script` subclasses within regular Alembic use.
141
+
142
+ """
143
+ self._generator = generator
144
+
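+ # Illustrative sketch (not part of the upstream source): RevisionMap
+ # only needs a zero-argument callable that yields Revision objects,
+ # so a tiny two-revision map can be built directly:
+ #
+ #     revs = [Revision("a1", None), Revision("b2", "a1")]
+ #     rmap = RevisionMap(lambda: revs)
+ #     rmap.heads   # -> ("b2",)
+ #     rmap.bases   # -> ("a1",)
+ #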
145
+ @util.memoized_property
146
+ def heads(self) -> Tuple[str, ...]:
147
+ """All "head" revisions as strings.
148
+
149
+ This is normally a tuple of length one,
150
+ unless unmerged branches are present.
151
+
152
+ :return: a tuple of string revision numbers.
153
+
154
+ """
155
+ self._revision_map
156
+ return self.heads
157
+
158
+ @util.memoized_property
159
+ def bases(self) -> Tuple[str, ...]:
160
+ """All "base" revisions as strings.
161
+
162
+ These are revisions that have a ``down_revision`` of None,
163
+ or empty tuple.
164
+
165
+ :return: a tuple of string revision numbers.
166
+
167
+ """
168
+ self._revision_map
169
+ return self.bases
170
+
171
+ @util.memoized_property
172
+ def _real_heads(self) -> Tuple[str, ...]:
173
+ """All "real" head revisions as strings.
174
+
175
+ :return: a tuple of string revision numbers.
176
+
177
+ """
178
+ self._revision_map
179
+ return self._real_heads
180
+
181
+ @util.memoized_property
182
+ def _real_bases(self) -> Tuple[str, ...]:
183
+ """All "real" base revisions as strings.
184
+
185
+ :return: a tuple of string revision numbers.
186
+
187
+ """
188
+ self._revision_map
189
+ return self._real_bases
190
+
191
+ @util.memoized_property
192
+ def _revision_map(self) -> _RevisionMapType:
193
+ """memoized attribute, initializes the revision map from the
194
+ initial collection.
195
+
196
+ """
197
+ # Ordering required for some tests to pass (but not required in
198
+ # general)
199
+ map_: _InterimRevisionMapType = sqlautil.OrderedDict()
200
+
201
+ heads: Set[Revision] = sqlautil.OrderedSet()
202
+ _real_heads: Set[Revision] = sqlautil.OrderedSet()
203
+ bases: Tuple[Revision, ...] = ()
204
+ _real_bases: Tuple[Revision, ...] = ()
205
+
206
+ has_branch_labels = set()
207
+ all_revisions = set()
208
+
209
+ for revision in self._generator():
210
+ all_revisions.add(revision)
211
+
212
+ if revision.revision in map_:
213
+ util.warn(
214
+ "Revision %s is present more than once" % revision.revision
215
+ )
216
+ map_[revision.revision] = revision
217
+ if revision.branch_labels:
218
+ has_branch_labels.add(revision)
219
+
220
+ heads.add(revision)
221
+ _real_heads.add(revision)
222
+ if revision.is_base:
223
+ bases += (revision,)
224
+ if revision._is_real_base:
225
+ _real_bases += (revision,)
226
+
227
+ # add the branch_labels to the map_. We'll need these
228
+ # to resolve the dependencies.
229
+ rev_map = map_.copy()
230
+ self._map_branch_labels(
231
+ has_branch_labels, cast(_RevisionMapType, map_)
232
+ )
233
+
234
+ # resolve dependency names from branch labels and symbolic
235
+ # names
236
+ self._add_depends_on(all_revisions, cast(_RevisionMapType, map_))
237
+
238
+ for rev in map_.values():
239
+ for downrev in rev._all_down_revisions:
240
+ if downrev not in map_:
241
+ util.warn(
242
+ "Revision %s referenced from %s is not present"
243
+ % (downrev, rev)
244
+ )
245
+ down_revision = map_[downrev]
246
+ down_revision.add_nextrev(rev)
247
+ if downrev in rev._versioned_down_revisions:
248
+ heads.discard(down_revision)
249
+ _real_heads.discard(down_revision)
250
+
251
+ # once the map has downrevisions populated, the dependencies
252
+ # can be further refined to include only those which are not
253
+ # already ancestors
254
+ self._normalize_depends_on(all_revisions, cast(_RevisionMapType, map_))
255
+ self._detect_cycles(rev_map, heads, bases, _real_heads, _real_bases)
256
+
257
+ revision_map: _RevisionMapType = dict(map_.items())
258
+ revision_map[None] = revision_map[()] = None
259
+ self.heads = tuple(rev.revision for rev in heads)
260
+ self._real_heads = tuple(rev.revision for rev in _real_heads)
261
+ self.bases = tuple(rev.revision for rev in bases)
262
+ self._real_bases = tuple(rev.revision for rev in _real_bases)
263
+
264
+ self._add_branches(has_branch_labels, revision_map)
265
+ return revision_map
266
+
267
+ def _detect_cycles(
268
+ self,
269
+ rev_map: _InterimRevisionMapType,
270
+ heads: Set[Revision],
271
+ bases: Tuple[Revision, ...],
272
+ _real_heads: Set[Revision],
273
+ _real_bases: Tuple[Revision, ...],
274
+ ) -> None:
275
+ if not rev_map:
276
+ return
277
+ if not heads or not bases:
278
+ raise CycleDetected(list(rev_map))
279
+ total_space = {
280
+ rev.revision
281
+ for rev in self._iterate_related_revisions(
282
+ lambda r: r._versioned_down_revisions,
283
+ heads,
284
+ map_=cast(_RevisionMapType, rev_map),
285
+ )
286
+ }.intersection(
287
+ rev.revision
288
+ for rev in self._iterate_related_revisions(
289
+ lambda r: r.nextrev,
290
+ bases,
291
+ map_=cast(_RevisionMapType, rev_map),
292
+ )
293
+ )
294
+ deleted_revs = set(rev_map.keys()) - total_space
295
+ if deleted_revs:
296
+ raise CycleDetected(sorted(deleted_revs))
297
+
298
+ if not _real_heads or not _real_bases:
299
+ raise DependencyCycleDetected(list(rev_map))
300
+ total_space = {
301
+ rev.revision
302
+ for rev in self._iterate_related_revisions(
303
+ lambda r: r._all_down_revisions,
304
+ _real_heads,
305
+ map_=cast(_RevisionMapType, rev_map),
306
+ )
307
+ }.intersection(
308
+ rev.revision
309
+ for rev in self._iterate_related_revisions(
310
+ lambda r: r._all_nextrev,
311
+ _real_bases,
312
+ map_=cast(_RevisionMapType, rev_map),
313
+ )
314
+ )
315
+ deleted_revs = set(rev_map.keys()) - total_space
316
+ if deleted_revs:
317
+ raise DependencyCycleDetected(sorted(deleted_revs))
318
+
319
+ def _map_branch_labels(
320
+ self, revisions: Collection[Revision], map_: _RevisionMapType
321
+ ) -> None:
322
+ for revision in revisions:
323
+ if revision.branch_labels:
324
+ assert revision._orig_branch_labels is not None
325
+ for branch_label in revision._orig_branch_labels:
326
+ if branch_label in map_:
327
+ map_rev = map_[branch_label]
328
+ assert map_rev is not None
329
+ raise RevisionError(
330
+ "Branch name '%s' in revision %s already "
331
+ "used by revision %s"
332
+ % (
333
+ branch_label,
334
+ revision.revision,
335
+ map_rev.revision,
336
+ )
337
+ )
338
+ map_[branch_label] = revision
339
+
340
+ def _add_branches(
341
+ self, revisions: Collection[Revision], map_: _RevisionMapType
342
+ ) -> None:
343
+ for revision in revisions:
344
+ if revision.branch_labels:
345
+ revision.branch_labels.update(revision.branch_labels)
346
+ for node in self._get_descendant_nodes(
347
+ [revision], map_, include_dependencies=False
348
+ ):
349
+ node.branch_labels.update(revision.branch_labels)
350
+
351
+ parent = node
352
+ while (
353
+ parent
354
+ and not parent._is_real_branch_point
355
+ and not parent.is_merge_point
356
+ ):
357
+ parent.branch_labels.update(revision.branch_labels)
358
+ if parent.down_revision:
359
+ parent = map_[parent.down_revision]
360
+ else:
361
+ break
362
+
363
+ def _add_depends_on(
364
+ self, revisions: Collection[Revision], map_: _RevisionMapType
365
+ ) -> None:
366
+ """Resolve the 'dependencies' for each revision in a collection
367
+ in terms of actual revision ids, as opposed to branch labels or other
368
+ symbolic names.
369
+
370
+ The collection is then assigned to the _resolved_dependencies
371
+ attribute on each revision object.
372
+
373
+ """
374
+
375
+ for revision in revisions:
376
+ if revision.dependencies:
377
+ deps = [
378
+ map_[dep] for dep in util.to_tuple(revision.dependencies)
379
+ ]
380
+ revision._resolved_dependencies = tuple(
381
+ [d.revision for d in deps if d is not None]
382
+ )
383
+ else:
384
+ revision._resolved_dependencies = ()
385
+
386
+ def _normalize_depends_on(
387
+ self, revisions: Collection[Revision], map_: _RevisionMapType
388
+ ) -> None:
389
+ """Create a collection of "dependencies" that omits dependencies
390
+ that are already ancestor nodes for each revision in a given
391
+ collection.
392
+
393
+ This builds upon the _resolved_dependencies collection created in the
394
+ _add_depends_on() method, looking in the fully populated revision map
395
+ for ancestors, and omitting them from the _resolved_dependencies
396
+ collection as it is copied to a new collection. The new collection is
397
+ then assigned to the _normalized_resolved_dependencies attribute on
398
+ each revision object.
399
+
400
+ The collection is then used to determine the immediate "down revision"
401
+ identifiers for this revision.
402
+
403
+ """
404
+
405
+ for revision in revisions:
406
+ if revision._resolved_dependencies:
407
+ normalized_resolved = set(revision._resolved_dependencies)
408
+ for rev in self._get_ancestor_nodes(
409
+ [revision],
410
+ include_dependencies=False,
411
+ map_=map_,
412
+ ):
413
+ if rev is revision:
414
+ continue
415
+ elif rev._resolved_dependencies:
416
+ normalized_resolved.difference_update(
417
+ rev._resolved_dependencies
418
+ )
419
+
420
+ revision._normalized_resolved_dependencies = tuple(
421
+ normalized_resolved
422
+ )
423
+ else:
424
+ revision._normalized_resolved_dependencies = ()
425
+
426
+ def add_revision(self, revision: Revision, _replace: bool = False) -> None:
427
+ """add a single revision to an existing map.
428
+
429
+ This method is for single-revision use cases; it's not
430
+ appropriate for fully populating an entire revision map.
431
+
432
+ """
433
+ map_ = self._revision_map
434
+ if not _replace and revision.revision in map_:
435
+ util.warn(
436
+ "Revision %s is present more than once" % revision.revision
437
+ )
438
+ elif _replace and revision.revision not in map_:
439
+ raise Exception("revision %s not in map" % revision.revision)
440
+
441
+ map_[revision.revision] = revision
442
+
443
+ revisions = [revision]
444
+ self._add_branches(revisions, map_)
445
+ self._map_branch_labels(revisions, map_)
446
+ self._add_depends_on(revisions, map_)
447
+
448
+ if revision.is_base:
449
+ self.bases += (revision.revision,)
450
+ if revision._is_real_base:
451
+ self._real_bases += (revision.revision,)
452
+
453
+ for downrev in revision._all_down_revisions:
454
+ if downrev not in map_:
455
+ util.warn(
456
+ "Revision %s referenced from %s is not present"
457
+ % (downrev, revision)
458
+ )
459
+ not_none(map_[downrev]).add_nextrev(revision)
460
+
461
+ self._normalize_depends_on(revisions, map_)
462
+
463
+ if revision._is_real_head:
464
+ self._real_heads = tuple(
465
+ head
466
+ for head in self._real_heads
467
+ if head
468
+ not in set(revision._all_down_revisions).union(
469
+ [revision.revision]
470
+ )
471
+ ) + (revision.revision,)
472
+ if revision.is_head:
473
+ self.heads = tuple(
474
+ head
475
+ for head in self.heads
476
+ if head
477
+ not in set(revision._versioned_down_revisions).union(
478
+ [revision.revision]
479
+ )
480
+ ) + (revision.revision,)
481
+
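+ # Illustrative sketch (not part of the upstream source): continuing
+ # the two-revision map sketched near __init__, adding a new head
+ # shifts the memoized ``heads`` tuple:
+ #
+ #     rmap.add_revision(Revision("c3", "b2"))
+ #     rmap.heads   # -> ("c3",)
+ #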
482
+ def get_current_head(
483
+ self, branch_label: Optional[str] = None
484
+ ) -> Optional[str]:
485
+ """Return the current head revision.
486
+
487
+ If the script directory has multiple heads
488
+ due to branching, an error is raised;
489
+ :meth:`.ScriptDirectory.get_heads` should be
490
+ preferred.
491
+
492
+ :param branch_label: optional branch name which will limit the
493
+ heads considered to those which include that branch_label.
494
+
495
+ :return: a string revision number.
496
+
497
+ .. seealso::
498
+
499
+ :meth:`.ScriptDirectory.get_heads`
500
+
501
+ """
502
+ current_heads: Sequence[str] = self.heads
503
+ if branch_label:
504
+ current_heads = self.filter_for_lineage(
505
+ current_heads, branch_label
506
+ )
507
+ if len(current_heads) > 1:
508
+ raise MultipleHeads(
509
+ current_heads,
510
+ "%s@head" % branch_label if branch_label else "head",
511
+ )
512
+
513
+ if current_heads:
514
+ return current_heads[0]
515
+ else:
516
+ return None
517
+
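+ # Illustrative sketch (not part of the upstream source): on a linear
+ # history get_current_head() returns the single head; with unmerged
+ # branches it raises MultipleHeads unless branch_label narrows the
+ # candidates:
+ #
+ #     rmap.get_current_head()            # -> "c3" in the sketch map
+ #     rmap.get_current_head("mybranch")  # only heads on "mybranch"
+ #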
518
+ def _get_base_revisions(self, identifier: str) -> Tuple[str, ...]:
519
+ return self.filter_for_lineage(self.bases, identifier)
520
+
521
+ def get_revisions(
522
+ self, id_: Optional[_GetRevArg]
523
+ ) -> Tuple[Optional[_RevisionOrBase], ...]:
524
+ """Return the :class:`.Revision` instances with the given rev id
525
+ or identifiers.
526
+
527
+ May be given a single identifier, a sequence of identifiers, or the
528
+ special symbols "head" or "base". The result is a tuple of one
529
+ or more identifiers, or an empty tuple in the case of "base".
530
+
531
+ In the case where 'head' or 'heads' is requested and the
+ revision map is empty, an empty tuple is returned.
533
+
534
+ Supports partial identifiers, where the given identifier
535
+ is matched against all identifiers that start with the given
536
+ characters; if there is exactly one match, that determines the
537
+ full revision.
538
+
539
+ """
540
+
541
+ if isinstance(id_, (list, tuple, set, frozenset)):
542
+ return sum([self.get_revisions(id_elem) for id_elem in id_], ())
543
+ else:
544
+ resolved_id, branch_label = self._resolve_revision_number(id_)
545
+ if len(resolved_id) == 1:
546
+ try:
547
+ rint = int(resolved_id[0])
548
+ if rint < 0:
549
+ # branch@-n -> walk down from heads
550
+ select_heads = self.get_revisions("heads")
551
+ if branch_label is not None:
552
+ select_heads = tuple(
553
+ head
554
+ for head in select_heads
555
+ if branch_label
556
+ in is_revision(head).branch_labels
557
+ )
558
+ return tuple(
559
+ self._walk(head, steps=rint)
560
+ for head in select_heads
561
+ )
562
+ except ValueError:
563
+ # couldn't resolve as integer
564
+ pass
565
+ return tuple(
566
+ self._revision_for_ident(rev_id, branch_label)
567
+ for rev_id in resolved_id
568
+ )
569
+
570
+ def get_revision(self, id_: Optional[str]) -> Optional[Revision]:
571
+ """Return the :class:`.Revision` instance with the given rev id.
572
+
573
+ If a symbolic name such as "head" or "base" is given, resolves
574
+ the identifier into the current head or base revision. If the symbolic
575
+ name refers to multiples, :class:`.MultipleHeads` is raised.
576
+
577
+ Supports partial identifiers, where the given identifier
578
+ is matched against all identifiers that start with the given
579
+ characters; if there is exactly one match, that determines the
580
+ full revision.
581
+
582
+ """
583
+
584
+ resolved_id, branch_label = self._resolve_revision_number(id_)
585
+ if len(resolved_id) > 1:
586
+ raise MultipleHeads(resolved_id, id_)
587
+
588
+ resolved: Union[str, Tuple[()]] = resolved_id[0] if resolved_id else ()
589
+ return self._revision_for_ident(resolved, branch_label)
590
+
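+ # Illustrative sketch (not part of the upstream source): partial and
+ # symbolic lookups, assuming a revision "ae1027a6acf" exists and the
+ # supplied prefix is unique and at least four characters long:
+ #
+ #     rmap.get_revision("ae1027")   # unique prefix -> that Revision
+ #     rmap.get_revision("head")     # symbolic name -> current head
+ #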
591
+ def _resolve_branch(self, branch_label: str) -> Optional[Revision]:
592
+ try:
593
+ branch_rev = self._revision_map[branch_label]
594
+ except KeyError:
595
+ try:
596
+ nonbranch_rev = self._revision_for_ident(branch_label)
597
+ except ResolutionError as re:
598
+ raise ResolutionError(
599
+ "No such branch: '%s'" % branch_label, branch_label
600
+ ) from re
601
+
602
+ else:
603
+ return nonbranch_rev
604
+ else:
605
+ return branch_rev
606
+
607
+ def _revision_for_ident(
608
+ self,
609
+ resolved_id: Union[str, Tuple[()], None],
610
+ check_branch: Optional[str] = None,
611
+ ) -> Optional[Revision]:
612
+ branch_rev: Optional[Revision]
613
+ if check_branch:
614
+ branch_rev = self._resolve_branch(check_branch)
615
+ else:
616
+ branch_rev = None
617
+
618
+ revision: Union[Optional[Revision], Literal[False]]
619
+ try:
620
+ revision = self._revision_map[resolved_id]
621
+ except KeyError:
622
+ # break out to avoid misleading py3k stack traces
623
+ revision = False
624
+ revs: Sequence[str]
625
+ if revision is False:
626
+ assert resolved_id
627
+ # do a partial lookup
628
+ revs = [
629
+ x
630
+ for x in self._revision_map
631
+ if x and len(x) > 3 and x.startswith(resolved_id)
632
+ ]
633
+
634
+ if branch_rev:
635
+ revs = self.filter_for_lineage(revs, check_branch)
636
+ if not revs:
637
+ raise ResolutionError(
638
+ "No such revision or branch '%s'%s"
639
+ % (
640
+ resolved_id,
641
+ (
642
+ "; please ensure at least four characters are "
643
+ "present for partial revision identifier matches"
644
+ if len(resolved_id) < 4
645
+ else ""
646
+ ),
647
+ ),
648
+ resolved_id,
649
+ )
650
+ elif len(revs) > 1:
651
+ raise ResolutionError(
652
+ "Multiple revisions start "
653
+ "with '%s': %s..."
654
+ % (resolved_id, ", ".join("'%s'" % r for r in revs[0:3])),
655
+ resolved_id,
656
+ )
657
+ else:
658
+ revision = self._revision_map[revs[0]]
659
+
660
+ if check_branch and revision is not None:
661
+ assert branch_rev is not None
662
+ assert resolved_id
663
+ if not self._shares_lineage(
664
+ revision.revision, branch_rev.revision
665
+ ):
666
+ raise ResolutionError(
667
+ "Revision %s is not a member of branch '%s'"
668
+ % (revision.revision, check_branch),
669
+ resolved_id,
670
+ )
671
+ return revision
672
+
673
+ def _filter_into_branch_heads(
674
+ self, targets: Iterable[Optional[_RevisionOrBase]]
675
+ ) -> Set[Optional[_RevisionOrBase]]:
676
+ targets = set(targets)
677
+
678
+ for rev in list(targets):
679
+ assert rev
680
+ if targets.intersection(
681
+ self._get_descendant_nodes([rev], include_dependencies=False)
682
+ ).difference([rev]):
683
+ targets.discard(rev)
684
+ return targets
685
+
686
+ def filter_for_lineage(
687
+ self,
688
+ targets: Iterable[_TR],
689
+ check_against: Optional[str],
690
+ include_dependencies: bool = False,
691
+ ) -> Tuple[_TR, ...]:
692
+ id_, branch_label = self._resolve_revision_number(check_against)
693
+
694
+ shares = []
695
+ if branch_label:
696
+ shares.append(branch_label)
697
+ if id_:
698
+ shares.extend(id_)
699
+
700
+ return tuple(
701
+ tg
702
+ for tg in targets
703
+ if self._shares_lineage(
704
+ tg, shares, include_dependencies=include_dependencies
705
+ )
706
+ )
707
+
708
+ def _shares_lineage(
709
+ self,
710
+ target: Optional[_RevisionOrStr],
711
+ test_against_revs: Sequence[_RevisionOrStr],
712
+ include_dependencies: bool = False,
713
+ ) -> bool:
714
+ if not test_against_revs:
715
+ return True
716
+ if not isinstance(target, Revision):
717
+ resolved_target = not_none(self._revision_for_ident(target))
718
+ else:
719
+ resolved_target = target
720
+
721
+ resolved_test_against_revs = [
722
+ (
723
+ self._revision_for_ident(test_against_rev)
724
+ if not isinstance(test_against_rev, Revision)
725
+ else test_against_rev
726
+ )
727
+ for test_against_rev in util.to_tuple(
728
+ test_against_revs, default=()
729
+ )
730
+ ]
731
+
732
+ return bool(
733
+ set(
734
+ self._get_descendant_nodes(
735
+ [resolved_target],
736
+ include_dependencies=include_dependencies,
737
+ )
738
+ )
739
+ .union(
740
+ self._get_ancestor_nodes(
741
+ [resolved_target],
742
+ include_dependencies=include_dependencies,
743
+ )
744
+ )
745
+ .intersection(resolved_test_against_revs)
746
+ )
747
+
748
+ def _resolve_revision_number(
749
+ self, id_: Optional[_GetRevArg]
750
+ ) -> Tuple[Tuple[str, ...], Optional[str]]:
751
+ branch_label: Optional[str]
752
+ if isinstance(id_, str) and "@" in id_:
753
+ branch_label, id_ = id_.split("@", 1)
754
+
755
+ elif id_ is not None and (
756
+ (isinstance(id_, tuple) and id_ and not isinstance(id_[0], str))
757
+ or not isinstance(id_, (str, tuple))
758
+ ):
759
+ raise RevisionError(
760
+ "revision identifier %r is not a string; ensure database "
761
+ "driver settings are correct" % (id_,)
762
+ )
763
+
764
+ else:
765
+ branch_label = None
766
+
767
+ # ensure map is loaded
768
+ self._revision_map
769
+ if id_ == "heads":
770
+ if branch_label:
771
+ return (
772
+ self.filter_for_lineage(self.heads, branch_label),
773
+ branch_label,
774
+ )
775
+ else:
776
+ return self._real_heads, branch_label
777
+ elif id_ == "head":
778
+ current_head = self.get_current_head(branch_label)
779
+ if current_head:
780
+ return (current_head,), branch_label
781
+ else:
782
+ return (), branch_label
783
+ elif id_ == "base" or id_ is None:
784
+ return (), branch_label
785
+ else:
786
+ return util.to_tuple(id_, default=None), branch_label
787
+
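+ # Illustrative sketch (not part of the upstream source): how raw
+ # identifiers decompose into (candidate_ids, branch_label):
+ #
+ #     rmap._resolve_revision_number("base")      # -> ((), None)
+ #     rmap._resolve_revision_number("abc@head")
+ #     # -> ((<head of branch "abc">,), "abc")
+ #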
788
+ def iterate_revisions(
789
+ self,
790
+ upper: _RevisionIdentifierType,
791
+ lower: _RevisionIdentifierType,
792
+ implicit_base: bool = False,
793
+ inclusive: bool = False,
794
+ assert_relative_length: bool = True,
795
+ select_for_downgrade: bool = False,
796
+ ) -> Iterator[Revision]:
797
+ """Iterate through script revisions, starting at the given
798
+ upper revision identifier and ending at the lower.
799
+
800
+ The traversal uses strictly the ``down_revision``
+ marker inside each migration script, so it is a
+ requirement that upper >= lower; otherwise nothing
+ is returned.
804
+
805
+ The iterator yields :class:`.Revision` objects.
806
+
807
+ """
808
+ fn: _CollectRevisionsProtocol
809
+ if select_for_downgrade:
810
+ fn = self._collect_downgrade_revisions
811
+ else:
812
+ fn = self._collect_upgrade_revisions
813
+
814
+ revisions, heads = fn(
815
+ upper,
816
+ lower,
817
+ inclusive=inclusive,
818
+ implicit_base=implicit_base,
819
+ assert_relative_length=assert_relative_length,
820
+ )
821
+
822
+ for node in self._topological_sort(revisions, heads):
823
+ yield not_none(self.get_revision(node))
824
+
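+ # Illustrative sketch (not part of the upstream source): iterating
+ # from "heads" down to "base" yields Revision objects newest-first,
+ # which is the order used by e.g. the history display:
+ #
+ #     for rev in rmap.iterate_revisions("heads", "base"):
+ #         print(rev.revision)   # "c3", "b2", "a1" in the sketch map
+ #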
825
+ def _get_descendant_nodes(
826
+ self,
827
+ targets: Collection[Optional[_RevisionOrBase]],
828
+ map_: Optional[_RevisionMapType] = None,
829
+ check: bool = False,
830
+ omit_immediate_dependencies: bool = False,
831
+ include_dependencies: bool = True,
832
+ ) -> Iterator[Any]:
833
+ if omit_immediate_dependencies:
834
+
835
+ def fn(rev: Revision) -> Iterable[str]:
836
+ if rev not in targets:
837
+ return rev._all_nextrev
838
+ else:
839
+ return rev.nextrev
840
+
841
+ elif include_dependencies:
842
+
843
+ def fn(rev: Revision) -> Iterable[str]:
844
+ return rev._all_nextrev
845
+
846
+ else:
847
+
848
+ def fn(rev: Revision) -> Iterable[str]:
849
+ return rev.nextrev
850
+
851
+ return self._iterate_related_revisions(
852
+ fn, targets, map_=map_, check=check
853
+ )
854
+
855
+ def _get_ancestor_nodes(
856
+ self,
857
+ targets: Collection[Optional[_RevisionOrBase]],
858
+ map_: Optional[_RevisionMapType] = None,
859
+ check: bool = False,
860
+ include_dependencies: bool = True,
861
+ ) -> Iterator[Revision]:
862
+ if include_dependencies:
863
+
864
+ def fn(rev: Revision) -> Iterable[str]:
865
+ return rev._normalized_down_revisions
866
+
867
+ else:
868
+
869
+ def fn(rev: Revision) -> Iterable[str]:
870
+ return rev._versioned_down_revisions
871
+
872
+ return self._iterate_related_revisions(
873
+ fn, targets, map_=map_, check=check
874
+ )
875
+
876
+ def _iterate_related_revisions(
877
+ self,
878
+ fn: Callable[[Revision], Iterable[str]],
879
+ targets: Collection[Optional[_RevisionOrBase]],
880
+ map_: Optional[_RevisionMapType],
881
+ check: bool = False,
882
+ ) -> Iterator[Revision]:
883
+ if map_ is None:
884
+ map_ = self._revision_map
885
+
886
+ seen = set()
887
+ todo: Deque[Revision] = collections.deque()
888
+ for target_for in targets:
889
+ target = is_revision(target_for)
890
+ todo.append(target)
891
+ if check:
892
+ per_target = set()
893
+
894
+ while todo:
895
+ rev = todo.pop()
896
+ if check:
897
+ per_target.add(rev)
898
+
899
+ if rev in seen:
900
+ continue
901
+ seen.add(rev)
902
+ # Check for map errors before collecting.
903
+ for rev_id in fn(rev):
904
+ next_rev = map_[rev_id]
905
+ assert next_rev is not None
906
+ if next_rev.revision != rev_id:
907
+ raise RevisionError(
908
+ "Dependency resolution failed; broken map"
909
+ )
910
+ todo.append(next_rev)
911
+ yield rev
912
+ if check:
913
+ overlaps = per_target.intersection(targets).difference(
914
+ [target]
915
+ )
916
+ if overlaps:
917
+ raise RevisionError(
918
+ "Requested revision %s overlaps with "
919
+ "other requested revisions %s"
920
+ % (
921
+ target.revision,
922
+ ", ".join(r.revision for r in overlaps),
923
+ )
924
+ )
925
+
926
+ def _topological_sort(
927
+ self,
928
+ revisions: Collection[Revision],
929
+ heads: Any,
930
+ ) -> List[str]:
931
+ """Yield revision ids of a collection of Revision objects in
932
+ topological sorted order (i.e. revisions always come after their
933
+ down_revisions and dependencies). Uses the order of keys in
934
+ _revision_map to sort.
935
+
936
+ """
937
+
938
+ id_to_rev = self._revision_map
939
+
940
+ def get_ancestors(rev_id: str) -> Set[str]:
941
+ return {
942
+ r.revision
943
+ for r in self._get_ancestor_nodes([id_to_rev[rev_id]])
944
+ }
945
+
946
+ todo = {d.revision for d in revisions}
947
+
948
+ # Use revision map (ordered dict) key order to pre-sort.
949
+ inserted_order = list(self._revision_map)
950
+
951
+ current_heads = list(
952
+ sorted(
953
+ {d.revision for d in heads if d.revision in todo},
954
+ key=inserted_order.index,
955
+ )
956
+ )
957
+ ancestors_by_idx = [get_ancestors(rev_id) for rev_id in current_heads]
958
+
959
+ output = []
960
+
961
+ current_candidate_idx = 0
962
+ while current_heads:
963
+ candidate = current_heads[current_candidate_idx]
964
+
965
+ for check_head_index, ancestors in enumerate(ancestors_by_idx):
966
+ # scan all the heads. see if we can continue walking
967
+ # down the current branch indicated by current_candidate_idx.
968
+ if (
969
+ check_head_index != current_candidate_idx
970
+ and candidate in ancestors
971
+ ):
972
+ current_candidate_idx = check_head_index
973
+ # nope, another head is dependent on us, they have
974
+ # to be traversed first
975
+ break
976
+ else:
977
+ # yup, we can emit
978
+ if candidate in todo:
979
+ output.append(candidate)
980
+ todo.remove(candidate)
981
+
982
+ # now update the heads with our ancestors.
983
+
984
+ candidate_rev = id_to_rev[candidate]
985
+ assert candidate_rev is not None
986
+
987
+ heads_to_add = [
988
+ r
989
+ for r in candidate_rev._normalized_down_revisions
990
+ if r in todo and r not in current_heads
991
+ ]
992
+
993
+ if not heads_to_add:
994
+ # no ancestors, so remove this head from the list
995
+ del current_heads[current_candidate_idx]
996
+ del ancestors_by_idx[current_candidate_idx]
997
+ current_candidate_idx = max(current_candidate_idx - 1, 0)
998
+ else:
999
+ if (
1000
+ not candidate_rev._normalized_resolved_dependencies
1001
+ and len(candidate_rev._versioned_down_revisions) == 1
1002
+ ):
1003
+ current_heads[current_candidate_idx] = heads_to_add[0]
1004
+
1005
+ # for plain movement down a revision line without
1006
+ # any mergepoints, branchpoints, or deps, we
1007
+ # can update the ancestors collection directly
1008
+ # by popping out the candidate we just emitted
1009
+ ancestors_by_idx[current_candidate_idx].discard(
1010
+ candidate
1011
+ )
1012
+
1013
+ else:
1014
+ # otherwise recalculate it again, things get
1015
+ # complicated otherwise. This can possibly be
1016
+ # improved to not run the whole ancestor thing
1017
+ # each time but it was getting complicated
1018
+ current_heads[current_candidate_idx] = heads_to_add[0]
1019
+ current_heads.extend(heads_to_add[1:])
1020
+ ancestors_by_idx[current_candidate_idx] = (
1021
+ get_ancestors(heads_to_add[0])
1022
+ )
1023
+ ancestors_by_idx.extend(
1024
+ get_ancestors(head) for head in heads_to_add[1:]
1025
+ )
1026
+
1027
+ assert not todo
1028
+ return output
1029
+
1030
+ def _walk(
1031
+ self,
1032
+ start: Optional[Union[str, Revision]],
1033
+ steps: int,
1034
+ branch_label: Optional[str] = None,
1035
+ no_overwalk: bool = True,
1036
+ ) -> Optional[_RevisionOrBase]:
1037
+ """
1038
+ Walk the requested number of :steps up (steps > 0) or down (steps < 0)
1039
+ the revision tree.
1040
+
1041
+ :branch_label is used to select branches only when walking up.
1042
+
1043
+ If the walk goes past the boundaries of the tree and :no_overwalk is
1044
+ True, None is returned, otherwise the walk terminates early.
1045
+
1046
+ A RevisionError is raised if there is no unambiguous revision to
1047
+ walk to.
1048
+ """
1049
+ initial: Optional[_RevisionOrBase]
1050
+ if isinstance(start, str):
1051
+ initial = self.get_revision(start)
1052
+ else:
1053
+ initial = start
1054
+
1055
+ children: Sequence[Optional[_RevisionOrBase]]
1056
+ for _ in range(abs(steps)):
1057
+ if steps > 0:
1058
+ assert initial != "base" # type: ignore[comparison-overlap]
1059
+ # Walk up
1060
+ walk_up = [
1061
+ is_revision(rev)
1062
+ for rev in self.get_revisions(
1063
+ self.bases if initial is None else initial.nextrev
1064
+ )
1065
+ ]
1066
+ if branch_label:
1067
+ children = self.filter_for_lineage(walk_up, branch_label)
1068
+ else:
1069
+ children = walk_up
1070
+ else:
1071
+ # Walk down
1072
+ if initial == "base": # type: ignore[comparison-overlap]
1073
+ children = ()
1074
+ else:
1075
+ children = self.get_revisions(
1076
+ self.heads
1077
+ if initial is None
1078
+ else initial.down_revision
1079
+ )
1080
+ if not children:
1081
+ children = ("base",)
1082
+ if not children:
1083
+ # This will return an invalid result if no_overwalk, otherwise
1084
+ # further steps will stay where we are.
1085
+ ret = None if no_overwalk else initial
1086
+ return ret
1087
+ elif len(children) > 1:
1088
+ raise RevisionError("Ambiguous walk")
1089
+ initial = children[0]
1090
+
1091
+ return initial
1092
+
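+ # Illustrative sketch (not part of the upstream source): _walk moves
+ # along an unambiguous chain and returns None once it steps past the
+ # tree boundary while no_overwalk is True:
+ #
+ #     rmap._walk("c3", steps=-2)   # -> the "a1" Revision
+ #     rmap._walk("a1", steps=-2)   # past "base" -> None
+ #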
1093
+ def _parse_downgrade_target(
1094
+ self,
1095
+ current_revisions: _RevisionIdentifierType,
1096
+ target: _RevisionIdentifierType,
1097
+ assert_relative_length: bool,
1098
+ ) -> Tuple[Optional[str], Optional[_RevisionOrBase]]:
1099
+ """
1100
+ Parse downgrade command syntax :target to retrieve the target revision
1101
+ and branch label (if any) given the :current_revisions stamp of the
1102
+ database.
1103
+
1104
+ Returns a tuple (branch_label, target_revision) where branch_label
1105
+ is a string from the command specifying the branch to consider (or
1106
+ None if no branch given), and target_revision is a Revision object
1107
+ which the command refers to. target_revision is None if the command
1108
+ refers to 'base'. The target may be specified in absolute form, or
1109
+ relative to :current_revisions.
1110
+ """
1111
+ if target is None:
1112
+ return None, None
1113
+ assert isinstance(
1114
+ target, str
1115
+ ), "Expected downgrade target in string form"
1116
+ match = _relative_destination.match(target)
1117
+ if match:
1118
+ branch_label, symbol, relative = match.groups()
1119
+ rel_int = int(relative)
1120
+ if rel_int >= 0:
1121
+ if symbol is None:
1122
+ # Downgrading to current + n is not valid.
1123
+ raise RevisionError(
1124
+ "Relative revision %s didn't "
1125
+ "produce %d migrations" % (relative, abs(rel_int))
1126
+ )
1127
+ # Find target revision relative to given symbol.
1128
+ rev = self._walk(
1129
+ symbol,
1130
+ rel_int,
1131
+ branch_label,
1132
+ no_overwalk=assert_relative_length,
1133
+ )
1134
+ if rev is None:
1135
+ raise RevisionError("Walked too far")
1136
+ return branch_label, rev
1137
+ else:
1138
+ relative_revision = symbol is None
1139
+ if relative_revision:
1140
+ # Find target revision relative to current state.
1141
+ if branch_label:
1142
+ cr_tuple = util.to_tuple(current_revisions)
1143
+ symbol_list: Sequence[str]
1144
+ symbol_list = self.filter_for_lineage(
1145
+ cr_tuple, branch_label
1146
+ )
1147
+ if not symbol_list:
1148
+ # check the case where there are multiple branches
+ # but currently only a single head, since all other
+ # branch heads are dependent on the current single
+ # head.
1152
+ all_current = cast(
1153
+ Set[Revision], self._get_all_current(cr_tuple)
1154
+ )
1155
+ sl_all_current = self.filter_for_lineage(
1156
+ all_current, branch_label
1157
+ )
1158
+ symbol_list = [
1159
+ r.revision if r else r # type: ignore[misc]
1160
+ for r in sl_all_current
1161
+ ]
1162
+
1163
+ assert len(symbol_list) == 1
1164
+ symbol = symbol_list[0]
1165
+ else:
1166
+ current_revisions = util.to_tuple(current_revisions)
1167
+ if not current_revisions:
1168
+ raise RevisionError(
1169
+ "Relative revision %s didn't "
1170
+ "produce %d migrations"
1171
+ % (relative, abs(rel_int))
1172
+ )
1173
+ # Have to check uniques here for duplicate rows test.
1174
+ if len(set(current_revisions)) > 1:
1175
+ util.warn(
1176
+ "downgrade -1 from multiple heads is "
1177
+ "ambiguous; "
1178
+ "this usage will be disallowed in a future "
1179
+ "release."
1180
+ )
1181
+ symbol = current_revisions[0]
1182
+ # Restrict iteration to just the selected branch when
1183
+ # ambiguous branches are involved.
1184
+ branch_label = symbol
1185
+ # Walk down the tree to find downgrade target.
1186
+ rev = self._walk(
1187
+ start=(
1188
+ self.get_revision(symbol)
1189
+ if branch_label is None
1190
+ else self.get_revision(
1191
+ "%s@%s" % (branch_label, symbol)
1192
+ )
1193
+ ),
1194
+ steps=rel_int,
1195
+ no_overwalk=assert_relative_length,
1196
+ )
1197
+ if rev is None:
1198
+ if relative_revision:
1199
+ raise RevisionError(
1200
+ "Relative revision %s didn't "
1201
+ "produce %d migrations" % (relative, abs(rel_int))
1202
+ )
1203
+ else:
1204
+ raise RevisionError("Walked too far")
1205
+ return branch_label, rev
1206
+
1207
+ # No relative destination given, revision specified is absolute.
1208
+ branch_label, _, symbol = target.rpartition("@")
1209
+ if not branch_label:
1210
+ branch_label = None
1211
+ return branch_label, self.get_revision(symbol)
1212
+
1213
+ def _parse_upgrade_target(
1214
+ self,
1215
+ current_revisions: _RevisionIdentifierType,
1216
+ target: _RevisionIdentifierType,
1217
+ assert_relative_length: bool,
1218
+ ) -> Tuple[Optional[_RevisionOrBase], ...]:
1219
+ """
1220
+ Parse upgrade command syntax :target to retrieve the target revision
1221
+ and given the :current_revisions stamp of the database.
1222
+
1223
+ Returns a tuple of Revision objects which should be iterated/upgraded
1224
+ to. The target may be specified in absolute form, or relative to
1225
+ :current_revisions.
1226
+ """
1227
+ if isinstance(target, str):
1228
+ match = _relative_destination.match(target)
1229
+ else:
1230
+ match = None
1231
+
1232
+ if not match:
1233
+ # No relative destination, target is absolute.
1234
+ return self.get_revisions(target)
1235
+
1236
+ current_revisions_tup: Union[str, Tuple[Optional[str], ...], None]
1237
+ current_revisions_tup = util.to_tuple(current_revisions)
1238
+
1239
+ branch_label, symbol, relative_str = match.groups()
1240
+ relative = int(relative_str)
1241
+ if relative > 0:
1242
+ if symbol is None:
1243
+ if not current_revisions_tup:
1244
+ current_revisions_tup = (None,)
1245
+ # Try to filter to a single target (avoid ambiguous branches).
1246
+ start_revs = current_revisions_tup
1247
+ if branch_label:
1248
+ start_revs = self.filter_for_lineage(
1249
+                         self.get_revisions(current_revisions_tup),  # type: ignore[arg-type] # noqa: E501
+                         branch_label,
+                     )
+                     if not start_revs:
+                         # The requested branch is not a head, so we need to
+                         # backtrack to find a branchpoint.
+                         active_on_branch = self.filter_for_lineage(
+                             self._get_ancestor_nodes(
+                                 self.get_revisions(current_revisions_tup)
+                             ),
+                             branch_label,
+                         )
+                         # Find the tips of this set of revisions (revisions
+                         # without children within the set).
+                         start_revs = tuple(
+                             {rev.revision for rev in active_on_branch}
+                             - {
+                                 down
+                                 for rev in active_on_branch
+                                 for down in rev._normalized_down_revisions
+                             }
+                         )
+                         if not start_revs:
+                             # We must need to go right back to base to find
+                             # a starting point for this branch.
+                             start_revs = (None,)
+                 if len(start_revs) > 1:
+                     raise RevisionError(
+                         "Ambiguous upgrade from multiple current revisions"
+                     )
+                 # Walk up from unique target revision.
+                 rev = self._walk(
+                     start=start_revs[0],
+                     steps=relative,
+                     branch_label=branch_label,
+                     no_overwalk=assert_relative_length,
+                 )
+                 if rev is None:
+                     raise RevisionError(
+                         "Relative revision %s didn't "
+                         "produce %d migrations" % (relative_str, abs(relative))
+                     )
+                 return (rev,)
+             else:
+                 # Walk is relative to a given revision, not the current state.
+                 return (
+                     self._walk(
+                         start=self.get_revision(symbol),
+                         steps=relative,
+                         branch_label=branch_label,
+                         no_overwalk=assert_relative_length,
+                     ),
+                 )
+         else:
+             if symbol is None:
+                 # Upgrading to current - n is not valid.
+                 raise RevisionError(
+                     "Relative revision %s didn't "
+                     "produce %d migrations" % (relative, abs(relative))
+                 )
+             return (
+                 self._walk(
+                     start=(
+                         self.get_revision(symbol)
+                         if branch_label is None
+                         else self.get_revision(
+                             "%s@%s" % (branch_label, symbol)
+                         )
+                     ),
+                     steps=relative,
+                     no_overwalk=assert_relative_length,
+                 ),
+             )
+
+     def _collect_downgrade_revisions(
+         self,
+         upper: _RevisionIdentifierType,
+         lower: _RevisionIdentifierType,
+         inclusive: bool,
+         implicit_base: bool,
+         assert_relative_length: bool,
+     ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]:
+         """
+         Compute the set of current revisions specified by :upper, and the
+         downgrade target specified by :lower. Return all dependents of the
+         target which are currently active.
+
+         :inclusive=True includes the target revision in the set
+         """
+
+         branch_label, target_revision = self._parse_downgrade_target(
+             current_revisions=upper,
+             target=lower,
+             assert_relative_length=assert_relative_length,
+         )
+         if target_revision == "base":
+             target_revision = None
+         assert target_revision is None or isinstance(target_revision, Revision)
+
+         roots: List[Revision]
+         # Find candidates to drop.
+         if target_revision is None:
+             # Downgrading back to base: find all tree roots.
+             roots = [
+                 rev
+                 for rev in self._revision_map.values()
+                 if rev is not None and rev.down_revision is None
+             ]
+         elif inclusive:
+             # inclusive implies target revision should also be dropped
+             roots = [target_revision]
+         else:
+             # Downgrading to fixed target: find all direct children.
+             roots = [
+                 is_revision(rev)
+                 for rev in self.get_revisions(target_revision.nextrev)
+             ]
+
+         if branch_label and len(roots) > 1:
+             # Need to filter roots.
+             ancestors = {
+                 rev.revision
+                 for rev in self._get_ancestor_nodes(
+                     [self._resolve_branch(branch_label)],
+                     include_dependencies=False,
+                 )
+             }
+             # Intersection gives the root revisions we are trying to
+             # rollback with the downgrade.
+             roots = [
+                 is_revision(rev)
+                 for rev in self.get_revisions(
+                     {rev.revision for rev in roots}.intersection(ancestors)
+                 )
+             ]
+
+         # Ensure we didn't throw everything away when filtering branches.
+         if len(roots) == 0:
+             raise RevisionError(
+                 "Not a valid downgrade target from current heads"
+             )
+
+         heads = self.get_revisions(upper)
+
+         # Aim is to drop :branch_revision; to do so we also need to drop its
+         # descendants and anything dependent on it.
+         downgrade_revisions = set(
+             self._get_descendant_nodes(
+                 roots,
+                 include_dependencies=True,
+                 omit_immediate_dependencies=False,
+             )
+         )
+         active_revisions = set(
+             self._get_ancestor_nodes(heads, include_dependencies=True)
+         )
+
+         # Emit revisions to drop in reverse topological sorted order.
+         downgrade_revisions.intersection_update(active_revisions)
+
+         if implicit_base:
+             # Wind other branches back to base.
+             downgrade_revisions.update(
+                 active_revisions.difference(self._get_ancestor_nodes(roots))
+             )
+
+         if (
+             target_revision is not None
+             and not downgrade_revisions
+             and target_revision not in heads
+         ):
+             # Empty intersection: target revs are not present.
+
+             raise RangeNotAncestorError("Nothing to drop", upper)
+
+         return downgrade_revisions, heads
+
+     def _collect_upgrade_revisions(
+         self,
+         upper: _RevisionIdentifierType,
+         lower: _RevisionIdentifierType,
+         inclusive: bool,
+         implicit_base: bool,
+         assert_relative_length: bool,
+     ) -> Tuple[Set[Revision], Tuple[Revision, ...]]:
+         """
+         Compute the set of required revisions specified by :upper, and the
+         current set of active revisions specified by :lower. Find the
+         difference between the two to compute the required upgrades.
+
+         :inclusive=True includes the current/lower revisions in the set
+
+         :implicit_base=False only returns revisions which are downstream
+         of the current/lower revisions. Dependencies from branches with
+         different bases will not be included.
+         """
+         targets: Collection[Revision] = [
+             is_revision(rev)
+             for rev in self._parse_upgrade_target(
+                 current_revisions=lower,
+                 target=upper,
+                 assert_relative_length=assert_relative_length,
+             )
+         ]
+
+         # assert type(targets) is tuple, "targets should be a tuple"
+
+         # Handle named bases (e.g. branch@... -> heads should only produce
+         # targets on the given branch)
+         if isinstance(lower, str) and "@" in lower:
+             branch, _, _ = lower.partition("@")
+             branch_rev = self.get_revision(branch)
+             if branch_rev is not None and branch_rev.revision == branch:
+                 # A revision was used as a label; get its branch instead
+                 assert len(branch_rev.branch_labels) == 1
+                 branch = next(iter(branch_rev.branch_labels))
+             targets = {
+                 need for need in targets if branch in need.branch_labels
+             }
+
+         required_node_set = set(
+             self._get_ancestor_nodes(
+                 targets, check=True, include_dependencies=True
+             )
+         ).union(targets)
+
+         current_revisions = self.get_revisions(lower)
+         if not implicit_base and any(
+             rev not in required_node_set
+             for rev in current_revisions
+             if rev is not None
+         ):
+             raise RangeNotAncestorError(lower, upper)
+         assert (
+             type(current_revisions) is tuple
+         ), "current_revisions should be a tuple"
+
+         # Special case where lower = a relative value (get_revisions can't
+         # find it)
+         if current_revisions and current_revisions[0] is None:
+             _, rev = self._parse_downgrade_target(
+                 current_revisions=upper,
+                 target=lower,
+                 assert_relative_length=assert_relative_length,
+             )
+             assert rev
+             if rev == "base":
+                 current_revisions = tuple()
+                 lower = None
+             else:
+                 current_revisions = (rev,)
+                 lower = rev.revision
+
+         current_node_set = set(
+             self._get_ancestor_nodes(
+                 current_revisions, check=True, include_dependencies=True
+             )
+         ).union(current_revisions)
+
+         needs = required_node_set.difference(current_node_set)
+
+         # Include the lower revision (=current_revisions?) in the iteration
+         if inclusive:
+             needs.update(is_revision(rev) for rev in self.get_revisions(lower))
+         # By default, base is implicit as we want all dependencies returned.
+         # Base is also implicit if lower = base
+         # implicit_base=False -> only return direct downstreams of
+         # current_revisions
+         if current_revisions and not implicit_base:
+             lower_descendents = self._get_descendant_nodes(
+                 [is_revision(rev) for rev in current_revisions],
+                 check=True,
+                 include_dependencies=False,
+             )
+             needs.intersection_update(lower_descendents)
+
+         return needs, tuple(targets)
+
+     def _get_all_current(
+         self, id_: Tuple[str, ...]
+     ) -> Set[Optional[_RevisionOrBase]]:
+         top_revs: Set[Optional[_RevisionOrBase]]
+         top_revs = set(self.get_revisions(id_))
+         top_revs.update(
+             self._get_ancestor_nodes(list(top_revs), include_dependencies=True)
+         )
+         return self._filter_into_branch_heads(top_revs)
+
+
+ class Revision:
+     """Base class for revisioned objects.
+
+     The :class:`.Revision` class is the base of the more public-facing
+     :class:`.Script` object, which represents a migration script.
+     The mechanics of revision management and traversal are encapsulated
+     within :class:`.Revision`, while :class:`.Script` applies this logic
+     to Python files in a version directory.
+
+     """
+
+     nextrev: FrozenSet[str] = frozenset()
+     """following revisions, based on down_revision only."""
+
+     _all_nextrev: FrozenSet[str] = frozenset()
+
+     revision: str = None  # type: ignore[assignment]
+     """The string revision number."""
+
+     down_revision: Optional[_RevIdType] = None
+     """The ``down_revision`` identifier(s) within the migration script.
+
+     Note that the total set of "down" revisions is
+     down_revision + dependencies.
+
+     """
+
+     dependencies: Optional[_RevIdType] = None
+     """Additional revisions which this revision is dependent on.
+
+     From a migration standpoint, these dependencies are added to the
+     down_revision to form the full iteration. However, the separation
+     of down_revision from "dependencies" is to assist in navigating
+     a history that contains many branches, typically a multi-root scenario.
+
+     """
+
+     branch_labels: Set[str] = None  # type: ignore[assignment]
+     """Optional string/tuple of symbolic names to apply to this
+     revision's branch"""
+
+     _resolved_dependencies: Tuple[str, ...]
+     _normalized_resolved_dependencies: Tuple[str, ...]
+
+     @classmethod
+     def verify_rev_id(cls, revision: str) -> None:
+         illegal_chars = set(revision).intersection(_revision_illegal_chars)
+         if illegal_chars:
+             raise RevisionError(
+                 "Character(s) '%s' not allowed in revision identifier '%s'"
+                 % (", ".join(sorted(illegal_chars)), revision)
+             )
+
+     def __init__(
+         self,
+         revision: str,
+         down_revision: Optional[Union[str, Tuple[str, ...]]],
+         dependencies: Optional[Union[str, Tuple[str, ...]]] = None,
+         branch_labels: Optional[Union[str, Tuple[str, ...]]] = None,
+     ) -> None:
+         if down_revision and revision in util.to_tuple(down_revision):
+             raise LoopDetected(revision)
+         elif dependencies is not None and revision in util.to_tuple(
+             dependencies
+         ):
+             raise DependencyLoopDetected(revision)
+
+         self.verify_rev_id(revision)
+         self.revision = revision
+         self.down_revision = tuple_rev_as_scalar(util.to_tuple(down_revision))
+         self.dependencies = tuple_rev_as_scalar(util.to_tuple(dependencies))
+         self._orig_branch_labels = util.to_tuple(branch_labels, default=())
+         self.branch_labels = set(self._orig_branch_labels)
+
+     def __repr__(self) -> str:
+         args = [repr(self.revision), repr(self.down_revision)]
+         if self.dependencies:
+             args.append("dependencies=%r" % (self.dependencies,))
+         if self.branch_labels:
+             args.append("branch_labels=%r" % (self.branch_labels,))
+         return "%s(%s)" % (self.__class__.__name__, ", ".join(args))
+
+     def add_nextrev(self, revision: Revision) -> None:
+         self._all_nextrev = self._all_nextrev.union([revision.revision])
+         if self.revision in revision._versioned_down_revisions:
+             self.nextrev = self.nextrev.union([revision.revision])
+
+     @property
+     def _all_down_revisions(self) -> Tuple[str, ...]:
+         return util.dedupe_tuple(
+             util.to_tuple(self.down_revision, default=())
+             + self._resolved_dependencies
+         )
+
+     @property
+     def _normalized_down_revisions(self) -> Tuple[str, ...]:
+         """return immediate down revisions for a rev, omitting dependencies
+         that are still dependencies of ancestors.
+
+         """
+         return util.dedupe_tuple(
+             util.to_tuple(self.down_revision, default=())
+             + self._normalized_resolved_dependencies
+         )
+
+     @property
+     def _versioned_down_revisions(self) -> Tuple[str, ...]:
+         return util.to_tuple(self.down_revision, default=())
+
+     @property
+     def is_head(self) -> bool:
+         """Return True if this :class:`.Revision` is a 'head' revision.
+
+         This is determined based on whether any other :class:`.Script`
+         within the :class:`.ScriptDirectory` refers to this
+         :class:`.Script`. Multiple heads can be present.
+
+         """
+         return not bool(self.nextrev)
+
+     @property
+     def _is_real_head(self) -> bool:
+         return not bool(self._all_nextrev)
+
+     @property
+     def is_base(self) -> bool:
+         """Return True if this :class:`.Revision` is a 'base' revision."""
+
+         return self.down_revision is None
+
+     @property
+     def _is_real_base(self) -> bool:
+         """Return True if this :class:`.Revision` is a "real" base revision,
+         e.g. that it has no dependencies either."""
+
+         # we use self.dependencies here because this is called up
+         # in initialization where _real_dependencies isn't set up
+         # yet
+         return self.down_revision is None and self.dependencies is None
+
+     @property
+     def is_branch_point(self) -> bool:
+         """Return True if this :class:`.Script` is a branch point.
+
+         A branchpoint is defined as a :class:`.Script` which is referred
+         to by more than one succeeding :class:`.Script`, that is more
+         than one :class:`.Script` has a `down_revision` identifier pointing
+         here.
+
+         """
+         return len(self.nextrev) > 1
+
+     @property
+     def _is_real_branch_point(self) -> bool:
+         """Return True if this :class:`.Script` is a 'real' branch point,
+         taking into account dependencies as well.
+
+         """
+         return len(self._all_nextrev) > 1
+
+     @property
+     def is_merge_point(self) -> bool:
+         """Return True if this :class:`.Script` is a merge point."""
+
+         return len(self._versioned_down_revisions) > 1
+
+
+ @overload
+ def tuple_rev_as_scalar(rev: None) -> None: ...
+
+
+ @overload
+ def tuple_rev_as_scalar(
+     rev: Union[Tuple[_T, ...], List[_T]],
+ ) -> Union[_T, Tuple[_T, ...], List[_T]]: ...
+
+
+ def tuple_rev_as_scalar(
+     rev: Optional[Sequence[_T]],
+ ) -> Union[_T, Sequence[_T], None]:
+     if not rev:
+         return None
+     elif len(rev) == 1:
+         return rev[0]
+     else:
+         return rev
+
+
+ def is_revision(rev: Any) -> Revision:
+     assert isinstance(rev, Revision)
+     return rev
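
As a quick illustration of the head/base/branch-point flags above, here is a minimal sketch that uses only the Revision class defined in this file; the add_nextrev() wiring shown is normally performed by RevisionMap while it builds its map:

    from alembic.script.revision import Revision

    # a -> b -> (c1, c2) -> d, where d merges the two branches
    a = Revision("a", None)
    b = Revision("b", "a")
    c1 = Revision("c1", "b")
    c2 = Revision("c2", "b")
    d = Revision("d", ("c1", "c2"))

    # link parents to children, as RevisionMap does during map construction
    for parent, child in [(a, b), (b, c1), (b, c2), (c1, d), (c2, d)]:
        parent.add_nextrev(child)

    assert a.is_base and not a.is_head  # root of the tree, but has a child
    assert b.is_branch_point            # both c1 and c2 point down to b
    assert d.is_merge_point             # d lists two down revisions
    assert d.is_head                    # no revision refers to d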
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/script/write_hooks.py ADDED
@@ -0,0 +1,176 @@
+ # mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls
+ # mypy: no-warn-return-any, allow-any-generics
+
+ from __future__ import annotations
+
+ import importlib.util
+ import os
+ import shlex
+ import subprocess
+ import sys
+ from typing import Any
+ from typing import Callable
+ from typing import Dict
+ from typing import List
+ from typing import Optional
+ from typing import TYPE_CHECKING
+ from typing import Union
+
+ from .. import util
+ from ..util import compat
+ from ..util.pyfiles import _preserving_path_as_str
+
+ if TYPE_CHECKING:
+     from ..config import PostWriteHookConfig
+
+ REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME"
+
+ _registry: dict = {}
+
+
+ def register(name: str) -> Callable:
+     """A function decorator that will register that function as a write hook.
+
+     See the documentation linked below for an example.
+
+     .. seealso::
+
+         :ref:`post_write_hooks_custom`
+
+
+     """
+
+     def decorate(fn):
+         _registry[name] = fn
+         return fn
+
+     return decorate
+
+
+ def _invoke(
+     name: str,
+     revision_path: Union[str, os.PathLike[str]],
+     options: PostWriteHookConfig,
+ ) -> Any:
+     """Invokes the formatter registered for the given name.
+
+     :param name: The name of a formatter in the registry
+     :param revision_path: string path to the revision file
+     :param options: A dict containing kwargs passed to the
+      specified formatter.
+     :raises: :class:`alembic.util.CommandError`
+     """
+     revision_path = _preserving_path_as_str(revision_path)
+     try:
+         hook = _registry[name]
+     except KeyError as ke:
+         raise util.CommandError(
+             f"No formatter with name '{name}' registered"
+         ) from ke
+     else:
+         return hook(revision_path, options)
+
+
+ def _run_hooks(
+     path: Union[str, os.PathLike[str]], hooks: list[PostWriteHookConfig]
+ ) -> None:
+     """Invoke hooks for a generated revision."""
+
+     for hook in hooks:
+         name = hook["_hook_name"]
+         try:
+             type_ = hook["type"]
+         except KeyError as ke:
+             raise util.CommandError(
+                 f"Key '{name}.type' (or 'type' in toml) is required "
+                 f"for post write hook {name!r}"
+             ) from ke
+         else:
+             with util.status(
+                 f"Running post write hook {name!r}", newline=True
+             ):
+                 _invoke(type_, path, hook)
+
+
+ def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]:
+     """Parse options from a string into a list.
+
+     Also substitutes the revision script token with the actual filename of
+     the revision script.
+
+     If the revision script token doesn't occur in the options string, it is
+     automatically prepended.
+     """
+     if REVISION_SCRIPT_TOKEN not in cmdline_options_str:
+         cmdline_options_str = REVISION_SCRIPT_TOKEN + " " + cmdline_options_str
+     cmdline_options_list = shlex.split(
+         cmdline_options_str, posix=compat.is_posix
+     )
+     cmdline_options_list = [
+         option.replace(REVISION_SCRIPT_TOKEN, path)
+         for option in cmdline_options_list
+     ]
+     return cmdline_options_list
+
+
+ def _get_required_option(options: dict, name: str) -> str:
+     try:
+         return options[name]
+     except KeyError as ke:
+         raise util.CommandError(
+             f"Key {options['_hook_name']}.{name} is required for post "
+             f"write hook {options['_hook_name']!r}"
+         ) from ke
+
+
+ def _run_hook(
+     path: str, options: dict, ignore_output: bool, command: List[str]
+ ) -> None:
+     cwd: Optional[str] = options.get("cwd", None)
+     cmdline_options_str = options.get("options", "")
+     cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path)
+
+     kw: Dict[str, Any] = {}
+     if ignore_output:
+         kw["stdout"] = kw["stderr"] = subprocess.DEVNULL
+
+     subprocess.run([*command, *cmdline_options_list], cwd=cwd, **kw)
+
+
+ @register("console_scripts")
+ def console_scripts(
+     path: str, options: dict, ignore_output: bool = False
+ ) -> None:
+     entrypoint_name = _get_required_option(options, "entrypoint")
+     for entry in compat.importlib_metadata_get("console_scripts"):
+         if entry.name == entrypoint_name:
+             impl: Any = entry
+             break
+     else:
+         raise util.CommandError(
+             f"Could not find entrypoint console_scripts.{entrypoint_name}"
+         )
+
+     command = [
+         sys.executable,
+         "-c",
+         f"import {impl.module}; {impl.module}.{impl.attr}()",
+     ]
+     _run_hook(path, options, ignore_output, command)
+
+
+ @register("exec")
+ def exec_(path: str, options: dict, ignore_output: bool = False) -> None:
+     executable = _get_required_option(options, "executable")
+     _run_hook(path, options, ignore_output, command=[executable])
+
+
+ @register("module")
+ def module(path: str, options: dict, ignore_output: bool = False) -> None:
+     module_name = _get_required_option(options, "module")
+
+     if importlib.util.find_spec(module_name) is None:
+         raise util.CommandError(f"Could not find module {module_name}")
+
+     command = [sys.executable, "-m", module_name]
+     _run_hook(path, options, ignore_output, command)
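
Beyond the three built-in runners above, a project can add its own hook through the public register() decorator; the following sketch mirrors the "spaces_to_tabs" pattern shown in the Alembic documentation (the hook name and any option keys it reads are illustrative):

    import re

    from alembic.script import write_hooks


    @write_hooks.register("spaces_to_tabs")
    def spaces_to_tabs(filename, options):
        # "options" carries this hook's keys from alembic.ini / pyproject.toml
        lines = []
        with open(filename) as file_:
            for line in file_:
                lines.append(
                    re.sub(
                        r"^(    )+",
                        lambda m: "\t" * (len(m.group(0)) // 4),
                        line,
                    )
                )
        with open(filename, "w") as to_write:
            to_write.write("".join(lines))

The hook is then selected in the ini file with "hooks = spaces_to_tabs" plus "spaces_to_tabs.type = spaces_to_tabs"; the type key is the registry name that _invoke() looks up.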
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/README ADDED
@@ -0,0 +1 @@
+ Generic single-database configuration with an async dbapi.
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/alembic.ini.mako ADDED
@@ -0,0 +1,147 @@
+ # A generic, single database configuration.
+
+ [alembic]
+ # path to migration scripts.
+ # this is typically a path given in POSIX (e.g. forward slashes)
+ # format, relative to the token %(here)s which refers to the location of this
+ # ini file
+ script_location = ${script_location}
+
+ # template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+ # Uncomment the line below if you want the files to be prepended with date and time
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+ # for all available tokens
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+ # sys.path path, will be prepended to sys.path if present.
+ # defaults to the current working directory. for multiple paths, the path separator
+ # is defined by "path_separator" below.
+ prepend_sys_path = .
+
+ # timezone to use when rendering the date within the migration file
+ # as well as the filename.
+ # If specified, requires python>=3.9 or the backports.zoneinfo library, plus the tzdata library.
+ # Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+ # string value is passed to ZoneInfo()
+ # leave blank for localtime
+ # timezone =
+
+ # max length of characters to apply to the "slug" field
+ # truncate_slug_length = 40
+
+ # set to 'true' to run the environment during
+ # the 'revision' command, regardless of autogenerate
+ # revision_environment = false
+
+ # set to 'true' to allow .pyc and .pyo files without
+ # a source .py file to be detected as revisions in the
+ # versions/ directory
+ # sourceless = false
+
+ # version location specification; this defaults
+ # to <script_location>/versions. When using multiple version
+ # directories, initial revisions must be specified with --version-path.
+ # The path separator used here should be the separator specified by "path_separator"
+ # below.
+ # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+ # path_separator; this indicates what character is used to split lists of file
+ # paths, including version_locations and prepend_sys_path within configparser
+ # files such as alembic.ini.
+ # The default rendered in new alembic.ini files is "os", which uses os.pathsep
+ # to provide os-dependent path splitting.
+ #
+ # Note that in order to support legacy alembic.ini files, this default does NOT
+ # take place if path_separator is not present in alembic.ini. If this
+ # option is omitted entirely, fallback logic is as follows:
+ #
+ # 1. Parsing of the version_locations option falls back to using the legacy
+ #    "version_path_separator" key, which if absent then falls back to the legacy
+ #    behavior of splitting on spaces and/or commas.
+ # 2. Parsing of the prepend_sys_path option falls back to the legacy
+ #    behavior of splitting on spaces, commas, or colons.
+ #
+ # Valid values for path_separator are:
+ #
+ # path_separator = :
+ # path_separator = ;
+ # path_separator = space
+ # path_separator = newline
+ #
+ # Use os.pathsep. Default configuration used for new projects.
+ path_separator = os
+
+
+ # set to 'true' to search source files recursively
+ # in each "version_locations" directory
+ # new in Alembic version 1.10
+ # recursive_version_locations = false
+
+ # the output encoding used when revision files
+ # are written from script.py.mako
+ # output_encoding = utf-8
+
+ # database URL. This is consumed by the user-maintained env.py script only.
+ # other means of configuring database URLs may be customized within the env.py
+ # file.
+ sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+ [post_write_hooks]
+ # post_write_hooks defines scripts or Python functions that are run
+ # on newly generated revision scripts. See the documentation for further
+ # detail and examples
+
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
+ # hooks = black
+ # black.type = console_scripts
+ # black.entrypoint = black
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+ # lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
+ # hooks = ruff
+ # ruff.type = module
+ # ruff.module = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Alternatively, use the exec runner to execute a binary found on your PATH
+ # hooks = ruff
+ # ruff.type = exec
+ # ruff.executable = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Logging configuration. This is also consumed by the user-maintained
+ # env.py script only.
+ [loggers]
+ keys = root,sqlalchemy,alembic
+
+ [handlers]
+ keys = console
+
+ [formatters]
+ keys = generic
+
+ [logger_root]
+ level = WARNING
+ handlers = console
+ qualname =
+
+ [logger_sqlalchemy]
+ level = WARNING
+ handlers =
+ qualname = sqlalchemy.engine
+
+ [logger_alembic]
+ level = INFO
+ handlers =
+ qualname = alembic
+
+ [handler_console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+
+ [formatter_generic]
+ format = %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
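
To connect the hook configuration above with the runner code in write_hooks.py: with the commented "black" example enabled, the console_scripts runner resolves the "black" entry point and executes roughly the following (a sketch; the patched_main attribute is read from the installed black distribution's entry point and is shown here as an assumption):

    import subprocess
    import sys

    revision_file = "/path/to/versions/abc123_create_table.py"  # illustrative

    # equivalent of: black.options = -l 79 REVISION_SCRIPT_FILENAME
    subprocess.run(
        [
            sys.executable,
            "-c",
            "import black; black.patched_main()",  # assumed entry point attr
            "-l",
            "79",
            revision_file,
        ]
    )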
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/env.py ADDED
@@ -0,0 +1,89 @@
+ import asyncio
+ from logging.config import fileConfig
+
+ from sqlalchemy import pool
+ from sqlalchemy.engine import Connection
+ from sqlalchemy.ext.asyncio import async_engine_from_config
+
+ from alembic import context
+
+ # this is the Alembic Config object, which provides
+ # access to the values within the .ini file in use.
+ config = context.config
+
+ # Interpret the config file for Python logging.
+ # This line sets up loggers basically.
+ if config.config_file_name is not None:
+     fileConfig(config.config_file_name)
+
+ # add your model's MetaData object here
+ # for 'autogenerate' support
+ # from myapp import mymodel
+ # target_metadata = mymodel.Base.metadata
+ target_metadata = None
+
+ # other values from the config, defined by the needs of env.py,
+ # can be acquired:
+ # my_important_option = config.get_main_option("my_important_option")
+ # ... etc.
+
+
+ def run_migrations_offline() -> None:
+     """Run migrations in 'offline' mode.
+
+     This configures the context with just a URL
+     and not an Engine, though an Engine is acceptable
+     here as well. By skipping the Engine creation
+     we don't even need a DBAPI to be available.
+
+     Calls to context.execute() here emit the given string to the
+     script output.
+
+     """
+     url = config.get_main_option("sqlalchemy.url")
+     context.configure(
+         url=url,
+         target_metadata=target_metadata,
+         literal_binds=True,
+         dialect_opts={"paramstyle": "named"},
+     )
+
+     with context.begin_transaction():
+         context.run_migrations()
+
+
+ def do_run_migrations(connection: Connection) -> None:
+     context.configure(connection=connection, target_metadata=target_metadata)
+
+     with context.begin_transaction():
+         context.run_migrations()
+
+
+ async def run_async_migrations() -> None:
+     """In this scenario we need to create an Engine
+     and associate a connection with the context.
+
+     """
+
+     connectable = async_engine_from_config(
+         config.get_section(config.config_ini_section, {}),
+         prefix="sqlalchemy.",
+         poolclass=pool.NullPool,
+     )
+
+     async with connectable.connect() as connection:
+         await connection.run_sync(do_run_migrations)
+
+     await connectable.dispose()
+
+
+ def run_migrations_online() -> None:
+     """Run migrations in 'online' mode."""
+
+     asyncio.run(run_async_migrations())
+
+
+ if context.is_offline_mode():
+     run_migrations_offline()
+ else:
+     run_migrations_online()
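
This env.py is not run directly; Alembic commands import and execute it. A minimal programmatic invocation that exercises the online path above (the ini filename is illustrative):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")   # the file rendered from alembic.ini.mako
    command.upgrade(cfg, "head")  # runs env.py, which calls asyncio.run(...)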
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/async/script.py.mako ADDED
@@ -0,0 +1,28 @@
+ """${message}
+
+ Revision ID: ${up_revision}
+ Revises: ${down_revision | comma,n}
+ Create Date: ${create_date}
+
+ """
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ ${imports if imports else ""}
+
+ # revision identifiers, used by Alembic.
+ revision: str = ${repr(up_revision)}
+ down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+ def upgrade() -> None:
+     """Upgrade schema."""
+     ${upgrades if upgrades else "pass"}
+
+
+ def downgrade() -> None:
+     """Downgrade schema."""
+     ${downgrades if downgrades else "pass"}
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/README ADDED
@@ -0,0 +1 @@
+ Generic single-database configuration.
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/alembic.ini.mako ADDED
@@ -0,0 +1,147 @@
+ # A generic, single database configuration.
+
+ [alembic]
+ # path to migration scripts.
+ # this is typically a path given in POSIX (e.g. forward slashes)
+ # format, relative to the token %(here)s which refers to the location of this
+ # ini file
+ script_location = ${script_location}
+
+ # template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+ # Uncomment the line below if you want the files to be prepended with date and time
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+ # for all available tokens
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+ # sys.path path, will be prepended to sys.path if present.
+ # defaults to the current working directory. for multiple paths, the path separator
+ # is defined by "path_separator" below.
+ prepend_sys_path = .
+
+
+ # timezone to use when rendering the date within the migration file
+ # as well as the filename.
+ # If specified, requires python>=3.9 or the backports.zoneinfo library, plus the tzdata library.
+ # Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+ # string value is passed to ZoneInfo()
+ # leave blank for localtime
+ # timezone =
+
+ # max length of characters to apply to the "slug" field
+ # truncate_slug_length = 40
+
+ # set to 'true' to run the environment during
+ # the 'revision' command, regardless of autogenerate
+ # revision_environment = false
+
+ # set to 'true' to allow .pyc and .pyo files without
+ # a source .py file to be detected as revisions in the
+ # versions/ directory
+ # sourceless = false
+
+ # version location specification; this defaults
+ # to <script_location>/versions. When using multiple version
+ # directories, initial revisions must be specified with --version-path.
+ # The path separator used here should be the separator specified by "path_separator"
+ # below.
+ # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+ # path_separator; this indicates what character is used to split lists of file
+ # paths, including version_locations and prepend_sys_path within configparser
+ # files such as alembic.ini.
+ # The default rendered in new alembic.ini files is "os", which uses os.pathsep
+ # to provide os-dependent path splitting.
+ #
+ # Note that in order to support legacy alembic.ini files, this default does NOT
+ # take place if path_separator is not present in alembic.ini. If this
+ # option is omitted entirely, fallback logic is as follows:
+ #
+ # 1. Parsing of the version_locations option falls back to using the legacy
+ #    "version_path_separator" key, which if absent then falls back to the legacy
+ #    behavior of splitting on spaces and/or commas.
+ # 2. Parsing of the prepend_sys_path option falls back to the legacy
+ #    behavior of splitting on spaces, commas, or colons.
+ #
+ # Valid values for path_separator are:
+ #
+ # path_separator = :
+ # path_separator = ;
+ # path_separator = space
+ # path_separator = newline
+ #
+ # Use os.pathsep. Default configuration used for new projects.
+ path_separator = os
+
+ # set to 'true' to search source files recursively
+ # in each "version_locations" directory
+ # new in Alembic version 1.10
+ # recursive_version_locations = false
+
+ # the output encoding used when revision files
+ # are written from script.py.mako
+ # output_encoding = utf-8
+
+ # database URL. This is consumed by the user-maintained env.py script only.
+ # other means of configuring database URLs may be customized within the env.py
+ # file.
+ sqlalchemy.url = driver://user:pass@localhost/dbname
+
+
+ [post_write_hooks]
+ # post_write_hooks defines scripts or Python functions that are run
+ # on newly generated revision scripts. See the documentation for further
+ # detail and examples
+
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
+ # hooks = black
+ # black.type = console_scripts
+ # black.entrypoint = black
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+ # lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
+ # hooks = ruff
+ # ruff.type = module
+ # ruff.module = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Alternatively, use the exec runner to execute a binary found on your PATH
+ # hooks = ruff
+ # ruff.type = exec
+ # ruff.executable = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Logging configuration. This is also consumed by the user-maintained
+ # env.py script only.
+ [loggers]
+ keys = root,sqlalchemy,alembic
+
+ [handlers]
+ keys = console
+
+ [formatters]
+ keys = generic
+
+ [logger_root]
+ level = WARNING
+ handlers = console
+ qualname =
+
+ [logger_sqlalchemy]
+ level = WARNING
+ handlers =
+ qualname = sqlalchemy.engine
+
+ [logger_alembic]
+ level = INFO
+ handlers =
+ qualname = alembic
+
+ [handler_console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+
+ [formatter_generic]
+ format = %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/env.py ADDED
@@ -0,0 +1,78 @@
+ from logging.config import fileConfig
+
+ from sqlalchemy import engine_from_config
+ from sqlalchemy import pool
+
+ from alembic import context
+
+ # this is the Alembic Config object, which provides
+ # access to the values within the .ini file in use.
+ config = context.config
+
+ # Interpret the config file for Python logging.
+ # This line sets up loggers basically.
+ if config.config_file_name is not None:
+     fileConfig(config.config_file_name)
+
+ # add your model's MetaData object here
+ # for 'autogenerate' support
+ # from myapp import mymodel
+ # target_metadata = mymodel.Base.metadata
+ target_metadata = None
+
+ # other values from the config, defined by the needs of env.py,
+ # can be acquired:
+ # my_important_option = config.get_main_option("my_important_option")
+ # ... etc.
+
+
+ def run_migrations_offline() -> None:
+     """Run migrations in 'offline' mode.
+
+     This configures the context with just a URL
+     and not an Engine, though an Engine is acceptable
+     here as well. By skipping the Engine creation
+     we don't even need a DBAPI to be available.
+
+     Calls to context.execute() here emit the given string to the
+     script output.
+
+     """
+     url = config.get_main_option("sqlalchemy.url")
+     context.configure(
+         url=url,
+         target_metadata=target_metadata,
+         literal_binds=True,
+         dialect_opts={"paramstyle": "named"},
+     )
+
+     with context.begin_transaction():
+         context.run_migrations()
+
+
+ def run_migrations_online() -> None:
+     """Run migrations in 'online' mode.
+
+     In this scenario we need to create an Engine
+     and associate a connection with the context.
+
+     """
+     connectable = engine_from_config(
+         config.get_section(config.config_ini_section, {}),
+         prefix="sqlalchemy.",
+         poolclass=pool.NullPool,
+     )
+
+     with connectable.connect() as connection:
+         context.configure(
+             connection=connection, target_metadata=target_metadata
+         )
+
+         with context.begin_transaction():
+             context.run_migrations()
+
+
+ if context.is_offline_mode():
+     run_migrations_offline()
+ else:
+     run_migrations_online()
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/generic/script.py.mako ADDED
@@ -0,0 +1,28 @@
+ """${message}
+
+ Revision ID: ${up_revision}
+ Revises: ${down_revision | comma,n}
+ Create Date: ${create_date}
+
+ """
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ ${imports if imports else ""}
+
+ # revision identifiers, used by Alembic.
+ revision: str = ${repr(up_revision)}
+ down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+ def upgrade() -> None:
+     """Upgrade schema."""
+     ${upgrades if upgrades else "pass"}
+
+
+ def downgrade() -> None:
+     """Downgrade schema."""
+     ${downgrades if downgrades else "pass"}
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/README ADDED
@@ -0,0 +1,12 @@
+ Rudimentary multi-database configuration.
+
+ Multi-DB isn't vastly different from generic. The primary difference is that it
+ will run the migrations N times (depending on how many databases you have
+ configured), providing one engine name and associated context for each run.
+
+ That engine name will then allow the migration to restrict what runs within it to
+ just the appropriate migrations for that engine. You can see this behavior within
+ the mako template.
+
+ In the provided configuration, you'll need `databases` listed in alembic's
+ config, and an `sqlalchemy.url` entry for each engine name.
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/alembic.ini.mako ADDED
@@ -0,0 +1,155 @@
+ # A multi-database configuration.
+
+ [alembic]
+ # path to migration scripts.
+ # this is typically a path given in POSIX (e.g. forward slashes)
+ # format, relative to the token %(here)s which refers to the location of this
+ # ini file
+ script_location = ${script_location}
+
+ # template used to generate migration file names; the default value is %%(rev)s_%%(slug)s
+ # Uncomment the line below if you want the files to be prepended with date and time
+ # see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
+ # for all available tokens
+ # file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
+
+ # sys.path path, will be prepended to sys.path if present.
+ # defaults to the current working directory. for multiple paths, the path separator
+ # is defined by "path_separator" below.
+ prepend_sys_path = .
+
+ # timezone to use when rendering the date within the migration file
+ # as well as the filename.
+ # If specified, requires python>=3.9 or the backports.zoneinfo library, plus the tzdata library.
+ # Any required deps can be installed by adding `alembic[tz]` to the pip requirements
+ # string value is passed to ZoneInfo()
+ # leave blank for localtime
+ # timezone =
+
+ # max length of characters to apply to the "slug" field
+ # truncate_slug_length = 40
+
+ # set to 'true' to run the environment during
+ # the 'revision' command, regardless of autogenerate
+ # revision_environment = false
+
+ # set to 'true' to allow .pyc and .pyo files without
+ # a source .py file to be detected as revisions in the
+ # versions/ directory
+ # sourceless = false
+
+ # version location specification; this defaults
+ # to <script_location>/versions. When using multiple version
+ # directories, initial revisions must be specified with --version-path.
+ # The path separator used here should be the separator specified by "path_separator"
+ # below.
+ # version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
+
+ # path_separator; this indicates what character is used to split lists of file
+ # paths, including version_locations and prepend_sys_path within configparser
+ # files such as alembic.ini.
+ # The default rendered in new alembic.ini files is "os", which uses os.pathsep
+ # to provide os-dependent path splitting.
+ #
+ # Note that in order to support legacy alembic.ini files, this default does NOT
+ # take place if path_separator is not present in alembic.ini. If this
+ # option is omitted entirely, fallback logic is as follows:
+ #
+ # 1. Parsing of the version_locations option falls back to using the legacy
+ #    "version_path_separator" key, which if absent then falls back to the legacy
+ #    behavior of splitting on spaces and/or commas.
+ # 2. Parsing of the prepend_sys_path option falls back to the legacy
+ #    behavior of splitting on spaces, commas, or colons.
+ #
+ # Valid values for path_separator are:
+ #
+ # path_separator = :
+ # path_separator = ;
+ # path_separator = space
+ # path_separator = newline
+ #
+ # Use os.pathsep. Default configuration used for new projects.
+ path_separator = os
+
+ # set to 'true' to search source files recursively
+ # in each "version_locations" directory
+ # new in Alembic version 1.10
+ # recursive_version_locations = false
+
+ # the output encoding used when revision files
+ # are written from script.py.mako
+ # output_encoding = utf-8
+
+ # for multiple database configuration, new named sections are added
+ # which each include a distinct ``sqlalchemy.url`` entry. A custom value
+ # ``databases`` is added which indicates a listing of the per-database sections.
+ # The ``databases`` entry as well as the URLs present in the ``[engine1]``
+ # and ``[engine2]`` sections continue to be consumed by the user-maintained env.py
+ # script only.
+
+ databases = engine1, engine2
+
+ [engine1]
+ sqlalchemy.url = driver://user:pass@localhost/dbname
+
+ [engine2]
+ sqlalchemy.url = driver://user:pass@localhost/dbname2
+
+ [post_write_hooks]
+ # post_write_hooks defines scripts or Python functions that are run
+ # on newly generated revision scripts. See the documentation for further
+ # detail and examples
+
+ # format using "black" - use the console_scripts runner, against the "black" entrypoint
+ # hooks = black
+ # black.type = console_scripts
+ # black.entrypoint = black
+ # black.options = -l 79 REVISION_SCRIPT_FILENAME
+
+ # lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
+ # hooks = ruff
+ # ruff.type = module
+ # ruff.module = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Alternatively, use the exec runner to execute a binary found on your PATH
+ # hooks = ruff
+ # ruff.type = exec
+ # ruff.executable = ruff
+ # ruff.options = check --fix REVISION_SCRIPT_FILENAME
+
+ # Logging configuration. This is also consumed by the user-maintained
+ # env.py script only.
+ [loggers]
+ keys = root,sqlalchemy,alembic
+
+ [handlers]
+ keys = console
+
+ [formatters]
+ keys = generic
+
+ [logger_root]
+ level = WARNING
+ handlers = console
+ qualname =
+
+ [logger_sqlalchemy]
+ level = WARNING
+ handlers =
+ qualname = sqlalchemy.engine
+
+ [logger_alembic]
+ level = INFO
+ handlers =
+ qualname = alembic
+
+ [handler_console]
+ class = StreamHandler
+ args = (sys.stderr,)
+ level = NOTSET
+ formatter = generic
+
+ [formatter_generic]
+ format = %(levelname)-5.5s [%(name)s] %(message)s
+ datefmt = %H:%M:%S
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/env.py ADDED
@@ -0,0 +1,140 @@
+ import logging
+ from logging.config import fileConfig
+ import re
+
+ from sqlalchemy import engine_from_config
+ from sqlalchemy import pool
+
+ from alembic import context
+
+ USE_TWOPHASE = False
+
+ # this is the Alembic Config object, which provides
+ # access to the values within the .ini file in use.
+ config = context.config
+
+ # Interpret the config file for Python logging.
+ # This line sets up loggers basically.
+ if config.config_file_name is not None:
+     fileConfig(config.config_file_name)
+ logger = logging.getLogger("alembic.env")
+
+ # gather section names referring to different
+ # databases. These are named "engine1", "engine2"
+ # in the sample .ini file.
+ db_names = config.get_main_option("databases", "")
+
+ # add your model's MetaData objects here
+ # for 'autogenerate' support. These must be set
+ # up to hold just those tables targeting a
+ # particular database. table.tometadata() may be
+ # helpful here in case a "copy" of
+ # a MetaData is needed.
+ # from myapp import mymodel
+ # target_metadata = {
+ #     'engine1':mymodel.metadata1,
+ #     'engine2':mymodel.metadata2
+ # }
+ target_metadata = {}
+
+ # other values from the config, defined by the needs of env.py,
+ # can be acquired:
+ # my_important_option = config.get_main_option("my_important_option")
+ # ... etc.
+
+
+ def run_migrations_offline() -> None:
+     """Run migrations in 'offline' mode.
+
+     This configures the context with just a URL
+     and not an Engine, though an Engine is acceptable
+     here as well. By skipping the Engine creation
+     we don't even need a DBAPI to be available.
+
+     Calls to context.execute() here emit the given string to the
+     script output.
+
+     """
+     # for the --sql use case, run migrations for each URL into
+     # individual files.
+
+     engines = {}
+     for name in re.split(r",\s*", db_names):
+         engines[name] = rec = {}
+         rec["url"] = context.config.get_section_option(name, "sqlalchemy.url")
+
+     for name, rec in engines.items():
+         logger.info("Migrating database %s" % name)
+         file_ = "%s.sql" % name
+         logger.info("Writing output to %s" % file_)
+         with open(file_, "w") as buffer:
+             context.configure(
+                 url=rec["url"],
+                 output_buffer=buffer,
+                 target_metadata=target_metadata.get(name),
+                 literal_binds=True,
+                 dialect_opts={"paramstyle": "named"},
+             )
+             with context.begin_transaction():
+                 context.run_migrations(engine_name=name)
+
+
+ def run_migrations_online() -> None:
+     """Run migrations in 'online' mode.
+
+     In this scenario we need to create an Engine
+     and associate a connection with the context.
+
+     """
+
+     # for the direct-to-DB use case, start a transaction on all
+     # engines, then run all migrations, then commit all transactions.
+
+     engines = {}
+     for name in re.split(r",\s*", db_names):
+         engines[name] = rec = {}
+         rec["engine"] = engine_from_config(
+             context.config.get_section(name, {}),
+             prefix="sqlalchemy.",
+             poolclass=pool.NullPool,
+         )
+
+     for name, rec in engines.items():
+         engine = rec["engine"]
+         rec["connection"] = conn = engine.connect()
+
+         if USE_TWOPHASE:
+             rec["transaction"] = conn.begin_twophase()
+         else:
+             rec["transaction"] = conn.begin()
+
+     try:
+         for name, rec in engines.items():
+             logger.info("Migrating database %s" % name)
+             context.configure(
+                 connection=rec["connection"],
+                 upgrade_token="%s_upgrades" % name,
+                 downgrade_token="%s_downgrades" % name,
+                 target_metadata=target_metadata.get(name),
+             )
+             context.run_migrations(engine_name=name)
+
+         if USE_TWOPHASE:
+             for rec in engines.values():
+                 rec["transaction"].prepare()
+
+         for rec in engines.values():
+             rec["transaction"].commit()
+     except:
+         for rec in engines.values():
+             rec["transaction"].rollback()
+         raise
+     finally:
+         for rec in engines.values():
+             rec["connection"].close()
+
+
+ if context.is_offline_mode():
+     run_migrations_offline()
+ else:
+     run_migrations_online()
Scripts_RSCM_sim_growth_n_climate_to_Yield/.venv/lib/python3.10/site-packages/alembic/templates/multidb/script.py.mako ADDED
@@ -0,0 +1,51 @@
+ <%!
+ import re
+
+ %>"""${message}
+
+ Revision ID: ${up_revision}
+ Revises: ${down_revision | comma,n}
+ Create Date: ${create_date}
+
+ """
+ from typing import Sequence, Union
+
+ from alembic import op
+ import sqlalchemy as sa
+ ${imports if imports else ""}
+
+ # revision identifiers, used by Alembic.
+ revision: str = ${repr(up_revision)}
+ down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
+ branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+ def upgrade(engine_name: str) -> None:
+     """Upgrade schema."""
+     globals()["upgrade_%s" % engine_name]()
+
+
+ def downgrade(engine_name: str) -> None:
+     """Downgrade schema."""
+     globals()["downgrade_%s" % engine_name]()
+
+ <%
+     db_names = config.get_main_option("databases")
+ %>
+
+ ## generate an "upgrade_<xyz>() / downgrade_<xyz>()" function
+ ## for each database name in the ini file.
+
+ % for db_name in re.split(r',\s*', db_names):
+
+ def upgrade_${db_name}() -> None:
+     """Upgrade ${db_name} schema."""
+     ${context.get("%s_upgrades" % db_name, "pass")}
+
+
+ def downgrade_${db_name}() -> None:
+     """Downgrade ${db_name} schema."""
+     ${context.get("%s_downgrades" % db_name, "pass")}
+
+ % endfor
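
For reference, with "databases = engine1, engine2" the template above renders revision files of roughly the following shape (revision identifiers and bodies omitted; names illustrative); the engine_name that env.py passes via context.run_migrations(engine_name=name) selects the per-database function:

    def upgrade(engine_name: str) -> None:
        """Upgrade schema."""
        globals()["upgrade_%s" % engine_name]()


    def downgrade(engine_name: str) -> None:
        """Downgrade schema."""
        globals()["downgrade_%s" % engine_name]()


    def upgrade_engine1() -> None:
        """Upgrade engine1 schema."""
        pass


    def downgrade_engine1() -> None:
        """Downgrade engine1 schema."""
        pass


    def upgrade_engine2() -> None:
        """Upgrade engine2 schema."""
        pass


    def downgrade_engine2() -> None:
        """Downgrade engine2 schema."""
        pass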