Repository: tbicr/django-pg-zero-downtime-migrations Branch: master Commit: 1018d52cf2b0 Files: 257 Total size: 424.2 KB Directory structure: gitextract_4w34wnk2/ ├── .github/ │ ├── FUNDING.yml │ ├── ISSUE_TEMPLATE/ │ │ └── bug_report.md │ └── workflows/ │ ├── check.yml │ └── publish.yml ├── .gitignore ├── .pre-commit-config.yaml ├── AUTHORS ├── CHANGES.md ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── README.md ├── django_zero_downtime_migrations/ │ ├── __init__.py │ └── backends/ │ ├── __init__.py │ ├── postgis/ │ │ ├── __init__.py │ │ ├── base.py │ │ └── schema.py │ └── postgres/ │ ├── __init__.py │ ├── base.py │ └── schema.py ├── docker-compose.yml ├── docker_postgres_init.sql ├── manage.py ├── setup.cfg ├── setup.py ├── tests/ │ ├── __init__.py │ ├── apps/ │ │ ├── __init__.py │ │ ├── bad_flow_add_column_with_default_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_add_field_default.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_flow_add_column_with_notnull_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_add_field_notnull.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_flow_add_column_with_notnull_default_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_add_field_notnull_default.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_flow_change_char_type_that_unsafe_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_change_type_from_char120_to_char100.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_rollback_flow_change_char_type_that_safe_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_change_type_safe_from_char100_to_char120.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_rollback_flow_drop_column_with_notnull_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_drop_field_not_null.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── bad_rollback_flow_drop_column_with_notnull_default_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_drop_field_not_null_default.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── decimal_to_float_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_type_conversion.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_alter_table_with_same_db_table/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_rename_model.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_add_nullable_field.py │ │ │ │ ├── 0003_set_field_default.py │ │ │ │ ├── 0004_set_field_not_null.py │ │ │ │ ├── 0005_drop_field_not_null.py │ │ │ │ ├── 0006_drop_field_default.py │ │ │ │ ├── 0007_drop_field.py │ │ │ │ ├── 0008_add_field_with_check_constraint.py │ │ │ │ ├── 0009_drop_field_with_check_constraint.py │ │ │ │ ├── 0010_add_field_with_foreign_key.py │ │ │ │ ├── 0011_drop_field_with_foreign_key.py │ │ │ │ ├── 0012_add_field_with_unique_constraint.py │ │ │ │ ├── 0013_drop_field_with_unique_constraint.py │ │ │ │ ├── 0014_add_field_with_index.py │ │ │ │ ├── 0015_drop_field_with_index.py │ │ │ │ ├── 0016_add_check_constraint.py │ │ │ │ ├── 0017_drop_check_constraint.py │ │ │ │ ├── 0018_add_unique_constraint.py │ │ │ │ ├── 0019_drop_unique_constraint.py │ │ │ │ ├── 0020_add_unique_constraint_with_condition.py │ │ │ │ ├── 0021_drop_unique_constraint_with_condition.py │ │ │ │ ├── 0022_add_index.py │ │ │ │ ├── 0023_drop_index.py │ │ │ │ ├── 0024_add_index_with_condition.py │ │ │ │ ├── 0025_drop_index_with_condition.py │ │ │ │ ├── 0026_add_brin_index.py │ │ │ │ ├── 0027_drop_brin_index.py │ │ │ │ ├── 0028_add_brin_index_with_condition.py │ │ │ │ ├── 0029_drop_brin_index_with_condition.py │ │ │ │ ├── 0030_add_btree_index.py │ │ │ │ ├── 0031_drop_btree_index.py │ │ │ │ ├── 0032_add_btree_index_with_condition.py │ │ │ │ ├── 0033_drop_btree_index_with_condition.py │ │ │ │ ├── 0034_add_gin_index.py │ │ │ │ ├── 0035_drop_gin_index.py │ │ │ │ ├── 0036_add_gin_index_with_condition.py │ │ │ │ ├── 0037_drop_gin_index_with_condition.py │ │ │ │ ├── 0038_add_gist_index.py │ │ │ │ ├── 0039_drop_gist_index.py │ │ │ │ ├── 0040_add_gist_index_with_condition.py │ │ │ │ ├── 0041_drop_gist_index_with_condition.py │ │ │ │ ├── 0042_add_hash_index.py │ │ │ │ ├── 0043_drop_hash_index.py │ │ │ │ ├── 0044_add_hash_index_with_condition.py │ │ │ │ ├── 0045_drop_hash_index_with_condition.py │ │ │ │ ├── 0046_add_spgist_index.py │ │ │ │ ├── 0047_drop_spgist_index.py │ │ │ │ ├── 0048_add_spgist_index_with_condition.py │ │ │ │ ├── 0049_drop_spgist_index_with_condition.py │ │ │ │ ├── 0050_add_unique_constraint_deferrable.py │ │ │ │ ├── 0051_drop_unique_constraint_deferrable.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_app_concurrently/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_auto_20191210_2147.py │ │ │ │ ├── 0003_auto_20191210_2148.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_drop_column_with_constraints/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_remove_testtablemain_drop_col_u1_and_more.py │ │ │ │ ├── 0003_remove_testtablemain_field_i7.py │ │ │ │ ├── 0004_remove_testtablemain_field_i6.py │ │ │ │ ├── 0005_remove_testtablemain_field_i5.py │ │ │ │ ├── 0006_remove_testtablemain_field_i4.py │ │ │ │ ├── 0007_remove_testtablemain_field_i3.py │ │ │ │ ├── 0008_remove_testtablemain_field_i2.py │ │ │ │ ├── 0009_remove_testtablemain_field_i1.py │ │ │ │ ├── 0010_remove_testtablemain_field_u7.py │ │ │ │ ├── 0011_remove_testtablemain_field_u6.py │ │ │ │ ├── 0012_remove_testtablemain_field_u5.py │ │ │ │ ├── 0013_remove_testtablemain_field_u4.py │ │ │ │ ├── 0014_remove_testtablemain_field_u3.py │ │ │ │ ├── 0015_remove_testtablemain_field_u2.py │ │ │ │ ├── 0016_remove_testtablemain_field_u1.py │ │ │ │ ├── 0017_remove_testtablemain_main_id.py │ │ │ │ ├── 0018_remove_testtablemain_parent.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_drop_column_with_constraints_old/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_remove_testtablemain_drop_col_u2_and_more.py │ │ │ │ ├── 0003_remove_testtablemain_field_i7.py │ │ │ │ ├── 0004_remove_testtablemain_field_i6.py │ │ │ │ ├── 0005_remove_testtablemain_field_i5.py │ │ │ │ ├── 0006_remove_testtablemain_field_i4.py │ │ │ │ ├── 0007_remove_testtablemain_field_i3.py │ │ │ │ ├── 0008_remove_testtablemain_field_i2.py │ │ │ │ ├── 0009_remove_testtablemain_field_i1.py │ │ │ │ ├── 0010_remove_testtablemain_field_u7.py │ │ │ │ ├── 0011_remove_testtablemain_field_u5.py │ │ │ │ ├── 0012_remove_testtablemain_field_u2.py │ │ │ │ ├── 0013_remove_testtablemain_main_id.py │ │ │ │ ├── 0014_remove_testtablemain_parent.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── good_flow_drop_table_with_constraints/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_remove_testtablechild_main.py │ │ │ │ ├── 0003_delete_testtablemain.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_auto_field_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_check_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_idempotency_add_check_app_relatedtesttable_check.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_column_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_test_field_str.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_column_foreign_key_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_test_model.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_column_one_to_one_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_test_model.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_foreign_key_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_index_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_index_meta_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_relatedtesttable_idx.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_one_to_one_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_primary_key_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_remove_relatedtesttable_id_and_more.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_unique_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_add_unique_meta_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_relatedtesttable_uniq.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_create_table_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_relatedtesttable_and_more.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ ├── idempotency_set_not_null_app/ │ │ │ ├── __init__.py │ │ │ ├── migrations/ │ │ │ │ ├── 0001_initial.py │ │ │ │ ├── 0002_alter_relatedtesttable_test_field_int.py │ │ │ │ └── __init__.py │ │ │ └── models.py │ │ └── old_notnull_check_constraint_migration_app/ │ │ ├── __init__.py │ │ ├── migrations/ │ │ │ ├── 0001_initial.py │ │ │ └── __init__.py │ │ └── models.py │ ├── integration/ │ │ ├── __init__.py │ │ └── test_migrations.py │ ├── settings.py │ ├── settings_make_migrations.py │ └── unit/ │ ├── __init__.py │ └── test_schema.py └── tox.ini ================================================ FILE CONTENTS ================================================ ================================================ FILE: .github/FUNDING.yml ================================================ github: tbicr ================================================ FILE: .github/ISSUE_TEMPLATE/bug_report.md ================================================ --- name: Bug report about: Create a report to help us improve title: '' labels: '' assignees: '' --- **Describe the bug** **To Reproduce** 1. What model did you have? 2. How did you change the model? 3. What migration were generated? 4. What SQL was executed? 5. What issue did you get? **Expected behavior** **Versions:** - Postgres: - Python: - Django: - django-pg-zero-downtime-migrations library: ================================================ FILE: .github/workflows/check.yml ================================================ name: Check on: [push, pull_request] jobs: build: runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@v2 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: build and push uses: docker/build-push-action@v6 with: tags: django-pg-zero-downtime-migrations:latest outputs: type=docker,dest=${{ runner.temp }}/django-pg-zero-downtime-migrations-image.tar cache-from: type=gha cache-to: type=gha,mode=max - name: Upload artifact uses: actions/upload-artifact@v4 with: name: django-pg-zero-downtime-migrations-image path: ${{ runner.temp }}/django-pg-zero-downtime-migrations-image.tar check: needs: build strategy: fail-fast: false matrix: tox-filter: - "py3.8" - "py3.9" - "py3.10" - "py3.11" - "py3.12" - "py3.13" name: run checks ${{ matrix.tox-filter }} runs-on: ubuntu-latest steps: - name: checkout code uses: actions/checkout@v2 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Download artifact uses: actions/download-artifact@v4 with: name: django-pg-zero-downtime-migrations-image path: ${{ runner.temp }} - name: Load Docker image run: | docker load --input ${{ runner.temp }}/django-pg-zero-downtime-migrations-image.tar docker image ls -a - name: pull DB images run: docker compose pull --quiet --ignore-buildable - name: run checks run: docker compose run --rm django-pg-zero-downtime-migrations-tests tox -f ${{ matrix.tox-filter }} ================================================ FILE: .github/workflows/publish.yml ================================================ name: Build and Publish on: workflow_dispatch jobs: build-and-publish: name: build and publish package runs-on: ubuntu-latest steps: - name: checkout code uses: actions/checkout@v2 - name: set up python uses: actions/setup-python@v2 with: python-version: '3.x' - name: install pypa/build run: python -m pip install build --user - name: build a binary wheel and a source tarball run: python -m build --sdist --wheel --outdir dist/ - name: publish distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: password: ${{ secrets.PYPI_API_TOKEN }} ================================================ FILE: .gitignore ================================================ # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # pyenv .python-version # celery beat schedule file celerybeat-schedule # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ ================================================ FILE: .pre-commit-config.yaml ================================================ - repo: https://github.com/pre-commit/mirrors-prettier rev: "" # Use the sha or tag you want to point at hooks: - id: prettier ================================================ FILE: AUTHORS ================================================ maintainer: Paveł Tyślacki ================================================ FILE: CHANGES.md ================================================ # django-pg-zero-downtime-migrations Changelog ## 0.19 - added django 5.2 support - parallelize CI to run one job per Python version ## 0.18 - fixed unique constraint creation with the `deferrable` parameter - split CI into smaller jobs ## 0.17 - added django 5.1 support - added python 3.13 support - added postgres 17 support - marked postgres 12 support as deprecated - marked postgres 13 support as deprecated - dropped django 3.2 support - dropped django 4.0 support - dropped django 4.1 support - dropped python 3.6 support - dropped python 3.7 support - dropped `migrate_isnotnull_check_constraints` command ## 0.16 - changed `ADD COLUMN DEFAULT NULL` to a safe operation for code defaults - changed `ADD COLUMN DEFAULT NOT NULL` to a safe operation for `db_default` in django 5.0+ - added the `ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT` setting and changed `ADD COLUMN DEFAULT NOT NULL` with this setting to a safe operation for django < 5.0 - added the `ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP` setting and enabled dropping constraints and indexes before dropping a column or table - fixed `sqlmigrate` in idempotent mode - fixed unique constraint creation with the `include` parameter - fixed idempotent mode tests - updated unsafe migration links to the documentation - updated patched code to the latest django version - updated test image to ubuntu 24.04 - improved README ## 0.15 - added idempotent mode and the `ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL` setting - fixed django 3.2 degradation due to the missing `skip_default_on_alter` method - improved README - updated the release github action ## 0.14 - fixed deferred sql errors - added django 5.0 support - added python 3.12 support - added postgres 16 support - dropped postgres 11 support - removed the `ZERO_DOWNTIME_MIGRATIONS_USE_NOT_NULL` setting - marked the `migrate_isnotnull_check_constraints` command as deprecated ## 0.13 - added django 4.2 support - marked django 3.2 support as deprecated - marked django 4.0 support as deprecated - marked django 4.1 support as deprecated - marked postgres 11 support as deprecated - dropped postgres 10 support - updated the test docker image to ubuntu 22.04 ## 0.12 - added support for `serial` and `integer`, `bigserial` and `bigint`, as well as `smallserial` and `smallint`, implementing the same type changes as safe migrations - fixed the `AutoField` type change and concurrent insertion issue for django < 4.1 - added sequence dropping and creation timeouts, as they can be used with the `CASCADE` keyword and may affect other tables - added django 4.1 support - added python 3.11 support - added postgres 15 support - marked postgres 10 support as deprecated - dropped django 2.2 support - dropped django 3.0 support - dropped django 3.1 support - dropped postgres 9.5 support - dropped postgres 9.6 support - added github actions checks for pull requests ## 0.11 - fixed an issue where renaming a model while keeping `db_table` raised an `ALTER_TABLE_RENAME` error (#26) - added django 3.2 support - added django 4.0 support - added python 3.9 support - added python 3.10 support - added postgres 14 support - marked django 2.2 support as deprecated - marked django 3.0 support as deprecated - marked django 3.1 support as deprecated - marked python 3.6 support as deprecated - marked python 3.7 support as deprecated - marked postgres 9.5 support as deprecated - marked postgres 9.6 support as deprecated - switched to github actions for testing ## 0.10 - added django 3.1 support - added postgres 13 support - dropped python 3.5 support - updated the test environment ## 0.9 - fixed the decimal-to-float migration error - fixed tests for django 3.0.2 and later ## 0.8 - added django 3.0 support - added support for concurrent index creation and removal operations - added support for exclude constraints as an unsafe operation - dropped postgres 9.4 support - dropped django 2.0 support - dropped django 2.1 support - removed the deprecated `django_zero_downtime_migrations_postgres_backend` module ## 0.7 - added python 3.8 support - added support for postgres-specific indexes - improved test clarity - fixed regexp escaping warnings in the management command - fixed style checks - improved README - marked python 3.5 support as deprecated - marked postgres 9.4 support as deprecated - marked django 2.0 support as deprecated - marked django 2.1 support as deprecated ## 0.6 - marked the `ZERO_DOWNTIME_MIGRATIONS_USE_NOT_NULL` option as deprecated for postgres 12+ - added a management command for migrating from a `CHECK IS NOT NULL` constraint to a real `NOT NULL` constraint - added integration tests for postgres 12, postgres 11 (root), postgres 11 with compatible not null constraints, postgres 11 with standard not null constraints, as well as postgres 10, 9.6, 9.5, 9.4, and postgis databases - fixed bugs related to the deletion and creation of compatible check not null constraints via `pg_attribute` - minimized side effects with deferred sql execution between operations in one migration module - added safe `NOT NULL` constraint creation for postgres 12 - added safe `NOT NULL` constraint creation for extra permissions for `pg_catalog.pg_attribute` when the `ZERO_DOWNTIME_MIGRATIONS_USE_NOT_NULL=USE_PG_ATTRIBUTE_UPDATE_FOR_SUPERUSER` option is enabled - marked `AddField` with the `null=False` parameter and the compatible `CHECK IS NOT NULL` constraint option as an unsafe operation, ignoring the `ZERO_DOWNTIME_MIGRATIONS_USE_NOT_NULL` value in this case - added versioning to the package - fixed pypi README image links - improved README ## 0.5 - extracted zero-downtime-schema logic into a mixin to allow using it with other backends - moved the module from `django_zero_downtime_migrations_postgres_backend` to `django_zero_downtime_migrations.backends.postgres` - marked the `django_zero_downtime_migrations_postgres_backend` module as deprecated - added support for the postgis backend - improved README ## 0.4 - changed the defaults for `ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT` and `ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT` from `0ms` to `None` to match the default django behavior that respects postgres timeouts - updated the documentation with option defaults - updated the documentation with best practices for option usage - fixed the issue where adding a nullable field with a default did not raise an error or warning - added links to the documentation describing the issue and safe alternative usage for errors and warnings - updated the documentation with type casting workarounds ## 0.3 - added django 2.2 support with the `Meta.indexes` and `Meta.constraints` attributes - fixed python deprecation warnings for regular expressions - removed the unused `TimeoutException` - improved README and PYPI description ## 0.2 - added an option to disable `statement_timeout` for long operations, such as index creation and constraint validation, when `statement_timeout` is set globally ## 0.1.1 - added long description content type ## 0.1 - replaced default sql queries with safer alternatives - added options for `statement_timeout` and `lock_timeout` - added an option for `NOT NULL` constraint behavior - added an option for restricting unsafe operations ================================================ FILE: Dockerfile ================================================ FROM --platform=linux/amd64 ubuntu:24.04 ENV LC_ALL=C.UTF-8 ENV LANG=C.UTF-8 ENV DEBIAN_FRONTEND=noninteractive RUN apt-get update && \ apt-get install -q -y --no-install-recommends software-properties-common git gpg-agent curl && \ add-apt-repository ppa:deadsnakes/ppa && \ echo "deb http://apt.postgresql.org/pub/repos/apt noble-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \ curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc > /etc/apt/trusted.gpg.d/apt.postgresql.org.asc && \ apt-get update && \ apt-get install -q -y --no-install-recommends \ python3.8 python3.8-distutils \ python3.9 python3.9-distutils \ python3.10 python3.10-distutils \ python3.11 python3.11-distutils \ python3.12 \ python3.13 \ python3-pip \ libgdal34 \ postgresql-client-17 && \ rm -rf /var/lib/apt/lists/* && \ pip3 install --break-system-packages setuptools tox ADD . /app WORKDIR /app CMD ["/bin/bash"] ================================================ FILE: LICENSE ================================================ MIT License Copyright (c) 2018 Paveł Tyślacki Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ================================================ FILE: MANIFEST.in ================================================ include README.md include CHANGES.md include AUTHORS include LICENSE global-exclude __pycache__ global-exclude *.py[co] ================================================ FILE: README.md ================================================ [![PyPI](https://img.shields.io/pypi/v/django-pg-zero-downtime-migrations.svg)](https://pypi.org/project/django-pg-zero-downtime-migrations/) ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/django-pg-zero-downtime-migrations.svg) ![PyPI - Django Version](https://img.shields.io/pypi/djversions/django-pg-zero-downtime-migrations.svg?label=django) ![Postgres Version](https://img.shields.io/badge/postgres-12%20|%2013%20|%2014%20|%2015%20|%2016%20|%2017%20-blue.svg) [![PyPI - License](https://img.shields.io/pypi/l/django-pg-zero-downtime-migrations.svg)](https://raw.githubusercontent.com/tbicr/django-pg-zero-downtime-migrations/master/LICENSE) [![PyPI - Downloads](https://img.shields.io/pypi/dm/django-pg-zero-downtime-migrations.svg)](https://pypistats.org/packages/django-pg-zero-downtime-migrations) [![GitHub last commit](https://img.shields.io/github/last-commit/tbicr/django-pg-zero-downtime-migrations/master.svg)](https://github.com/tbicr/django-pg-zero-downtime-migrations/commits/master) [![Build Status](https://github.com/tbicr/django-pg-zero-downtime-migrations/actions/workflows/check.yml/badge.svg?branch=master)](https://github.com/tbicr/django-pg-zero-downtime-migrations/actions) # django-pg-zero-downtime-migrations Django postgresql backend that apply migrations with respect to database locks. ## Installation pip install django-pg-zero-downtime-migrations ## Usage To enable zero downtime migrations for postgres just setup django backend provided by this package and add most safe settings: DATABASES = { 'default': { 'ENGINE': 'django_zero_downtime_migrations.backends.postgres', #'ENGINE': 'django_zero_downtime_migrations.backends.postgis', ... } } ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT = '2s' ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT = '2s' ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT = True ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE = True > _NOTE:_ this backend brings zero downtime improvements only for migrations (schema and `RunSQL` operations, but not for `RunPython` operation), for other purpose it works the same as standard django backend. > _NOTE:_ this package is in beta, please check your migrations SQL before applying on production and submit issue for any question. ### Differences with standard django backend This backend provides same result state (except of `ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT=True` usage for django < 5.0), but different way and with additional guarantees for avoiding stuck table locks. This backend doesn't use transactions for migrations (except `RunPython` operation), because not all SQL fixes can be run in transaction and it allows to avoid deadlocks for complex migration. So when your migration will down in the middle of migration file operations you need to fix db state manually (instead potential downtime). For that reason good practice to make migration modules small as possible. Also `ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True` allows to automate manual db state fixing. ### Deployment flow There are requirements for zero downtime deployment: 1. We have one database; 1. We have several instances with application - application always should be available, even you restart one of instances; 1. We have balancer before instances; 1. Our application works fine before, on and after migration - old application works fine with old and new database schema version; 1. Our application works fine before, on and after instance updating - old and new application versions work fine with new database schema version. ![deployment timeline](images/timeline.png "deployment timeline") Flow: 1. apply migrations 1. disconnect instance form balancer, restart it and back to balancer - repeat this operation one by one for all instances If our deployment don't satisfy zero downtime deployment rules, then we split it to smaller deployments. ![deployment flow](images/deployment.gif "deployment flow") ### Settings #### ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT Apply [`lock_timeout`](https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-LOCK-TIMEOUT) for SQL statements that require `ACCESS EXCLUSIVE` lock, default `None`: ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT = '2s' Allowed values: - `None` - current postgres setting used - other - timeout will be applied, `0` and equivalents mean that timeout will be disabled #### ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT Apply [`statement_timeout`](https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-STATEMENT-TIMEOUT) for SQL statements that require `ACCESS EXCLUSIVE` lock, default `None`: ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT = '2s' Allowed values: - `None` - current postgres setting used - other - timeout will be applied, `0` and equivalents mean that timeout will be disabled #### ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT Set [`statement_timeout`](https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-STATEMENT-TIMEOUT) to `0ms` for SQL statements that require `SHARE UPDATE EXCLUSIVE` lock that useful in case when `statement_timeout` enabled globally and you try run long-running operations like index creation or constraint validation, default `False`: ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT = True #### ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE Enabled option doesn't allow run potential unsafe migration, default `False`: ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE = True #### ZERO_DOWNTIME_DEFERRED_SQL Define way to apply deferred sql, default `True`: ZERO_DOWNTIME_DEFERRED_SQL = True Allowed values: - `True` - run deferred sql similar to default django way - `False` - run deferred sql as soon as possible #### ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL Define idempotent mode, default `False`: ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL = False Allowed values: - `True` - skip already applied sql migrations - `False` - standard non atomic django behaviour As this backend doesn't use transactions for migrations any failed migration can be cause of stopped process in intermediate state. To avoid manual schema manipulation idempotent mode allows to rerun failed migration after fixed issue (eg. data issue or long running CRUD queries). > _NOTE:_ idempotent mode checks rely only on name and index and constraint valid state, so it can ignore name collisions and recommended do not use it for CI checks. #### ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP Define way to drop foreign key, unique constraints and indexes before drop table or column, default `True`: ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP = True Allowed values: - `True` - before dropping table drop all foreign keys related to this table and before dropping column drop all foreign keys related to this column, unique constraints on this column and indexes used this column. - `False` - standard django behaviour that will drop constraints with `CASCADE` mode (some constraints can be dropped explicitly too). Explicitly dropping constraints and indexes before dropping tables or columns allows for splitting schema-only changes with an `ACCESS EXCLUSIVE` lock and the deletion of physical files, which can take significant time and cause downtime. #### ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT Define way keep or drop code defaults on database level when adding new column, default `False`: ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT = False Allowed values: - `True` - after adding column with code default this default will not be dropped, this option allows to use `ALTER TABLE ADD COLUMN SET DEFAULT NOT NULL` as safe operation that much more simple and efficient than creating column without default on database level and populating column next - `False` - after adding column with code default this default will be dropped, this is standard django behaviour > _NOTE:_ this option works only for django < 5.0, in django 5.0+ explicit [`db_default`](https://docs.djangoproject.com/en/dev/ref/models/fields/#db-default) should be used instead. #### PgBouncer and timeouts In case you using [PgBouncer](https://www.pgbouncer.org/) and expect timeouts will work as expected you need make sure that run migrations using [session pool_mode](https://www.pgbouncer.org/config.html#pool_mode) or use direct database connection. ## How it works ### Postgres table level locks Postgres has different locks on table level that can conflict with each other https://www.postgresql.org/docs/current/static/explicit-locking.html#LOCKING-TABLES: | | `ACCESS SHARE` | `ROW SHARE` | `ROW EXCLUSIVE` | `SHARE UPDATE EXCLUSIVE` | `SHARE` | `SHARE ROW EXCLUSIVE` | `EXCLUSIVE` | `ACCESS EXCLUSIVE` | | ------------------------ | :------------: | :---------: | :-------------: | :----------------------: | :-----: | :-------------------: | :---------: | :----------------: | | `ACCESS SHARE` | | | | | | | | X | | `ROW SHARE` | | | | | | | X | X | | `ROW EXCLUSIVE` | | | | | X | X | X | X | | `SHARE UPDATE EXCLUSIVE` | | | | X | X | X | X | X | | `SHARE` | | | X | X | | X | X | X | | `SHARE ROW EXCLUSIVE` | | | X | X | X | X | X | X | | `EXCLUSIVE` | | X | X | X | X | X | X | X | | `ACCESS EXCLUSIVE` | X | X | X | X | X | X | X | X | ### Migration and business logic locks Lets split this lock to migration and business logic operations. - Migration operations work synchronously in one thread and cover schema migrations (data migrations conflict with business logic operations same as business logic conflict concurrently). - Business logic operations work concurrently. #### Migration locks | lock | operations | | ------------------------ | ----------------------------------------------------------------------------------------------------- | | `ACCESS EXCLUSIVE` | `CREATE SEQUENCE`, `DROP SEQUENCE`, `CREATE TABLE`, `DROP TABLE` \*, `ALTER TABLE` \*\*, `DROP INDEX` | | `SHARE` | `CREATE INDEX` | | `SHARE UPDATE EXCLUSIVE` | `CREATE INDEX CONCURRENTLY`, `DROP INDEX CONCURRENTLY`, `ALTER TABLE VALIDATE CONSTRAINT` \*\*\* | \*: `CREATE SEQUENCE`, `DROP SEQUENCE`, `CREATE TABLE`, `DROP TABLE` shouldn't have conflicts, because your business logic shouldn't yet operate with created tables and shouldn't already operate with deleted tables. \*\*: Not all `ALTER TABLE` operations take `ACCESS EXCLUSIVE` lock, but all current django's migrations take it https://github.com/django/django/blob/master/django/db/backends/base/schema.py, https://github.com/django/django/blob/master/django/db/backends/postgresql/schema.py and https://www.postgresql.org/docs/current/static/sql-altertable.html. \*\*\*: Django doesn't have `VALIDATE CONSTRAINT` logic, but we will use it for some cases. #### Business logic locks | lock | operations | conflict with lock | conflict with operations | | --------------- | ---------------------------- | --------------------------------------------------------------- | ------------------------------------------- | | `ACCESS SHARE` | `SELECT` | `ACCESS EXCLUSIVE` | `ALTER TABLE`, `DROP INDEX` | | `ROW SHARE` | `SELECT FOR UPDATE` | `ACCESS EXCLUSIVE`, `EXCLUSIVE` | `ALTER TABLE`, `DROP INDEX` | | `ROW EXCLUSIVE` | `INSERT`, `UPDATE`, `DELETE` | `ACCESS EXCLUSIVE`, `EXCLUSIVE`, `SHARE ROW EXCLUSIVE`, `SHARE` | `ALTER TABLE`, `DROP INDEX`, `CREATE INDEX` | So you can find that all django schema changes for exist table conflicts with business logic, but fortunately they are safe or has safe alternative in general. ### Postgres row level locks As business logic mostly works with table rows it's also important to understand lock conflicts on row level https://www.postgresql.org/docs/current/static/explicit-locking.html#LOCKING-ROWS: | lock | `FOR KEY SHARE` | `FOR SHARE` | `FOR NO KEY UPDATE` | `FOR UPDATE` | | ------------------- | :-------------: | :---------: | :-----------------: | :----------: | | `FOR KEY SHARE` | | | | X | | `FOR SHARE` | | | X | X | | `FOR NO KEY UPDATE` | | X | X | X | | `FOR UPDATE` | X | X | X | X | Main point there is if you have two transactions that update one row, then second transaction will wait until first will be completed. So for business logic and data migrations better to avoid updates for whole table and use batch operations instead. > _NOTE:_ batch operations also can work faster because postgres can use more optimal execution plan with indexes for small data range. ### Transactions FIFO waiting ![postgres FIFO](images/fifo-diagram.png "postgres FIFO") Found same diagram in interesting article http://pankrat.github.io/2015/django-migrations-without-downtimes/. In this diagram we can extract several metrics: 1. operation time - time spent changing schema, in the case of long running operations on many rows tables like `CREATE INDEX` or `ALTER TABLE ADD CONSTRAINT`, so you need a safe equivalent. 2. waiting time - your migration will wait until all transactions complete, so there is issue for long running operations/transactions like analytic, so you need avoid it or disable during migration. 3. queries per second + execution time and connections pool - if executing many queries, especially long running ones, they can consume all available database connections until the lock is released, so you need different optimizations there: run migrations when least busy, decrease query count and execution time, split data. 4. too many operations in one transaction - you have issues in all previous points for one operation so if you have many operations in one transaction then you have more likelihood to get this issue, so you need avoid too many simultaneous operations in a single transaction (or even not run it in a transaction at all but being careful when an operation fails). ### Dealing with timeouts Postgres has two settings to dealing with `waiting time` and `operation time` presented in diagram: `lock_timeout` and `statement_timeout`. `SET lock_timeout TO '2s'` allow you to avoid downtime when you have long running query/transaction before run migration (https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-LOCK-TIMEOUT). `SET statement_timeout TO '2s'` allow you to avoid downtime when you have long running migration query (https://www.postgresql.org/docs/current/static/runtime-config-client.html#GUC-STATEMENT-TIMEOUT). ### Deadlocks There no downtime issues for deadlocks, but too many operations in one transaction can take most conflicted lock and release it only after transaction commit or rollback. So it's a good idea to avoid `ACCESS EXCLUSIVE` lock operations and long time operations in one transaction. Deadlocks also can make you migration stuck on production deployment when different tables will be locked, for example, for FOREIGN KEY that take `ACCESS EXCLUSIVE` lock for two tables. ### Rows and values storing Postgres store values of different types different ways. If you try to convert one type to another and it stored different way then postgres will rewrite all values. Fortunately some types stored same way and postgres need to do nothing to change type, but in some cases postgres need to check that all values have same with new type limitations, for example string length. ### Multiversion Concurrency Control Regarding documentation https://www.postgresql.org/docs/current/static/mvcc-intro.html data consistency in postgres is maintained by using a multiversion model. This means that each SQL statement sees a snapshot of data. It has advantage for adding and deleting columns without any indexes, CONSTRAINTS and defaults do not change exist data, new version of data will be created on `INSERT` and `UPDATE`, delete just mark you record expired. All garbage will be collected later by `VACUUM` or `AUTO VACUUM`. ### Django migrations hacks Any schema changes can be processed with creation of new table and copy data to it, but it can take significant time. | # | name | safe | safe alternative | description | | --: |-----------------------------------------------|:----:|:-----------------------------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | 1 | `CREATE SEQUENCE` | X | | safe operation, because your business logic shouldn't operate with new sequence on migration time \* | | 2 | `DROP SEQUENCE` | X | | safe operation, because your business logic shouldn't operate with this sequence on migration time \* | | 3 | `CREATE TABLE` | X | | safe operation, because your business logic shouldn't operate with new table on migration time \* | | 4 | `DROP TABLE` | X | | safe operation, because your business logic shouldn't operate with this table on migration time \* | | 5 | `ALTER TABLE RENAME TO` | | use updatable view | **unsafe operation**, because it's too hard write business logic that operate with two tables simultaneously, so propose to use temporary updatable view and switch names in transaction \* | | 6 | `ALTER TABLE SET TABLESPACE` | | add new table and copy data | **unsafe operation**, but probably you don't need it at all or often \* | | 7 | `ALTER TABLE ADD COLUMN` | X | | safe operation if without `SET NOT NULL`, `PRIMARY KEY`, `UNIQUE` \* | | 8 | `ALTER TABLE ADD COLUMN SET DEFAULT` | X | | safe operation, however it can be unsafe if code default used within `NOT NULL`, for `db_default` or `NULL` there are no issue \* | | 9 | `ALTER TABLE ADD COLUMN SET NOT NULL` | | +/- | **unsafe operation**, because doesn't work without `SET DEFAULT` or after migration old code can insert rows without new column and raise exception, so propose to use `ALTER TABLE ADD COLUMN SET DEFAULT` with `db_default` or `ALTER TABLE ADD COLUMN` and then populate column and then `ALTER TABLE ALTER COLUMN SET NOT NULL` \* and \*\* | | 10 | `ALTER TABLE ADD COLUMN PRIMARY KEY` | | add index and add constraint | **unsafe operation**, because you spend time in migration to `CREATE INDEX`, so propose `ALTER TABLE ADD COLUMN` and then `CREATE INDEX CONCURRENTLY` and then `ALTER TABLE ADD CONSTRAINT PRIMARY KEY USING INDEX` \*\*\* | | 11 | `ALTER TABLE ADD COLUMN UNIQUE` | | add index and add constraint | **unsafe operation**, because you spend time in migration to `CREATE INDEX`, so propose `ALTER TABLE ADD COLUMN` and then `CREATE INDEX CONCURRENTLY` and then `ALTER TABLE ADD CONSTRAINT UNIQUE USING INDEX` \*\*\* | | 12 | `ALTER TABLE ALTER COLUMN TYPE` | | +/- | **unsafe operation**, because you spend time in migration to check that all items in column valid or to change type, but some operations can be safe \*\*\*\* | | 13 | `ALTER TABLE ALTER COLUMN SET NOT NULL` | | add check constraint before | **unsafe operation**, because you spend time in migration to check that all items in column `NOT NULL`, so propose `ALTER TABLE ADD CONSTRAINT CHECK` and then `ALTER TABLE VALIDATE CONSTRAINT` and then `ALTER TABLE ALTER COLUMN SET NOT NULL` *\* | | 14 | `ALTER TABLE ALTER COLUMN DROP NOT NULL` | X | | safe operation | | 15 | `ALTER TABLE ALTER COLUMN SET DEFAULT` | X | | safe operation | | 16 | `ALTER TABLE ALTER COLUMN DROP DEFAULT` | X | | safe operation | | 17 | `ALTER TABLE DROP COLUMN` | X | | safe operation, because your business logic shouldn't operate with this column on migration time, however better `ALTER TABLE ALTER COLUMN DROP NOT NULL`, `ALTER TABLE DROP CONSTRAINT` and `DROP INDEX` before \* and \*\*\*\*\* | | 18 | `ALTER TABLE RENAME COLUMN` | | use updatable view | **unsafe operation**, because it's too hard write business logic that operate with two columns simultaneously, so propose to use temporary updatable view and switch names in transaction \* | | 19 | `ALTER TABLE ADD CONSTRAINT CHECK` | | add as not valid and validate | **unsafe operation**, because you spend time in migration to check constraint | | 20 | `ALTER TABLE DROP CONSTRAINT` (`CHECK`) | X | | safe operation | | 21 | `ALTER TABLE ADD CONSTRAINT FOREIGN KEY` | | add as not valid and validate | **unsafe operation**, because you spend time in migration to check constraint, lock two tables | | 22 | `ALTER TABLE DROP CONSTRAINT` (`FOREIGN KEY`) | X | | safe operation, lock two tables | | 23 | `ALTER TABLE ADD CONSTRAINT PRIMARY KEY` | | add index and add constraint | **unsafe operation**, because you spend time in migration to create index \*\*\* | | 24 | `ALTER TABLE DROP CONSTRAINT` (`PRIMARY KEY`) | X | | safe operation \*\*\* | | 25 | `ALTER TABLE ADD CONSTRAINT UNIQUE` | | add index and add constraint | **unsafe operation**, because you spend time in migration to create index \*\*\* | | 26 | `ALTER TABLE DROP CONSTRAINT` (`UNIQUE`) | X | | safe operation \*\*\* | | 27 | `ALTER TABLE ADD CONSTRAINT EXCLUDE` | | add new table and copy data | | | 28 | `ALTER TABLE DROP CONSTRAINT (EXCLUDE)` | X | | | | 29 | `CREATE INDEX` | | `CREATE INDEX CONCURRENTLY` | **unsafe operation**, because you spend time in migration to create index | | 30 | `DROP INDEX` | X | `DROP INDEX CONCURRENTLY` | safe operation \*\*\* | | 31 | `CREATE INDEX CONCURRENTLY` | X | | safe operation | | 32 | `DROP INDEX CONCURRENTLY` | X | | safe operation \*\*\* | \*: main point with migration on production without downtime that your old and new code should correctly work before and after migration, lets look this point closely in [Dealing with logic that should work before and after migration](#dealing-with-logic-that-should-work-before-and-after-migration) section. \*\*: postgres will check that all items in column `NOT NULL` that take time, lets look this point closely in [Dealing with `NOT NULL` constraint](#dealing-with-not-null-constraint) section. \*\*\*: postgres will have same behaviour when you skip `ALTER TABLE ADD CONSTRAINT UNIQUE USING INDEX` and still unclear difference with `CONCURRENTLY` except difference in locks, lets look this point closely in [Dealing with `UNIQUE` constraint](#dealing-with-unique-constraint). \*\*\*\*: lets look this point closely in [Dealing with `ALTER TABLE ALTER COLUMN TYPE`](#dealing-with-alter-table-alter-column-type) section. \*\*\*\*\*: if you check migration on CI with `python manage.py makemigrations --check` you can't drop column in code without migration creation, so in this case you can be useful _back migration flow_: apply code on all instances and then migrate database #### Dealing with logic that should work before and after migration ##### Adding and removing models and columns Migrations: `CREATE SEQUENCE`, `DROP SEQUENCE`, `CREATE TABLE`, `DROP TABLE`, `ALTER TABLE ADD COLUMN`, `ALTER TABLE DROP COLUMN`. This migrations are pretty safe, because your logic doesn't work with this data before migration ##### Rename models Migrations: `ALTER TABLE RENAME TO`. Standard django's approach does not allow to operate simultaneously for old and new code with old and new table name, hopefully next workaround allows to rename table by splitting migration to few steps: 1. provide code changes but replace standard migration with [SeparateDatabaseAndState](https://docs.djangoproject.com/en/dev/ref/migration-operations/#separatedatabaseandstate) sql operation that **in transaction** rename table and create [updatable view](https://www.postgresql.org/docs/current/sql-createview.html#SQL-CREATEVIEW-UPDATABLE-VIEWS) that has old table name - old code can work with [updatable view](https://www.postgresql.org/docs/current/sql-createview.html#SQL-CREATEVIEW-UPDATABLE-VIEWS) by old name - new code can work with table by new name 2. after new code deployment old code is not used anymore, so we can drop view - new code can work with renamed table ##### Rename columns Migrations: `ALTER TABLE RENAME COLUMN`. Standard django's approach does not allow to operate simultaneously for old and new code with old and new column name, hopefully next workaround allows to rename column by splitting migration to few steps: 1. provide code changes but replace standard migration with [SeparateDatabaseAndState](https://docs.djangoproject.com/en/dev/ref/migration-operations/#separatedatabaseandstate) sql operation that **in transaction** rename column, rename table to temporary and create [updatable view](https://www.postgresql.org/docs/current/sql-createview.html#SQL-CREATEVIEW-UPDATABLE-VIEWS) that has old table name with both old and new columns - old code can work with new [updatable view](https://www.postgresql.org/docs/current/sql-createview.html#SQL-CREATEVIEW-UPDATABLE-VIEWS) and use old column - new code can work with new [updatable view](https://www.postgresql.org/docs/current/sql-createview.html#SQL-CREATEVIEW-UPDATABLE-VIEWS) and use new column 2. after new code deployment old code is not used anymore, so **in transaction** we can drop view and rename table back - new code can work with renamed column ##### Changes for working logic Migrations: `ALTER TABLE SET TABLESPACE`, `ALTER TABLE ADD CONSTRAINT EXCLUDE`. For this migration too hard implement logic that will work correctly for all instances, so there are two ways to dealing with it: 1. create new table, copy exist data, drop old table 2. downtime ##### Create column not null Migrations: `ALTER TABLE ADD COLUMN NOT NULL`. Postgres doesn't allow to create column with `NOT NULL` if table not empty and `DEFAULT` is not provided. So you want to `ALTER TABLE ADD COLUMN DEFAULT NOT NULL`. Django has two ways to create column default: [code `default`](https://docs.djangoproject.com/en/dev/ref/models/fields/#default) and [`db_default` for django 5.0+](https://docs.djangoproject.com/en/dev/ref/models/fields/#db-default). Main difference between them for us in operations they do for migration and old code inserts handling after migration: Code `default` migration and business logic SQL: ```sql -- migration ALTER TABLE tbl ADD COLUMN new_col integer DEFAULT 0 NOT NULL; ALTER TABLE tbl ALTER COLUMN new_col DROP DEFAULT; -- business logic INSERT INTO tbl (old_col) VALUES (1); -- old code inserts fail INSERT INTO tbl (old_col, new_col) VALUES (1, 1); -- new code inserts work fine ``` `db_default` migration and business logic SQL: ```sql -- migration ALTER TABLE tbl ADD COLUMN new_col integer DEFAULT 0 NOT NULL; -- business logic INSERT INTO tbl (old_col) VALUES (1); -- old code inserts work fine with default INSERT INTO tbl (old_col, new_col) VALUES (1, 1); -- new code inserts work fine ``` `db_default` is most robust way to apply default and it's works fine with `NOT NULL` constraints too. In django<5.0 you can use `ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT=True` to emulate `db_default` behaviour for `default` field. #### Dealing with `NOT NULL` column constraint Postgres checks that all column values `NOT NULL` (full table scan) when you are applying `ALTER TABLE ALTER COLUMN SET NOT NULL`, this check skipped if appropriate valid `CHECK CONSTRAINT` exists for postgres 12+. So to make existing column `NOT NULL` safe way you can follow next steps: - `ALTER TABLE ADD CONSTRAINT CHECK (column IS NOT NULL) NOT VALID` - create invalid check constraint for column, this operation takes `ACCESS EXCLUSIVE` lock only for table metadata update - `ALTER TABLE VALIDATE CONSTRAINT` - validate constraint, at this moment all column values should be `NOT NULL`, this operation takes `SHARE UPDATE EXCLUSIVE` lock until full table scan will be completed - `ALTER TABLE ALTER COLUMN SET NOT NULL` - set column `NOT NULL` don't check column values if appropriate valid `CHECK CONSTRAINT` exists, in this case this operation takes `ACCESS EXCLUSIVE` lock only for table metadata update - `ALTER TABLE DROP CONSTRAINT` - clean up `CHECK CONSTRAINT` that duplicates column `NOT NULL`, this operation takes `ACCESS EXCLUSIVE` lock only for table metadata update #### Dealing with `UNIQUE` constraint Postgres has two approaches for uniqueness: `CREATE UNIQUE INDEX` and `ALTER TABLE ADD CONSTRAINT UNIQUE` - both use unique index inside. Difference that we can find that we cannot apply `DROP INDEX CONCURRENTLY` for constraint. However it still unclear what difference for `DROP INDEX` and `DROP INDEX CONCURRENTLY` except difference in locks, but as we seen before both marked as safe - we don't spend time in `DROP INDEX`, just wait for lock. So as django use constraint for uniqueness we also have a hacks to use constraint safely. #### Dealing with `ALTER TABLE ALTER COLUMN TYPE` Next operations are safe: 1. `varchar(LESS)` to `varchar(MORE)` where LESS < MORE 2. `varchar(ANY)` to `text` 3. `numeric(LESS, SAME)` to `numeric(MORE, SAME)` where LESS < MORE and SAME == SAME For other operations propose to create new column and copy data to it. Eg. some types can be also safe, but you should check yourself. ================================================ FILE: django_zero_downtime_migrations/__init__.py ================================================ __version__ = "0.19" ================================================ FILE: django_zero_downtime_migrations/backends/__init__.py ================================================ ================================================ FILE: django_zero_downtime_migrations/backends/postgis/__init__.py ================================================ ================================================ FILE: django_zero_downtime_migrations/backends/postgis/base.py ================================================ from django.contrib.gis.db.backends.postgis.base import ( DatabaseWrapper as PostGISDatabaseWrapper ) from .schema import DatabaseSchemaEditor class DatabaseWrapper(PostGISDatabaseWrapper): SchemaEditorClass = DatabaseSchemaEditor ================================================ FILE: django_zero_downtime_migrations/backends/postgis/schema.py ================================================ from django.contrib.gis.db.backends.postgis.schema import PostGISSchemaEditor from django_zero_downtime_migrations.backends.postgres.schema import ( DatabaseSchemaEditorMixin ) class DatabaseSchemaEditor(DatabaseSchemaEditorMixin, PostGISSchemaEditor): pass ================================================ FILE: django_zero_downtime_migrations/backends/postgres/__init__.py ================================================ ================================================ FILE: django_zero_downtime_migrations/backends/postgres/base.py ================================================ from django.db.backends.postgresql.base import ( DatabaseWrapper as PostgresDatabaseWrapper ) from .schema import DatabaseSchemaEditor class DatabaseWrapper(PostgresDatabaseWrapper): SchemaEditorClass = DatabaseSchemaEditor ================================================ FILE: django_zero_downtime_migrations/backends/postgres/schema.py ================================================ import re import warnings from contextlib import contextmanager import django from django.conf import settings from django.contrib.postgres.constraints import ExclusionConstraint from django.db.backends.ddl_references import Statement, Table from django.db.backends.postgresql.schema import ( DatabaseSchemaEditor as PostgresDatabaseSchemaEditor ) from django.db.models import NOT_PROVIDED class Unsafe: ADD_COLUMN_NOT_NULL = ( "ADD COLUMN NOT NULL is unsafe operation\n" "See details for safe alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#create-column-not-null" ) ALTER_COLUMN_TYPE = ( "ALTER COLUMN TYPE is unsafe operation\n" "See details for safe alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#dealing-with-alter-table-alter-column-type" ) ADD_CONSTRAINT_EXCLUDE = ( "ADD CONSTRAINT EXCLUDE is unsafe operation\n" "See details for safe alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#changes-for-working-logic" ) ALTER_TABLE_RENAME = ( "ALTER TABLE RENAME is unsafe operation\n" "See details for save alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#rename-models" ) ALTER_TABLE_SET_TABLESPACE = ( "ALTER TABLE SET TABLESPACE is unsafe operation\n" "See details for save alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#changes-for-working-logic" ) ALTER_TABLE_RENAME_COLUMN = ( "ALTER TABLE RENAME COLUMN is unsafe operation\n" "See details for save alternative " "https://github.com/tbicr/django-pg-zero-downtime-migrations#rename-columns" ) class UnsafeOperationWarning(Warning): pass class UnsafeOperationException(Exception): pass class DummySQL: def __mod__(self, other): return DUMMY_SQL def format(self, *args, **kwargs): return DUMMY_SQL DUMMY_SQL = DummySQL() class Condition: def __init__(self, sql, exists, idempotent_mode_only=False): self.sql = sql self.exists = exists self.idempotent_mode_only = idempotent_mode_only def __str__(self): return self.sql def __repr__(self): return str(self) def __mod__(self, other): return self.__class__( sql=self.sql % other, exists=self.exists, idempotent_mode_only=self.idempotent_mode_only, ) def format(self, *args, **kwargs): return self.__class__( sql=self.sql.format(*args, **kwargs), exists=self.exists, idempotent_mode_only=self.idempotent_mode_only, ) class MultiStatementSQL(list): def __init__(self, obj, *args): if args: obj = [obj] + list(args) super().__init__(obj) def __str__(self): return '\n'.join(s.rstrip(';') + ';' for s in self) def __repr__(self): return str(self) def __mod__(self, other): if other is DUMMY_SQL: return DUMMY_SQL if isinstance(other, (list, tuple)) and any(arg is DUMMY_SQL for arg in other): return DUMMY_SQL if isinstance(other, dict) and any(val is DUMMY_SQL for val in other.values()): return DUMMY_SQL return MultiStatementSQL(s % other for s in self) def format(self, *args, **kwargs): if any(arg is DUMMY_SQL for arg in args) or any(val is DUMMY_SQL for val in kwargs.values()): return DUMMY_SQL return MultiStatementSQL(s.format(*args, **kwargs) for s in self) class PGLock: def __init__( self, sql, *, use_timeouts=False, disable_statement_timeout=False, idempotent_condition=None, ): self.sql = sql if use_timeouts and disable_statement_timeout: raise ValueError("Can't apply use_timeouts and disable_statement_timeout simultaneously.") self.use_timeouts = use_timeouts self.disable_statement_timeout = disable_statement_timeout self.idempotent_condition = idempotent_condition def __str__(self): return self.sql def __repr__(self): return str(self) def __mod__(self, other): if other is DUMMY_SQL: return DUMMY_SQL if isinstance(other, (list, tuple)) and any(arg is DUMMY_SQL for arg in other): return DUMMY_SQL if isinstance(other, dict) and any(val is DUMMY_SQL for val in other.values()): return DUMMY_SQL return self.__class__( self.sql % other, use_timeouts=self.use_timeouts, disable_statement_timeout=self.disable_statement_timeout, idempotent_condition=self.idempotent_condition % other if self.idempotent_condition is not None else None, ) def format(self, *args, **kwargs): if any(arg is DUMMY_SQL for arg in args) or any(val is DUMMY_SQL for val in kwargs.values()): return DUMMY_SQL return self.__class__( self.sql.format(*args, **kwargs), use_timeouts=self.use_timeouts, disable_statement_timeout=self.disable_statement_timeout, idempotent_condition=self.idempotent_condition.format(*args, **kwargs) if self.idempotent_condition is not None else None, ) class PGAccessExclusive(PGLock): def __init__( self, sql, *, use_timeouts=True, disable_statement_timeout=False, idempotent_condition=None, ): super().__init__( sql, use_timeouts=use_timeouts, disable_statement_timeout=disable_statement_timeout, idempotent_condition=idempotent_condition, ) class PGShareUpdateExclusive(PGLock): def __init__( self, sql, *, use_timeouts=False, disable_statement_timeout=True, idempotent_condition=None, ): super().__init__( sql, use_timeouts=use_timeouts, disable_statement_timeout=disable_statement_timeout, idempotent_condition=idempotent_condition, ) class DatabaseSchemaEditorMixin: ZERO_TIMEOUT = '0ms' _sql_get_lock_timeout = "SELECT setting || unit FROM pg_settings WHERE name = 'lock_timeout'" _sql_get_statement_timeout = "SELECT setting || unit FROM pg_settings WHERE name = 'statement_timeout'" _sql_set_lock_timeout = "SET lock_timeout TO '%(lock_timeout)s'" _sql_set_statement_timeout = "SET statement_timeout TO '%(statement_timeout)s'" _sql_identity_exists = ( "SELECT 1 FROM information_schema.columns " "WHERE table_name = TRIM('\"' FROM '%(table)s') " "AND column_name = TRIM('\"' FROM '%(column)s')" "AND is_identity = 'YES'" ) _sql_sequence_exists = "SELECT 1 FROM pg_class WHERE relname = TRIM('\"' FROM '%(name)s')" _sql_index_exists = "SELECT 1 FROM pg_class WHERE relname = TRIM('\"' FROM '%(name)s')" _sql_table_exists = "SELECT 1 FROM pg_class WHERE relname = TRIM('\"' FROM '%(table)s')" _sql_new_table_exists = "SELECT 1 FROM pg_class WHERE relname = TRIM('\"' FROM '%(new_table)s')" _sql_column_exists = ( "SELECT 1 FROM information_schema.columns " "WHERE table_name = TRIM('\"' FROM '%(table)s') " "AND column_name = TRIM('\"' FROM '%(column)s')" ) _sql_new_column_exists = ( "SELECT 1 FROM information_schema.columns " "WHERE table_name = TRIM('\"' FROM '%(table)s') " "AND column_name = TRIM('\"' FROM '%(new_column)s')" ) _sql_constraint_exists = ( "SELECT 1 FROM information_schema.table_constraints " "WHERE table_name = TRIM('\"' FROM '%(table)s') " "AND constraint_name = TRIM('\"' FROM '%(name)s')" ) _sql_index_valid = ( "SELECT 1 " "FROM pg_index " "WHERE indrelid = TRIM('\"' FROM '%(table)s')::regclass::oid " "AND indexrelid = TRIM('\"' FROM '%(name)s')::regclass::oid " "AND indisvalid" ) _sql_constraint_valid = ( "SELECT 1 " "FROM pg_constraint " "WHERE conrelid = TRIM('\"' FROM '%(table)s')::regclass::oid " "AND conname = TRIM('\"' FROM '%(name)s') " "AND convalidated" ) sql_alter_sequence_type = PGAccessExclusive(PostgresDatabaseSchemaEditor.sql_alter_sequence_type) sql_add_identity = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_add_identity, idempotent_condition=Condition(_sql_identity_exists, False), ) sql_drop_indentity = PGAccessExclusive(PostgresDatabaseSchemaEditor.sql_drop_indentity) sql_delete_sequence = PGAccessExclusive(PostgresDatabaseSchemaEditor.sql_delete_sequence) sql_create_table = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_create_table, idempotent_condition=Condition(_sql_table_exists, False), use_timeouts=False, ) sql_delete_table = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_table, idempotent_condition=Condition(_sql_table_exists, True), use_timeouts=False, ) sql_rename_table = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_rename_table, idempotent_condition=Condition(_sql_new_table_exists, False), ) sql_retablespace_table = PGAccessExclusive(PostgresDatabaseSchemaEditor.sql_retablespace_table) sql_create_column_inline_fk = None sql_create_column = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_create_column, idempotent_condition=Condition(_sql_column_exists, False), ) sql_alter_column = PGAccessExclusive(PostgresDatabaseSchemaEditor.sql_alter_column) sql_delete_column = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_column, idempotent_condition=Condition(_sql_column_exists, True), ) sql_rename_column = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_rename_column, idempotent_condition=Condition(_sql_new_column_exists, False), ) sql_create_check = MultiStatementSQL( PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s) NOT VALID", idempotent_condition=Condition(_sql_constraint_exists, False), ), PGShareUpdateExclusive( "ALTER TABLE %(table)s VALIDATE CONSTRAINT %(name)s", disable_statement_timeout=True, ), ) sql_delete_check = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_check, idempotent_condition=Condition(_sql_constraint_exists, True), ) if django.VERSION[:2] >= (5, 0): sql_create_unique = MultiStatementSQL( PGShareUpdateExclusive( "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s (%(columns)s)%(nulls_distinct)s", idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE USING INDEX %(name)s%(deferrable)s", idempotent_condition=Condition(_sql_constraint_exists, False), ), ) else: sql_create_unique = MultiStatementSQL( PGShareUpdateExclusive( "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s (%(columns)s)", idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE USING INDEX %(name)s%(deferrable)s", idempotent_condition=Condition(_sql_constraint_exists, False), ), ) sql_delete_unique = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_unique, idempotent_condition=Condition(_sql_constraint_exists, True), ) sql_create_fk = MultiStatementSQL( PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) " "REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s NOT VALID", idempotent_condition=Condition(_sql_constraint_exists, False), ), PGShareUpdateExclusive( "ALTER TABLE %(table)s VALIDATE CONSTRAINT %(name)s", disable_statement_timeout=True, ), ) sql_delete_fk = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_fk, idempotent_condition=Condition(_sql_constraint_exists, True), ) sql_create_pk = MultiStatementSQL( PGShareUpdateExclusive( "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s (%(columns)s)", idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY USING INDEX %(name)s", idempotent_condition=Condition(_sql_constraint_exists, False), ), ) sql_delete_pk = PGAccessExclusive( PostgresDatabaseSchemaEditor.sql_delete_pk, idempotent_condition=Condition(_sql_constraint_exists, True), ) sql_create_index = MultiStatementSQL( PGShareUpdateExclusive( PostgresDatabaseSchemaEditor.sql_create_index_concurrently, idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), ) sql_create_index_concurrently = MultiStatementSQL( PGShareUpdateExclusive( PostgresDatabaseSchemaEditor.sql_create_index_concurrently, idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), ) if django.VERSION[:2] >= (5, 0): sql_create_unique_index = MultiStatementSQL( PGShareUpdateExclusive( "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s " "(%(columns)s)%(include)s%(nulls_distinct)s%(condition)s", idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), ) else: sql_create_unique_index = MultiStatementSQL( PGShareUpdateExclusive( "CREATE UNIQUE INDEX CONCURRENTLY %(name)s ON %(table)s " "(%(columns)s)%(include)s%(condition)s", idempotent_condition=Condition(_sql_index_exists, False), disable_statement_timeout=True, ), PGShareUpdateExclusive( "REINDEX INDEX CONCURRENTLY %(name)s", idempotent_condition=Condition(_sql_index_valid, False, idempotent_mode_only=True), disable_statement_timeout=True, ), ) sql_delete_index = PGShareUpdateExclusive("DROP INDEX CONCURRENTLY IF EXISTS %(name)s") sql_delete_index_concurrently = PGShareUpdateExclusive( PostgresDatabaseSchemaEditor.sql_delete_index_concurrently ) sql_alter_table_comment = PGShareUpdateExclusive(PostgresDatabaseSchemaEditor.sql_alter_table_comment) sql_alter_column_comment = PGShareUpdateExclusive(PostgresDatabaseSchemaEditor.sql_alter_column_comment) _sql_column_not_null = MultiStatementSQL( PGAccessExclusive( "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(column)s IS NOT NULL) NOT VALID", idempotent_condition=Condition(_sql_constraint_exists, False), ), PGShareUpdateExclusive( "ALTER TABLE %(table)s VALIDATE CONSTRAINT %(name)s", disable_statement_timeout=True, ), PGAccessExclusive("ALTER TABLE %(table)s ALTER COLUMN %(column)s SET NOT NULL"), PGAccessExclusive( "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s", idempotent_condition=Condition(_sql_constraint_exists, True), ), ) _sql_get_table_constraints_introspection = r""" SELECT c.conname, c.contype, c.conrelid::regclass::text, c.confrelid::regclass::text, array( SELECT attname FROM unnest(c.conkey) WITH ORDINALITY cols(colid, arridx) JOIN pg_attribute AS ca ON cols.colid = ca.attnum WHERE ca.attrelid = c.conrelid ORDER BY cols.arridx ), array( SELECT attname FROM unnest(c.confkey) WITH ORDINALITY cols(colid, arridx) JOIN pg_attribute AS ca ON cols.colid = ca.attnum WHERE ca.attrelid = c.confrelid ORDER BY cols.arridx ) FROM pg_constraint AS c WHERE c.conrelid::regclass::text = %s OR c.confrelid::regclass::text = %s ORDER BY c.conrelid::regclass::text, c.conname """ _sql_get_index_introspection = r""" SELECT i.indexrelid::regclass::text, i.indrelid::regclass::text, array( SELECT a.attname FROM ( SELECT unnest(i.indkey) UNION SELECT unnest(regexp_matches(i.indexprs::text, ':varattno (\d+)', 'g'))::int UNION SELECT unnest(regexp_matches(i.indpred::text, ':varattno (\d+)', 'g'))::int ) cols(varattno) INNER JOIN pg_attribute AS a ON cols.varattno = a.attnum WHERE a.attrelid = i.indrelid ) FROM pg_index i LEFT JOIN pg_constraint c ON i.indexrelid = c.conindid WHERE indrelid::regclass::text = %s AND c.conindid IS NULL ORDER BY i.indrelid::regclass::text, i.indexrelid::regclass::text """ _varchar_type_regexp = re.compile(r'^varchar\((?P\d+)\)$') _numeric_type_regexp = re.compile(r'^numeric\((?P\d+), *(?P\d+)\)$') @property def sql_alter_column_no_default_null(self): if self.KEEP_DEFAULT: return DUMMY_SQL return super().sql_alter_column_no_default_null @property def sql_alter_column_no_default(self): if self.KEEP_DEFAULT: return DUMMY_SQL return super().sql_alter_column_no_default def __init__(self, connection, collect_sql=False, atomic=True): # Disable atomic transactions as it can be reason of downtime or deadlock # in case if you combine many operation in one migration module. super().__init__(connection, collect_sql=collect_sql, atomic=False) # Avoid using DUMMY_SQL in combined alters connection.features.supports_combined_alters = False # Get settings with defaults self.LOCK_TIMEOUT = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT", None) self.STATEMENT_TIMEOUT = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT", None) self.FLEXIBLE_STATEMENT_TIMEOUT = getattr( settings, "ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT", False) self.RAISE_FOR_UNSAFE = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE", False) self.DEFERRED_SQL = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_DEFERRED_SQL", True) self.IDEMPOTENT_SQL = ( getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL", False) if not collect_sql else False # disable idempotent mode for sqlmigrate ) self.KEEP_DEFAULT = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT", False) if django.VERSION[:2] >= (5, 0) and hasattr(settings, 'ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT'): warnings.warn( 'settings.ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT is not applicable for django 5.0+. ' 'Please remove this setting.', DeprecationWarning, ) self.KEEP_DEFAULT = False self.EXPLICIT_CONSTRAINTS_DROP = getattr(settings, "ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP", True) def execute(self, sql, params=()): if sql is DUMMY_SQL: return statements = [] if isinstance(sql, MultiStatementSQL): statements.extend(sql) elif isinstance(sql, Statement) and isinstance(sql.template, MultiStatementSQL): statements.extend(Statement(s, **sql.parts) for s in sql.template) else: statements.append(sql) for statement in statements: idempotent_condition = None if isinstance(statement, PGLock): use_timeouts = statement.use_timeouts disable_statement_timeout = statement.disable_statement_timeout idempotent_condition = statement.idempotent_condition statement = statement.sql elif isinstance(statement, Statement) and isinstance(statement.template, PGLock): use_timeouts = statement.template.use_timeouts disable_statement_timeout = statement.template.disable_statement_timeout if statement.template.idempotent_condition is not None: idempotent_condition = statement.template.idempotent_condition % statement.parts statement = Statement(statement.template.sql, **statement.parts) else: use_timeouts = False disable_statement_timeout = False if not self._skip_applied(idempotent_condition): if use_timeouts: with self._set_operation_timeout(self.STATEMENT_TIMEOUT, self.LOCK_TIMEOUT): super().execute(statement, params) elif disable_statement_timeout and self.FLEXIBLE_STATEMENT_TIMEOUT: with self._set_operation_timeout(self.ZERO_TIMEOUT): super().execute(statement, params) else: super().execute(statement, params) def _skip_applied(self, idempotent_condition: Condition) -> bool: if idempotent_condition is None: return False if not self.IDEMPOTENT_SQL: # in case of failure of creating indexes concurrently index will be created but will be invalid # for this case reindex statement added to recreate valid index in IDEMPOTENT_SQL mode # but if IDEMPOTENT_SQL mode is disabled we need to skip this extra reindex sql return idempotent_condition.idempotent_mode_only with self.connection.cursor() as cursor: cursor.execute(idempotent_condition.sql) exists = cursor.fetchone() is not None if idempotent_condition.exists: return not exists return exists @contextmanager def _set_operation_timeout(self, statement_timeout=None, lock_timeout=None): if self.collect_sql: previous_statement_timeout = self.ZERO_TIMEOUT previous_lock_timeout = self.ZERO_TIMEOUT else: with self.connection.cursor() as cursor: cursor.execute(self._sql_get_statement_timeout) previous_statement_timeout, = cursor.fetchone() cursor.execute(self._sql_get_lock_timeout) previous_lock_timeout, = cursor.fetchone() if statement_timeout is not None: self.execute(self._sql_set_statement_timeout % {"statement_timeout": statement_timeout}) if lock_timeout is not None: self.execute(self._sql_set_lock_timeout % {"lock_timeout": lock_timeout}) yield if statement_timeout is not None: self.execute(self._sql_set_statement_timeout % {"statement_timeout": previous_statement_timeout}) if lock_timeout is not None: self.execute(self._sql_set_lock_timeout % {"lock_timeout": previous_lock_timeout}) def _flush_deferred_sql(self): """As some alternative sql use deferred sql and deferred sql run after all operations in migration module so good idea to run deferred sql as soon as possible to provide similar as possible state between operations in migration module. But this approach can be reason of errors for some migrations. As only constraints creation placed in deferred sql it looks safe to keep standard django deferred sql run approach. # TODO: drop option to run deferred sql as soon as possible in future """ if not self.DEFERRED_SQL: for sql in self.deferred_sql: self.execute(sql) self.deferred_sql.clear() def _get_constraints(self, cursor, model): cursor.execute(self._sql_get_table_constraints_introspection, [model._meta.db_table, model._meta.db_table]) for constraint, kind, table, table_ref, columns, columns_ref in cursor.fetchall(): yield constraint, kind, table, table_ref, columns, columns_ref def _get_indexes(self, cursor, model): cursor.execute(self._sql_get_index_introspection, [model._meta.db_table]) for index, table, columns in cursor.fetchall(): yield index, table, columns def _drop_collect_sql_introspection_related_duplicates(self, drop_constraint_queries): """ django internals use introspection to find related constraints and perform action if constraint exists dropping constraints before dropping table or column can duplicate same logic in django internals in this case for sqlmigrate drop constraint sql can be duplicated as no physical constraint drop perform so just remove constraint drop duplicates for sqlmigrate """ if self.collect_sql: handled_queries = set() drops = set() for i in range(len(self.collected_sql)): for j in range(len(drop_constraint_queries)): if all( self.collected_sql[i + k] == drop_constraint_queries[j][k] for k in range(len(drop_constraint_queries[j])) ): if j in handled_queries: drops |= {i + k for k in range(len(drop_constraint_queries[j]))} handled_queries.add(j) self.collected_sql = [query for i, query in enumerate(self.collected_sql) if i not in drops] def create_model(self, model): super().create_model(model) self._flush_deferred_sql() def delete_model(self, model): drop_constraint_queries = [] if self.EXPLICIT_CONSTRAINTS_DROP: with self.connection.cursor() as cursor: for constraint, kind, table, table_ref, columns, columns_ref in self._get_constraints(cursor, model): if kind == "f": last_collected_sql = len(self.collected_sql) if self.collect_sql else None self.execute(Statement( self.sql_delete_fk, table=Table(table, self.quote_name), name=self.quote_name(constraint), )) if self.collect_sql: drop_constraint_queries.append(self.collected_sql[last_collected_sql:]) super().delete_model(model) self._flush_deferred_sql() self._drop_collect_sql_introspection_related_duplicates(drop_constraint_queries) def alter_index_together(self, model, old_index_together, new_index_together): super().alter_index_together(model, old_index_together, new_index_together) self._flush_deferred_sql() def alter_unique_together(self, model, old_unique_together, new_unique_together): super().alter_unique_together(model, old_unique_together, new_unique_together) self._flush_deferred_sql() def add_index(self, model, index, concurrently=False): super().add_index(model, index, concurrently=concurrently) self._flush_deferred_sql() def remove_index(self, model, index, concurrently=False): super().remove_index(model, index, concurrently=concurrently) self._flush_deferred_sql() def add_constraint(self, model, constraint): if isinstance(constraint, ExclusionConstraint): if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ADD_CONSTRAINT_EXCLUDE) else: warnings.warn(UnsafeOperationWarning(Unsafe.ADD_CONSTRAINT_EXCLUDE)) super().add_constraint(model, constraint) self._flush_deferred_sql() def remove_constraint(self, model, constraint): super().remove_constraint(model, constraint) self._flush_deferred_sql() def add_field(self, model, field): super().add_field(model, field) self._flush_deferred_sql() def remove_field(self, model, field): drop_constraint_queries = [] if self.EXPLICIT_CONSTRAINTS_DROP: with self.connection.cursor() as cursor: # as foreign key can have index as dependent object it important to drop all foreign keys first for constraint, kind, table, table_ref, columns, columns_ref in self._get_constraints(cursor, model): # drop foreign key for current model columns if kind == "f" and table == model._meta.db_table and field.column in columns: last_collected_sql = len(self.collected_sql) if self.collect_sql else None self.execute(Statement( self.sql_delete_fk, table=Table(table, self.quote_name), name=self.quote_name(constraint), )) if self.collect_sql: drop_constraint_queries.append(self.collected_sql[last_collected_sql:]) # drop foreign key for target model columns, i.e. backrefs if kind == "f" and table_ref == model._meta.db_table and field.column in columns_ref: last_collected_sql = len(self.collected_sql) if self.collect_sql else None self.execute(Statement( self.sql_delete_fk, table=Table(table, self.quote_name), name=self.quote_name(constraint), )) if self.collect_sql: drop_constraint_queries.append(self.collected_sql[last_collected_sql:]) for constraint, kind, table, table_ref, columns, columns_ref in self._get_constraints(cursor, model): # drop unique constraints for current model columns if kind == "u" and table == model._meta.db_table and field.column in columns: last_collected_sql = len(self.collected_sql) if self.collect_sql else None self.execute(Statement( self.sql_delete_unique, table=Table(table, self.quote_name), name=self.quote_name(constraint), )) if self.collect_sql: drop_constraint_queries.append(self.collected_sql[last_collected_sql:]) for index, table, columns in self._get_indexes(cursor, model): # drop indexes for current model columns if table == model._meta.db_table and field.column in columns: last_collected_sql = len(self.collected_sql) if self.collect_sql else None self.execute(Statement( self.sql_delete_index_concurrently, table=Table(table, self.quote_name), name=self.quote_name(index), )) if self.collect_sql: drop_constraint_queries.append(self.collected_sql[last_collected_sql:]) super().remove_field(model, field) self._flush_deferred_sql() self._drop_collect_sql_introspection_related_duplicates(drop_constraint_queries) def alter_field(self, model, old_field, new_field, strict=False): super().alter_field(model, old_field, new_field, strict) self._flush_deferred_sql() def alter_db_table(self, model, old_db_table, new_db_table): # Disregard cases where db_table is unchanged if old_db_table != new_db_table: if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ALTER_TABLE_RENAME) else: warnings.warn(UnsafeOperationWarning(Unsafe.ALTER_TABLE_RENAME)) super().alter_db_table(model, old_db_table, new_db_table) self._flush_deferred_sql() def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace): if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ALTER_TABLE_SET_TABLESPACE) else: warnings.warn(UnsafeOperationWarning(Unsafe.ALTER_TABLE_SET_TABLESPACE)) super().alter_db_tablespace(model, old_db_tablespace, new_db_tablespace) self._flush_deferred_sql() def alter_db_table_comment(self, model, old_db_table_comment, new_db_table_comment): super().alter_db_table_comment(model, old_db_table_comment, new_db_table_comment) self._flush_deferred_sql() def _rename_field_sql(self, table, old_field, new_field, new_type): if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ALTER_TABLE_RENAME_COLUMN) else: warnings.warn(UnsafeOperationWarning(Unsafe.ALTER_TABLE_RENAME_COLUMN)) return super()._rename_field_sql(table, old_field, new_field, new_type) def _has_db_default(self, field): if django.VERSION < (5, 0): if self.KEEP_DEFAULT: return field.default is not NOT_PROVIDED return False if django.VERSION >= (5, 2): return field.has_db_default() return field.db_default is not NOT_PROVIDED def _add_column_not_null(self, model, field): if not self._has_db_default(field): if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ADD_COLUMN_NOT_NULL) else: warnings.warn(UnsafeOperationWarning(Unsafe.ADD_COLUMN_NOT_NULL)) return "NOT NULL" def _add_column_primary_key(self, model, field): self.deferred_sql.append(self.sql_create_pk % { "table": self.quote_name(model._meta.db_table), "name": self.quote_name(self._create_index_name(model._meta.db_table, [field.column], suffix="_pk")), "columns": self.quote_name(field.column), }) return "" def _add_column_unique(self, model, field): self.deferred_sql.append(self._create_unique_sql(model, [field])) return "" def _patched_iter_column_sql( self, column_db_type, params, model, field, field_db_params, include_default ): yield column_db_type if collation := field_db_params.get("collation"): yield self._collate_sql(collation) if self.connection.features.supports_comments_inline and field.db_comment: yield self._comment_sql(field.db_comment) # Work out nullability. null = field.null # Add database default. if ( django.VERSION >= (5, 2) and field.has_db_default() or django.VERSION >= (5, 0) and field.db_default is not NOT_PROVIDED ): default_sql, default_params = self.db_default_sql(field) yield f"DEFAULT {default_sql}" params.extend(default_params) include_default = False # Include a default value, if requested. include_default = ( include_default and not self.skip_default(field) and # Don't include a default value if it's a nullable field and the # default cannot be dropped in the ALTER COLUMN statement (e.g. # MySQL longtext and longblob). not (null and self.skip_default_on_alter(field)) ) if include_default: default_value = self.effective_default(field) if default_value is not None: column_default = "DEFAULT " + self._column_default_sql(field) if self.connection.features.requires_literal_defaults: # Some databases can't take defaults as a parameter # (Oracle, SQLite). If this is the case, the individual # schema backend should implement prepare_default(). yield column_default % self.prepare_default(default_value) else: yield column_default params.append(default_value) # Oracle treats the empty string ('') as null, so coerce the null # option whenever '' is a possible value. if ( field.empty_strings_allowed and not field.primary_key and self.connection.features.interprets_empty_strings_as_nulls ): null = True if django.VERSION >= (5, 0) and field.generated: generated_sql, generated_params = self._column_generated_sql(field) params.extend(generated_params) yield generated_sql elif not null: yield self._add_column_not_null(model, field) # different to origin method elif not self.connection.features.implied_column_null: yield "NULL" if field.primary_key: self._add_column_primary_key(model, field) # different to origin method elif field.unique: self._add_column_unique(model, field) # different to origin method # Optionally add the tablespace if it's an implicitly indexed column. tablespace = field.db_tablespace or model._meta.db_tablespace if ( tablespace and self.connection.features.supports_tablespaces and field.unique ): yield self.connection.ops.tablespace_sql(tablespace, inline=True) def _iter_column_sql( self, column_db_type, params, model, field, field_db_params, include_default ): if not include_default: yield from super()._iter_column_sql( column_db_type, params, model, field, field_db_params, include_default, ) else: yield from self._patched_iter_column_sql( column_db_type, params, model, field, field_db_params, include_default, ) def _alter_column_set_not_null(self, model, new_field): self.deferred_sql.append(self._sql_column_not_null % { "column": self.quote_name(new_field.column), "table": self.quote_name(model._meta.db_table), "name": self.quote_name( self._create_index_name(model._meta.db_table, [new_field.column], suffix="_notnull") ), }) return DUMMY_SQL, [] def _alter_column_drop_not_null(self, model, new_field): return self.sql_alter_column_null % { "column": self.quote_name(new_field.column), }, [] def _alter_column_null_sql(self, model, old_field, new_field): if new_field.null: return self._alter_column_drop_not_null(model, new_field) else: return self._alter_column_set_not_null(model, new_field) def _immediate_type_cast(self, old_type, new_type): if ( (old_type == new_type) or (old_type == 'integer' and new_type == 'serial') or (old_type == 'bigint' and new_type == 'bigserial') or (old_type == 'smallint' and new_type == 'smallserial') or (old_type == 'serial' and new_type == 'integer') or (old_type == 'bigserial' and new_type == 'bigint') or (old_type == 'smallserial' and new_type == 'smallint') ): return True old_type_varchar_match = self._varchar_type_regexp.match(old_type) if old_type_varchar_match: if new_type == "text": return True new_type_varchar_match = self._varchar_type_regexp.match(new_type) if new_type_varchar_match: old_type_max_length = int(old_type_varchar_match.group("max_length")) new_type_max_length = int(new_type_varchar_match.group("max_length")) if new_type_max_length >= old_type_max_length: return True else: return False old_type_numeric_match = self._numeric_type_regexp.match(old_type) if old_type_numeric_match: new_type_numeric_match = self._numeric_type_regexp.match(new_type) old_type_precision = int(old_type_numeric_match.group("precision")) old_type_scale = int(old_type_numeric_match.group("scale")) try: new_type_precision = int(new_type_numeric_match.group("precision")) new_type_scale = int(new_type_numeric_match.group("scale")) except AttributeError: return False return new_type_precision >= old_type_precision and new_type_scale == old_type_scale return False def _alter_column_type_sql(self, model, old_field, new_field, new_type, old_collation, new_collation): old_db_params = old_field.db_parameters(connection=self.connection) old_type = old_db_params["type"] if not self._immediate_type_cast(old_type, new_type): if self.RAISE_FOR_UNSAFE: raise UnsafeOperationException(Unsafe.ALTER_COLUMN_TYPE) else: warnings.warn(UnsafeOperationWarning(Unsafe.ALTER_COLUMN_TYPE)) return super()._alter_column_type_sql(model, old_field, new_field, new_type, old_collation, new_collation) class DatabaseSchemaEditor(DatabaseSchemaEditorMixin, PostgresDatabaseSchemaEditor): pass ================================================ FILE: docker-compose.yml ================================================ services: pg17: image: postgres:17-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg16: image: postgres:16-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg15: image: postgres:15-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg14: image: postgres:14-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg13: image: postgres:13-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg12: image: postgres:12-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql pg11: image: postgres:11-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql postgis17: image: postgis/postgis:17-3.5-alpine environment: POSTGRES_USER: root POSTGRES_PASSWORD: root volumes: - ./docker_postgres_init.sql:/docker-entrypoint-initdb.d/docker_postgres_init.sql django-pg-zero-downtime-migrations-tests: image: django-pg-zero-downtime-migrations:latest build: . depends_on: - pg17 - pg16 - pg15 - pg14 - pg13 - pg12 - pg11 - postgis17 volumes: - .:/app ================================================ FILE: docker_postgres_init.sql ================================================ CREATE USER test WITH PASSWORD 'test' CREATEDB; ================================================ FILE: manage.py ================================================ #!/usr/bin/env python """Django's command-line utility for administrative tasks.""" import os import sys def main(): os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tests.settings_make_migrations') try: from django.core.management import execute_from_command_line except ImportError as exc: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc execute_from_command_line(sys.argv) if __name__ == '__main__': main() ================================================ FILE: setup.cfg ================================================ [flake8] max-line-length = 120 exclude = .tox/,venv/ [isort] combine_as_imports = true known_django = django known_first_party = django_zero_downtime_migrations,tests sections = FUTURE,STDLIB,DJANGO,THIRDPARTY,FIRSTPARTY,LOCALFOLDER default_section = THIRDPARTY line_length = 79 multi_line_output = 5 skip = .tox/,venv/ [tool:pytest] addopts = --verbose python_files = tests/*/test*.py DJANGO_SETTINGS_MODULE = tests.settings ================================================ FILE: setup.py ================================================ from setuptools import find_packages, setup VERSION = __import__('django_zero_downtime_migrations').__version__ def _replace_internal_images_with_external(text): return text.replace( '(images/', '(https://raw.githubusercontent.com/tbicr/django-pg-zero-downtime-migrations/' '{VERSION}/images/'.format(VERSION=VERSION), ) def _get_long_description(): with open('README.md') as readme_handle: readme = readme_handle.read() with open('CHANGES.md') as changes_handle: changes = changes_handle.read() return _replace_internal_images_with_external(readme) + '\n\n' + changes setup( name='django-pg-zero-downtime-migrations', version=VERSION, author='Paveł Tyślacki', author_email='pavel.tyslacki@gmail.com', license='MIT', url='https://github.com/tbicr/django-pg-zero-downtime-migrations', description='Django postgresql backend that apply migrations with respect to database locks', long_description=_get_long_description(), long_description_content_type='text/markdown', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', 'Framework :: Django', 'Framework :: Django :: 4.2', 'Framework :: Django :: 5.0', 'Framework :: Django :: 5.1', 'Framework :: Django :: 5.2', ], keywords='django postgres postgresql migrations', packages=find_packages(exclude=['manage*', 'tests*']), python_requires='>=3.8', install_requires=[ 'django>=4.2', ] ) ================================================ FILE: tests/__init__.py ================================================ from django.conf import settings import pytest skip_for_default_django_backend = pytest.mark.skipif( settings.DATABASES['default']['ENGINE'] in ( 'django.db.backends.postgresql', 'django.contrib.gis.db.backends.postgis', ), reason='not actual for default django backends' ) ================================================ FILE: tests/apps/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_default_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_default_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:47 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_default_app/migrations/0002_add_field_default.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bad_flow_add_column_with_default_app', '0001_initial'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(default=0), ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_default_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_default_app/models.py ================================================ from django.db import models class TestTable(models.Model): field = models.IntegerField(default=0, null=False) ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:45 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_app/migrations/0002_add_field_notnull.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:46 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bad_flow_add_column_with_notnull_app', '0001_initial'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(), ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_app/models.py ================================================ from django.db import models class TestTable(models.Model): field = models.IntegerField() ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_default_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_default_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:41 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_default_app/migrations/0002_add_field_notnull_default.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:43 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bad_flow_add_column_with_notnull_default_app', '0001_initial'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(default=0), ), ] ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_default_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_add_column_with_notnull_default_app/models.py ================================================ from django.db import models class TestTable(models.Model): field = models.IntegerField(default=0) ================================================ FILE: tests/apps/bad_flow_change_char_type_that_unsafe_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_change_char_type_that_unsafe_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:37 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('field', models.CharField(max_length=120)), ], ), ] ================================================ FILE: tests/apps/bad_flow_change_char_type_that_unsafe_app/migrations/0002_change_type_from_char120_to_char100.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:38 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bad_flow_change_char_type_that_unsafe_app', '0001_initial'), ] operations = [ migrations.AlterField( model_name='testtable', name='field', field=models.CharField(max_length=100), ), ] ================================================ FILE: tests/apps/bad_flow_change_char_type_that_unsafe_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_flow_change_char_type_that_unsafe_app/models.py ================================================ from django.db import models class TestTable(models.Model): field = models.CharField(max_length=100) ================================================ FILE: tests/apps/bad_rollback_flow_change_char_type_that_safe_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_change_char_type_that_safe_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:26 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('char_filed', models.CharField(max_length=100)), ], ), ] ================================================ FILE: tests/apps/bad_rollback_flow_change_char_type_that_safe_app/migrations/0002_change_type_safe_from_char100_to_char120.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('bad_rollback_flow_change_char_type_that_safe_app', '0001_initial'), ] operations = [ migrations.AlterField( model_name='testtable', name='char_filed', field=models.CharField(max_length=120), ), ] ================================================ FILE: tests/apps/bad_rollback_flow_change_char_type_that_safe_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_change_char_type_that_safe_app/models.py ================================================ from django.db import models class TestTable(models.Model): char_filed = models.CharField(max_length=120) ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:00 from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('bad_rollback_flow_drop_column_with_notnull_app', 'TestTable') TestTable.objects.using(db_alias).create(field=1) class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('field', models.IntegerField()), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_app/migrations/0002_drop_field_not_null.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:02 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('bad_rollback_flow_drop_column_with_notnull_app', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_app/models.py ================================================ from django.db import models class TestTable(models.Model): pass ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_default_app/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_default_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:19 from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('bad_rollback_flow_drop_column_with_notnull_default_app', 'TestTable') TestTable.objects.using(db_alias).create(field=1) class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('field', models.IntegerField(default=0)), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_default_app/migrations/0002_drop_field_not_null_default.py ================================================ # Generated by Django 3.1 on 2019-09-22 21:20 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('bad_rollback_flow_drop_column_with_notnull_default_app', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_default_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/bad_rollback_flow_drop_column_with_notnull_default_app/models.py ================================================ from django.db import models class TestTable(models.Model): pass ================================================ FILE: tests/apps/decimal_to_float_app/__init__.py ================================================ ================================================ FILE: tests/apps/decimal_to_float_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Value', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('amount', models.DecimalField(blank=True, decimal_places=4, default=None, max_digits=12, null=True)), ], options={ 'abstract': False, }, ), ] ================================================ FILE: tests/apps/decimal_to_float_app/migrations/0002_type_conversion.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('decimal_to_float_app', '0001_initial'), ] operations = [ migrations.AlterField( model_name='value', name='amount', field=models.FloatField(blank=True, default=None, null=True), ), ] ================================================ FILE: tests/apps/decimal_to_float_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/decimal_to_float_app/models.py ================================================ from django.db import models class Value(models.Model): amount = models.FloatField( null=True, default=None, blank=True, ) ================================================ FILE: tests/apps/good_flow_alter_table_with_same_db_table/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_alter_table_with_same_db_table/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2021-12-30 13:37 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('field', models.IntegerField()), ], options={ 'db_table': 'test_table', }, ), ] ================================================ FILE: tests/apps/good_flow_alter_table_with_same_db_table/migrations/0002_rename_model.py ================================================ # Generated by Django 3.1 on 2021-12-30 23:59 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_alter_table_with_same_db_table', '0001_initial'), ] operations = [ migrations.RenameModel( old_name='TestTable', new_name='TestTableRenamed', ), ] ================================================ FILE: tests/apps/good_flow_alter_table_with_same_db_table/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_alter_table_with_same_db_table/models.py ================================================ from django.db import models class TestTableRenamed(models.Model): class Meta: db_table = 'test_table' ================================================ FILE: tests/apps/good_flow_app/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_app/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:09 import django.contrib.postgres.search from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') TestTable.objects.using(db_alias).create(test_field_int=1) class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='RelatedTestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('test_field_int', models.IntegerField()), ('test_field_str', models.CharField(max_length=10)), ('test_field_tsv', django.contrib.postgres.search.SearchVectorField()), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0002_add_nullable_field.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:15 from django.db import migrations, models def insert_objects_and_null_check(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') instance = TestTable.objects.using(db_alias).create(test_field_int=1) assert instance.field is None instance.delete() class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0001_initial'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(null=True), ), migrations.RunPython(insert_objects_and_null_check, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0003_set_field_default.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:16 from django.db import migrations, models def insert_objects_and_default_check(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') instance = TestTable.objects.using(db_alias).create(test_field_int=1) assert instance.field == 0 instance.delete() instance = TestTable.objects.using(db_alias).create(test_field_int=1, field=None) assert instance.field is None instance.delete() class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0002_add_nullable_field'), ] operations = [ migrations.AlterField( model_name='testtable', name='field', field=models.IntegerField(default=0, null=True), ), migrations.RunPython(insert_objects_and_default_check, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0004_set_field_not_null.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:16 from django.db import IntegrityError, migrations, models def flush_deferred_sql(apps, schema_editor): for sql in schema_editor.deferred_sql: schema_editor.execute(sql) def update_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') TestTable.objects.using(db_alias).update(field=0) def insert_objects_and_not_null_check(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') instance = TestTable.objects.using(db_alias).create(test_field_int=1) assert instance.field == 0 instance.delete() try: TestTable.objects.using(db_alias).create(test_field_int=1, field=None) assert False except IntegrityError: pass class Migration(migrations.Migration): atomic = False # avoid transaction issue for default django backend check dependencies = [ ('good_flow_app', '0003_set_field_default'), ] operations = [ migrations.RunPython(update_objects, migrations.RunPython.noop), migrations.AlterField( model_name='testtable', name='field', field=models.IntegerField(default=0), ), migrations.RunPython(flush_deferred_sql, migrations.RunPython.noop), migrations.RunPython(insert_objects_and_not_null_check, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0005_drop_field_not_null.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:17 from django.db import migrations, models def insert_objects_and_default_check(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') instance = TestTable.objects.using(db_alias).create(test_field_int=1) assert instance.field == 0 instance.delete() instance = TestTable.objects.using(db_alias).create(test_field_int=1, field=None) assert instance.field is None instance.delete() class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0004_set_field_not_null'), ] operations = [ migrations.AlterField( model_name='testtable', name='field', field=models.IntegerField(default=0, null=True), ), migrations.RunPython(insert_objects_and_default_check, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0006_drop_field_default.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:17 from django.db import migrations, models def insert_objects_and_null_check(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app', 'TestTable') instance = TestTable.objects.using(db_alias).create(test_field_int=1) assert instance.field is None instance.delete() class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0005_drop_field_not_null'), ] operations = [ migrations.AlterField( model_name='testtable', name='field', field=models.IntegerField(null=True), ), migrations.RunPython(insert_objects_and_null_check, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0007_drop_field.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:18 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0006_drop_field_default'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0008_add_field_with_check_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:39 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0007_drop_field'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.PositiveIntegerField(null=True), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0009_drop_field_with_check_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:39 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0008_add_field_with_check_constraint'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0010_add_field_with_foreign_key.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:42 import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0009_drop_field_with_check_constraint'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to='good_flow_app.RelatedTestTable' ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0011_drop_field_with_foreign_key.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:42 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0010_add_field_with_foreign_key'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0012_add_field_with_unique_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:43 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0011_drop_field_with_foreign_key'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(null=True, unique=True), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0013_drop_field_with_unique_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:43 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0012_add_field_with_unique_constraint'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0014_add_field_with_index.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:44 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0013_drop_field_with_unique_constraint'), ] operations = [ migrations.AddField( model_name='testtable', name='field', field=models.IntegerField(db_index=True, null=True), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0015_drop_field_with_index.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:44 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0014_add_field_with_index'), ] operations = [ migrations.RemoveField( model_name='testtable', name='field', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0016_add_check_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:49 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0015_drop_field_with_index'), ] operations = [ migrations.AddConstraint( model_name='testtable', constraint=models.CheckConstraint(check=models.Q(test_field_int__gt=0), name='test_check_constraint'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0017_drop_check_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:49 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0016_add_check_constraint'), ] operations = [ migrations.RemoveConstraint( model_name='testtable', name='test_check_constraint', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0018_add_unique_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:51 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0017_drop_check_constraint'), ] operations = [ migrations.AddConstraint( model_name='testtable', constraint=models.UniqueConstraint(fields=('test_field_int',), name='test_uniq_constraint'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0019_drop_unique_constraint.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:51 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0018_add_unique_constraint'), ] operations = [ migrations.RemoveConstraint( model_name='testtable', name='test_uniq_constraint', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0020_add_unique_constraint_with_condition.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:52 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0019_drop_unique_constraint'), ] operations = [ migrations.AddConstraint( model_name='testtable', constraint=models.UniqueConstraint( condition=models.Q(test_field_int__isnull=False), fields=('test_field_int',), name='test_uniq_constraint', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0021_drop_unique_constraint_with_condition.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:52 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0020_add_unique_constraint_with_condition'), ] operations = [ migrations.RemoveConstraint( model_name='testtable', name='test_uniq_constraint', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0022_add_index.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:53 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0021_drop_unique_constraint_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=models.Index(fields=['test_field_int'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0023_drop_index.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:53 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0022_add_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0024_add_index_with_condition.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:53 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0023_drop_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=models.Index( condition=models.Q(test_field_int__isnull=False), fields=['test_field_int'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0025_drop_index_with_condition.py ================================================ # Generated by Django 3.1 on 2019-09-22 20:54 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0024_add_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0026_add_brin_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:38 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0025_drop_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.BrinIndex(fields=['test_field_int'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0027_drop_brin_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:38 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0026_add_brin_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0028_add_brin_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:39 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0027_drop_brin_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.BrinIndex( condition=models.Q(test_field_int__isnull=False), fields=['test_field_int'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0029_drop_brin_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:39 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0028_add_brin_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0030_add_btree_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:45 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0029_drop_brin_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.BTreeIndex(fields=['test_field_int'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0031_drop_btree_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:46 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0030_add_btree_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0032_add_btree_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:46 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0031_drop_btree_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.BTreeIndex( condition=models.Q(test_field_int__isnull=False), fields=['test_field_int'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0033_drop_btree_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:46 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0032_add_btree_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0034_add_gin_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:46 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0033_drop_btree_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.GinIndex(fields=['test_field_tsv'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0035_drop_gin_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0034_add_gin_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0036_add_gin_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0035_drop_gin_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.GinIndex( condition=models.Q(test_field_tsv__isnull=False), fields=['test_field_tsv'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0037_drop_gin_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0036_add_gin_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0038_add_gist_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0037_drop_gin_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.GistIndex(fields=['test_field_tsv'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0039_drop_gist_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0038_add_gist_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0040_add_gist_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:47 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0039_drop_gist_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.GistIndex( condition=models.Q(test_field_tsv__isnull=False), fields=['test_field_tsv'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0041_drop_gist_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:48 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0040_add_gist_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0042_add_hash_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:48 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0041_drop_gist_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.HashIndex(fields=['test_field_int'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0043_drop_hash_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:48 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0042_add_hash_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0044_add_hash_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:48 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0043_drop_hash_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.HashIndex( condition=models.Q(test_field_int__isnull=False), fields=['test_field_int'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0045_drop_hash_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:49 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0044_add_hash_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0046_add_spgist_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:49 import django.contrib.postgres.indexes from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0045_drop_hash_index_with_condition'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.SpGistIndex(fields=['test_field_str'], name='test_index'), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0047_drop_spgist_index.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:49 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0046_add_spgist_index'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0048_add_spgist_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:49 import django.contrib.postgres.indexes from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0047_drop_spgist_index'), ] operations = [ migrations.AddIndex( model_name='testtable', index=django.contrib.postgres.indexes.SpGistIndex( condition=models.Q(test_field_str__isnull=False), fields=['test_field_str'], name='test_index', ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0049_drop_spgist_index_with_condition.py ================================================ # Generated by Django 3.0a1 on 2019-10-14 19:49 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0048_add_spgist_index_with_condition'), ] operations = [ migrations.RemoveIndex( model_name='testtable', name='test_index', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0050_add_unique_constraint_deferrable.py ================================================ # Generated by Django 4.2.20 on 2025-03-16 16:58 import django.db.models.constraints from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0049_drop_spgist_index_with_condition'), ] operations = [ migrations.AddConstraint( model_name='testtable', constraint=models.UniqueConstraint( deferrable=django.db.models.constraints.Deferrable['DEFERRED'], fields=('test_field_int',), name='test_uniq_constraint_deferred' ), ), migrations.AddConstraint( model_name='testtable', constraint=models.UniqueConstraint( deferrable=django.db.models.constraints.Deferrable['IMMEDIATE'], fields=('test_field_str',), name='test_uniq_constraint_immediate' ), ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/0051_drop_unique_constraint_deferrable.py ================================================ # Generated by Django 4.2.20 on 2025-03-16 16:58 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('good_flow_app', '0050_add_unique_constraint_deferrable'), ] operations = [ migrations.RemoveConstraint( model_name='testtable', name='test_uniq_constraint_deferred', ), migrations.RemoveConstraint( model_name='testtable', name='test_uniq_constraint_immediate', ), ] ================================================ FILE: tests/apps/good_flow_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_app/models.py ================================================ from django.contrib.postgres.search import SearchVectorField from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) test_field_tsv = SearchVectorField() class RelatedTestTable(models.Model): pass ================================================ FILE: tests/apps/good_flow_app_concurrently/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_app_concurrently/migrations/0001_initial.py ================================================ # Generated by Django 3.1 on 2019-09-21 20:09 import django.contrib.postgres.search from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('good_flow_app_concurrently', 'TestTable') TestTable.objects.using(db_alias).create(test_field_int=1) class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='RelatedTestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ], ), migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('test_field_int', models.IntegerField()), ('test_field_str', models.CharField(max_length=10)), ('test_field_tsv', django.contrib.postgres.search.SearchVectorField()), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/good_flow_app_concurrently/migrations/0002_auto_20191210_2147.py ================================================ # Generated by Django 3.0a1 on 2019-12-10 21:47 from django.contrib.postgres.operations import AddIndexConcurrently from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ('good_flow_app_concurrently', '0001_initial'), ] operations = [ AddIndexConcurrently( model_name='testtable', index=models.Index(fields=['test_field_int'], name='good_flow_a_test_fi_0b7e6f_idx'), ), ] ================================================ FILE: tests/apps/good_flow_app_concurrently/migrations/0003_auto_20191210_2148.py ================================================ # Generated by Django 3.0a1 on 2019-12-10 21:48 from django.contrib.postgres.operations import RemoveIndexConcurrently from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ('good_flow_app_concurrently', '0002_auto_20191210_2147'), ] operations = [ RemoveIndexConcurrently( model_name='testtable', name='good_flow_a_test_fi_0b7e6f_idx', ), ] ================================================ FILE: tests/apps/good_flow_app_concurrently/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_app_concurrently/models.py ================================================ from django.contrib.postgres.search import SearchVectorField from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) test_field_tsv = SearchVectorField() class RelatedTestTable(models.Model): pass ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0001_initial.py ================================================ import django.db.models.deletion import django.db.models.functions.math from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="TestTableMain", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("main_id", models.IntegerField(null=True, unique=True)), ("field_u1", models.IntegerField(null=True)), ("field_u2", models.IntegerField(null=True)), ("field_u3", models.IntegerField(null=True)), ("field_u4", models.IntegerField(null=True)), ("field_u5", models.IntegerField(null=True)), ("field_u6", models.IntegerField(null=True)), ("field_u7", models.IntegerField(null=True)), ("field_i1", models.IntegerField(null=True)), ("field_i2", models.IntegerField(null=True)), ("field_i3", models.IntegerField(null=True)), ("field_i4", models.IntegerField(null=True)), ("field_i5", models.IntegerField(null=True)), ("field_i6", models.IntegerField(null=True)), ("field_i7", models.IntegerField(null=True)), ], options={ "db_table": "drop_col_test_table_main", }, ), migrations.CreateModel( name="TestTableParent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ], options={ "db_table": "drop_col_test_table_parent", }, ), migrations.CreateModel( name="TestTableChild", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "main", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_column_with_constraints.testtablemain", to_field="main_id", ), ), ], options={ "db_table": "drop_col_test_table_child", }, ), migrations.AddField( model_name="testtablemain", name="parent", field=models.OneToOneField( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_column_with_constraints.testtableparent", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), models.F("field_i1"), name="drop_col_i1" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index(fields=["parent", "field_i2"], name="drop_col_i2"), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( django.db.models.functions.math.Abs("field_i3"), name="drop_col_i3" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), condition=models.Q(("field_i4__gt", 0)), name="drop_col_i4", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( condition=models.Q(("field_i5__gt", 0)), fields=["parent"], name="drop_col_i5", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), include=("field_i6",), name="drop_col_i6" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( fields=["parent"], include=("field_i7",), name="drop_col_i7" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( models.F("parent"), models.F("field_u1"), name="drop_col_u1" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( fields=("parent", "field_u2"), name="drop_col_u2" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( django.db.models.functions.math.Abs("field_u3"), name="drop_col_u3" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( models.F("parent"), condition=models.Q(("field_u4__gt", 0)), name="drop_col_u4", ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( condition=models.Q(("field_u5__gt", 0)), fields=("parent",), name="drop_col_u5", ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( models.F("parent"), include=("field_u6",), name="drop_col_u6" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( fields=("parent",), include=("field_u7",), name="drop_col_u7" ), ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0002_remove_testtablemain_drop_col_u1_and_more.py ================================================ from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("good_flow_drop_column_with_constraints", "0001_initial"), ] operations = [ # emulate worst case untracked constraints migrations.SeparateDatabaseAndState( database_operations=[ # as constraint dropped with cascade or explicitly before cascade # we need to back untracked constraint creation to make migration revert happy migrations.RunSQL( migrations.RunSQL.noop, """ ALTER TABLE "drop_col_test_table_main" ADD CONSTRAINT "drop_col_u2" UNIQUE ("parent_id", "field_u2"); """ ) ], state_operations=[ migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u1", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u2", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u3", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u4", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u5", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u6", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u7", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i1", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i2", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i3", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i4", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i5", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i6", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i7", ), migrations.RemoveField( model_name="testtablechild", name="main", ), ], ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0003_remove_testtablemain_field_i7.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0002_remove_testtablemain_drop_col_u1_and_more", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i7", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0004_remove_testtablemain_field_i6.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0003_remove_testtablemain_field_i7", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i6", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0005_remove_testtablemain_field_i5.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0004_remove_testtablemain_field_i6", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i5", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0006_remove_testtablemain_field_i4.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0005_remove_testtablemain_field_i5", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i4", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0007_remove_testtablemain_field_i3.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0006_remove_testtablemain_field_i4", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i3", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0008_remove_testtablemain_field_i2.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0007_remove_testtablemain_field_i3", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i2", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0009_remove_testtablemain_field_i1.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0008_remove_testtablemain_field_i2", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i1", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0010_remove_testtablemain_field_u7.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0009_remove_testtablemain_field_i1", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u7", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0011_remove_testtablemain_field_u6.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0010_remove_testtablemain_field_u7", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u6", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0012_remove_testtablemain_field_u5.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0011_remove_testtablemain_field_u6", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u5", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0013_remove_testtablemain_field_u4.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0012_remove_testtablemain_field_u5", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u4", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0014_remove_testtablemain_field_u3.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0013_remove_testtablemain_field_u4", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u3", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0015_remove_testtablemain_field_u2.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0014_remove_testtablemain_field_u3", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u2", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0016_remove_testtablemain_field_u1.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0015_remove_testtablemain_field_u2", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u1", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0017_remove_testtablemain_main_id.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints", "0016_remove_testtablemain_field_u1", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="main_id", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/0018_remove_testtablemain_parent.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ("good_flow_drop_column_with_constraints", "0017_remove_testtablemain_main_id"), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="parent", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints/models.py ================================================ from django.db import models # from django.db.models.functions import Abs class TestTableParent(models.Model): class Meta: db_table = 'drop_col_test_table_parent' class TestTableMain(models.Model): # parent = models.OneToOneField(TestTableParent, null=True, on_delete=models.CASCADE) # main_id = models.IntegerField(null=True, unique=True) # field_u1 = models.IntegerField(null=True) # field_u2 = models.IntegerField(null=True) # field_u3 = models.IntegerField(null=True) # field_u4 = models.IntegerField(null=True) # field_u5 = models.IntegerField(null=True) # field_u6 = models.IntegerField(null=True) # field_u7 = models.IntegerField(null=True) # field_i1 = models.IntegerField(null=True) # field_i2 = models.IntegerField(null=True) # field_i3 = models.IntegerField(null=True) # field_i4 = models.IntegerField(null=True) # field_i5 = models.IntegerField(null=True) # field_i6 = models.IntegerField(null=True) # field_i7 = models.IntegerField(null=True) class Meta: db_table = 'drop_col_test_table_main' # constraints = [ # models.UniqueConstraint("parent", "field_u1", name="drop_col_u1"), # models.UniqueConstraint(fields=["parent", "field_u2"], name="drop_col_u2"), # models.UniqueConstraint(Abs("field_u3"), name="drop_col_u3"), # models.UniqueConstraint("parent", name="drop_col_u4", condition=models.Q(field_u4__gt=0)), # models.UniqueConstraint(fields=["parent"], name="drop_col_u5", condition=models.Q(field_u5__gt=0)), # models.UniqueConstraint("parent", name="drop_col_u6", include=["field_u6"]), # models.UniqueConstraint(fields=["parent"], name="drop_col_u7", include=["field_u7"]), # ] # indexes = [ # models.Index("parent", "field_i1", name="drop_col_i1"), # models.Index(fields=["parent", "field_i2"], name="drop_col_i2"), # models.Index(Abs("field_i3"), name="drop_col_i3"), # models.Index("parent", name="drop_col_i4", condition=models.Q(field_i4__gt=0)), # models.Index(fields=["parent"], name="drop_col_i5", condition=models.Q(field_i5__gt=0)), # models.Index("parent", name="drop_col_i6", include=["field_i6"]), # models.Index(fields=["parent"], name="drop_col_i7", include=["field_i7"]), # ] class TestTableChild(models.Model): # main = models.ForeignKey(TestTableMain, to_field="main_id", null=True, on_delete=models.CASCADE) class Meta: db_table = 'drop_col_test_table_child' ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0001_initial.py ================================================ import django.db.models.deletion import django.db.models.functions.math from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="TestTableMain", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("main_id", models.IntegerField(null=True, unique=True)), ("field_u2", models.IntegerField(null=True)), ("field_u5", models.IntegerField(null=True)), ("field_u7", models.IntegerField(null=True)), ("field_i1", models.IntegerField(null=True)), ("field_i2", models.IntegerField(null=True)), ("field_i3", models.IntegerField(null=True)), ("field_i4", models.IntegerField(null=True)), ("field_i5", models.IntegerField(null=True)), ("field_i6", models.IntegerField(null=True)), ("field_i7", models.IntegerField(null=True)), ], options={ "db_table": "drop_col_test_table_main", }, ), migrations.CreateModel( name="TestTableParent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ], options={ "db_table": "drop_col_test_table_parent", }, ), migrations.CreateModel( name="TestTableChild", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "main", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_column_with_constraints_old.testtablemain", to_field="main_id", ), ), ], options={ "db_table": "drop_col_test_table_child", }, ), migrations.AddField( model_name="testtablemain", name="parent", field=models.OneToOneField( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_column_with_constraints_old.testtableparent", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), models.F("field_i1"), name="drop_col_i1" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index(fields=["parent", "field_i2"], name="drop_col_i2"), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( django.db.models.functions.math.Abs("field_i3"), name="drop_col_i3" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), condition=models.Q(("field_i4__gt", 0)), name="drop_col_i4", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( condition=models.Q(("field_i5__gt", 0)), fields=["parent"], name="drop_col_i5", ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( models.F("parent"), include=("field_i6",), name="drop_col_i6" ), ), migrations.AddIndex( model_name="testtablemain", index=models.Index( fields=["parent"], include=("field_i7",), name="drop_col_i7" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( fields=("parent", "field_u2"), name="drop_col_u2" ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( condition=models.Q(("field_u5__gt", 0)), fields=("parent",), name="drop_col_u5", ), ), migrations.AddConstraint( model_name="testtablemain", constraint=models.UniqueConstraint( fields=("parent",), include=("field_u7",), name="drop_col_u7" ), ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0002_remove_testtablemain_drop_col_u2_and_more.py ================================================ from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("good_flow_drop_column_with_constraints_old", "0001_initial"), ] operations = [ # emulate worst case untracked constraints migrations.SeparateDatabaseAndState( database_operations=[ # as constraint dropped with cascade or explicitly before cascade # we need to back untracked constraint creation to make migration revert happy migrations.RunSQL( migrations.RunSQL.noop, """ ALTER TABLE "drop_col_test_table_main" ADD CONSTRAINT "drop_col_u2" UNIQUE ("parent_id", "field_u2"); """ ) ], state_operations=[ migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u2", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u5", ), migrations.RemoveConstraint( model_name="testtablemain", name="drop_col_u7", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i1", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i2", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i3", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i4", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i5", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i6", ), migrations.RemoveIndex( model_name="testtablemain", name="drop_col_i7", ), migrations.RemoveField( model_name="testtablechild", name="main", ), ], ) ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0003_remove_testtablemain_field_i7.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0002_remove_testtablemain_drop_col_u2_and_more", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i7", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0004_remove_testtablemain_field_i6.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0003_remove_testtablemain_field_i7", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i6", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0005_remove_testtablemain_field_i5.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0004_remove_testtablemain_field_i6", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i5", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0006_remove_testtablemain_field_i4.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0005_remove_testtablemain_field_i5", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i4", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0007_remove_testtablemain_field_i3.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0006_remove_testtablemain_field_i4", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i3", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0008_remove_testtablemain_field_i2.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0007_remove_testtablemain_field_i3", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i2", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0009_remove_testtablemain_field_i1.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0008_remove_testtablemain_field_i2", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_i1", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0010_remove_testtablemain_field_u7.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0009_remove_testtablemain_field_i1", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u7", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0011_remove_testtablemain_field_u5.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0010_remove_testtablemain_field_u7", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u5", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0012_remove_testtablemain_field_u2.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0011_remove_testtablemain_field_u5", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="field_u2", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0013_remove_testtablemain_main_id.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0012_remove_testtablemain_field_u2", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="main_id", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/0014_remove_testtablemain_parent.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ( "good_flow_drop_column_with_constraints_old", "0013_remove_testtablemain_main_id", ), ] operations = [ migrations.RemoveField( model_name="testtablemain", name="parent", ), ] ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_column_with_constraints_old/models.py ================================================ from django.db import models # from django.db.models.functions import Abs class TestTableParent(models.Model): class Meta: db_table = 'drop_col_test_table_parent' class TestTableMain(models.Model): # parent = models.OneToOneField(TestTableParent, null=True, on_delete=models.CASCADE) # main_id = models.IntegerField(null=True, unique=True) # field_u2 = models.IntegerField(null=True) # field_u5 = models.IntegerField(null=True) # field_u7 = models.IntegerField(null=True) # field_i1 = models.IntegerField(null=True) # field_i2 = models.IntegerField(null=True) # field_i3 = models.IntegerField(null=True) # field_i4 = models.IntegerField(null=True) # field_i5 = models.IntegerField(null=True) # field_i6 = models.IntegerField(null=True) # field_i7 = models.IntegerField(null=True) class Meta: db_table = 'drop_col_test_table_main' # constraints = [ # models.UniqueConstraint(fields=["parent", "field_u2"], name="drop_col_u2"), # models.UniqueConstraint(fields=["parent"], name="drop_col_u5", condition=models.Q(field_u5__gt=0)), # models.UniqueConstraint(fields=["parent"], name="drop_col_u7", include=["field_u7"]), # ] # indexes = [ # models.Index("parent", "field_i1", name="drop_col_i1"), # models.Index(fields=["parent", "field_i2"], name="drop_col_i2"), # models.Index(Abs("field_i3"), name="drop_col_i3"), # models.Index("parent", name="drop_col_i4", condition=models.Q(field_i4__gt=0)), # models.Index(fields=["parent"], name="drop_col_i5", condition=models.Q(field_i5__gt=0)), # models.Index("parent", name="drop_col_i6", include=["field_i6"]), # models.Index(fields=["parent"], name="drop_col_i7", include=["field_i7"]), # ] class TestTableChild(models.Model): # main = models.ForeignKey(TestTableMain, to_field="main_id", null=True, on_delete=models.CASCADE) class Meta: db_table = 'drop_col_test_table_child' ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/migrations/0001_initial.py ================================================ import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="TestTableMain", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("main_id", models.IntegerField(null=True, unique=True)), ], options={ "db_table": "drop_tbl_test_table_main", }, ), migrations.CreateModel( name="TestTableParent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ], options={ "db_table": "drop_tbl_test_table_parent", }, ), migrations.CreateModel( name="TestTableChild", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "main", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_table_with_constraints.testtablemain", to_field="main_id", ), ), ], options={ "db_table": "drop_tbl_test_table_child", }, ), migrations.AddField( model_name="testtablemain", name="parent", field=models.OneToOneField( null=True, on_delete=django.db.models.deletion.CASCADE, to="good_flow_drop_table_with_constraints.testtableparent", ), ), ] ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/migrations/0002_remove_testtablechild_main.py ================================================ from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("good_flow_drop_table_with_constraints", "0001_initial"), ] operations = [ # emulate worst case untracked constraints migrations.SeparateDatabaseAndState( database_operations=[], state_operations=[ migrations.RemoveField( model_name="testtablechild", name="main", ), ], ), ] ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/migrations/0003_delete_testtablemain.py ================================================ from django.db import migrations class Migration(migrations.Migration): atomic = False dependencies = [ ("good_flow_drop_table_with_constraints", "0002_remove_testtablechild_main"), ] operations = [ migrations.DeleteModel( name="TestTableMain", ), ] ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/good_flow_drop_table_with_constraints/models.py ================================================ from django.db import models class TestTableParent(models.Model): class Meta: db_table = 'drop_tbl_test_table_parent' # class TestTableMain(models.Model): # parent = models.OneToOneField(TestTableParent, null=True, on_delete=models.CASCADE) # main_id = models.IntegerField(null=True, unique=True) # # class Meta: # db_table = 'drop_tbl_test_table_main' class TestTableChild(models.Model): # main = models.ForeignKey(TestTableMain, to_field="main_id", null=True, on_delete=models.CASCADE) class Meta: db_table = 'drop_tbl_test_table_child' ================================================ FILE: tests/apps/idempotency_add_auto_field_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_auto_field_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ("test_field_int", models.IntegerField(primary_key=True, serialize=False)), ], ), migrations.CreateModel( name="TestTable", fields=[ ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_auto_field_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_auto_field_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.AutoField(primary_key=True, serialize=False), ), ] ================================================ FILE: tests/apps/idempotency_add_auto_field_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_auto_field_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.AutoField(primary_key=True) ================================================ FILE: tests/apps/idempotency_add_check_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_check_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_check_app/migrations/0002_relatedtesttable_idempotency_add_check_app_relatedtesttable_check.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_check_app", "0001_initial"), ] operations = [ migrations.AddConstraint( model_name="relatedtesttable", constraint=models.CheckConstraint( check=models.Q(("test_field_int__gt", 0)), name="idempotency_add_check_app_relatedtesttable_check", ), ), ] ================================================ FILE: tests/apps/idempotency_add_check_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_check_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True) class Meta: constraints = [ models.CheckConstraint( check=models.Q(test_field_int__gt=0), name="idempotency_add_check_app_relatedtesttable_check", ) ] ================================================ FILE: tests/apps/idempotency_add_column_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_column_app/migrations/0002_relatedtesttable_test_field_str.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_column_app", "0001_initial"), ] operations = [ migrations.AddField( model_name="relatedtesttable", name="test_field_str", field=models.CharField(max_length=10, null=True), ), ] ================================================ FILE: tests/apps/idempotency_add_column_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True) test_field_str = models.CharField(max_length=10, null=True) ================================================ FILE: tests/apps/idempotency_add_column_foreign_key_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_foreign_key_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_column_foreign_key_app/migrations/0002_relatedtesttable_test_model.py ================================================ import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_column_foreign_key_app", "0001_initial"), ] operations = [ migrations.AddField( model_name="relatedtesttable", name="test_model", field=models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="idempotency_add_column_foreign_key_app.testtable", ), ), ] ================================================ FILE: tests/apps/idempotency_add_column_foreign_key_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_foreign_key_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True) test_model = models.ForeignKey(TestTable, null=True, on_delete=models.CASCADE) ================================================ FILE: tests/apps/idempotency_add_column_one_to_one_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_one_to_one_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_column_one_to_one_app/migrations/0002_relatedtesttable_test_model.py ================================================ import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_column_one_to_one_app", "0001_initial"), ] operations = [ migrations.AddField( model_name="relatedtesttable", name="test_model", field=models.OneToOneField( null=True, on_delete=django.db.models.deletion.CASCADE, to="idempotency_add_column_one_to_one_app.testtable", ), ), ] ================================================ FILE: tests/apps/idempotency_add_column_one_to_one_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_column_one_to_one_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True) test_model = models.OneToOneField(TestTable, null=True, on_delete=models.CASCADE) ================================================ FILE: tests/apps/idempotency_add_foreign_key_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_foreign_key_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_foreign_key_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_foreign_key_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.ForeignKey( db_column="test_field_int", null=True, on_delete=django.db.models.deletion.CASCADE, to="idempotency_add_foreign_key_app.testtable", ), ), ] ================================================ FILE: tests/apps/idempotency_add_foreign_key_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_foreign_key_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.ForeignKey( TestTable, null=True, on_delete=models.CASCADE, db_column="test_field_int", ) ================================================ FILE: tests/apps/idempotency_add_index_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_index_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_index_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_index_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.IntegerField(db_index=True, null=True), ), ] ================================================ FILE: tests/apps/idempotency_add_index_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_index_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True, db_index=True) ================================================ FILE: tests/apps/idempotency_add_index_meta_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_index_meta_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_index_meta_app/migrations/0002_relatedtesttable_relatedtesttable_idx.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_index_meta_app", "0001_initial"), ] operations = [ migrations.AddIndex( model_name="relatedtesttable", index=models.Index( fields=["test_field_int", "test_field_str"], name="relatedtesttable_idx" ), ), ] ================================================ FILE: tests/apps/idempotency_add_index_meta_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_index_meta_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class Meta: indexes = [ models.Index( name="relatedtesttable_idx", fields=["test_field_int", "test_field_str"], ) ] ================================================ FILE: tests/apps/idempotency_add_one_to_one_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_one_to_one_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_one_to_one_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_one_to_one_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.OneToOneField( db_column="test_field_int", null=True, on_delete=django.db.models.deletion.CASCADE, to="idempotency_add_one_to_one_app.testtable", ), ), ] ================================================ FILE: tests/apps/idempotency_add_one_to_one_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_one_to_one_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.OneToOneField( TestTable, null=True, on_delete=models.CASCADE, db_column="test_field_int", ) ================================================ FILE: tests/apps/idempotency_add_primary_key_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_primary_key_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_primary_key_app/migrations/0002_remove_relatedtesttable_id_and_more.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_primary_key_app", "0001_initial"), ] operations = [ migrations.RemoveField( model_name="relatedtesttable", name="id", ), migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.IntegerField(primary_key=True, serialize=False), ), ] ================================================ FILE: tests/apps/idempotency_add_primary_key_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_primary_key_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(primary_key=True) ================================================ FILE: tests/apps/idempotency_add_unique_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_unique_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_unique_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_unique_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.IntegerField(null=True, unique=True), ), ] ================================================ FILE: tests/apps/idempotency_add_unique_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_unique_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField(null=True, unique=True) ================================================ FILE: tests/apps/idempotency_add_unique_meta_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_unique_meta_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_add_unique_meta_app/migrations/0002_relatedtesttable_relatedtesttable_uniq.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_add_unique_meta_app", "0001_initial"), ] operations = [ migrations.AddConstraint( model_name="relatedtesttable", constraint=models.UniqueConstraint( fields=("test_field_int", "test_field_str"), name="relatedtesttable_uniq", ), ), ] ================================================ FILE: tests/apps/idempotency_add_unique_meta_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_add_unique_meta_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class Meta: constraints = [ models.UniqueConstraint( name="relatedtesttable_uniq", fields=["test_field_int", "test_field_str"], ) ] ================================================ FILE: tests/apps/idempotency_create_table_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_create_table_app/migrations/0001_initial.py ================================================ from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model("idempotency_create_table_app", "TestTable") TestTable.objects.using(db_alias).create(test_field_int=1) class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="TestTable", fields=[ ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), ] ================================================ FILE: tests/apps/idempotency_create_table_app/migrations/0002_relatedtesttable_and_more.py ================================================ # Generated by Django 5.0 on 2024-04-22 15:54 import django.db.models.deletion from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_create_table_app", "0001_initial"), ] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ( "test_model", models.ForeignKey( null=True, on_delete=django.db.models.deletion.CASCADE, to="idempotency_create_table_app.testtable", ), ), ], ), migrations.AddConstraint( model_name="relatedtesttable", constraint=models.UniqueConstraint( fields=("test_model", "test_field_int"), name="idempotency_create_table_app_relatedtesttable_uniq", ), ), ] ================================================ FILE: tests/apps/idempotency_create_table_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_create_table_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_model = models.ForeignKey(TestTable, null=True, on_delete=models.CASCADE) test_field_int = models.IntegerField(null=True) class Meta: constraints = [ models.UniqueConstraint( name="idempotency_create_table_app_relatedtesttable_uniq", fields=["test_model", "test_field_int"], ) ] ================================================ FILE: tests/apps/idempotency_set_not_null_app/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_set_not_null_app/migrations/0001_initial.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [] operations = [ migrations.CreateModel( name="RelatedTestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField(null=True)), ], ), migrations.CreateModel( name="TestTable", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("test_field_int", models.IntegerField()), ("test_field_str", models.CharField(max_length=10)), ], ), ] ================================================ FILE: tests/apps/idempotency_set_not_null_app/migrations/0002_alter_relatedtesttable_test_field_int.py ================================================ from django.db import migrations, models class Migration(migrations.Migration): atomic = False dependencies = [ ("idempotency_set_not_null_app", "0001_initial"), ] operations = [ migrations.AlterField( model_name="relatedtesttable", name="test_field_int", field=models.IntegerField(), ), ] ================================================ FILE: tests/apps/idempotency_set_not_null_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/idempotency_set_not_null_app/models.py ================================================ from django.db import models class TestTable(models.Model): test_field_int = models.IntegerField() test_field_str = models.CharField(max_length=10) class RelatedTestTable(models.Model): test_field_int = models.IntegerField() ================================================ FILE: tests/apps/old_notnull_check_constraint_migration_app/__init__.py ================================================ ================================================ FILE: tests/apps/old_notnull_check_constraint_migration_app/migrations/0001_initial.py ================================================ # Generated by Django 2.2.5 on 2019-09-23 23:28 from django.db import migrations, models def insert_objects(apps, schema_editor): db_alias = schema_editor.connection.alias TestTable = apps.get_model('old_notnull_check_constraint_migration_app', 'TestTable') TestTable.objects.using(db_alias).create(field=1) class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='TestTable', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('field', models.IntegerField()), ], ), migrations.RunPython(insert_objects, migrations.RunPython.noop), migrations.RunSQL( 'ALTER TABLE old_notnull_check_constraint_migration_app_testtable ' 'ADD CONSTRAINT old_constraint_notnull CHECK (field IS NOT NULL)', migrations.RunSQL.noop, ), migrations.RunSQL( 'ALTER TABLE old_notnull_check_constraint_migration_app_testtable ' 'ALTER COLUMN field DROP NOT NULL', migrations.RunSQL.noop, ), ] ================================================ FILE: tests/apps/old_notnull_check_constraint_migration_app/migrations/__init__.py ================================================ ================================================ FILE: tests/apps/old_notnull_check_constraint_migration_app/models.py ================================================ from django.db import models class TestTable(models.Model): field = models.IntegerField() ================================================ FILE: tests/integration/__init__.py ================================================ import contextlib import os import subprocess from typing import List from django.conf import settings from django.db import connection from django.db.backends.postgresql.base import Database def one_line_sql(sql: str) -> str: return sql.replace(" ", "").replace("\n", " ").replace("( ", "(").replace(" )", ")").replace(" ", " ").strip() def split_sql_queries(sql: str) -> List[str]: return [line for line in sql.splitlines() if line and not line.startswith("--")] def pg_dump(table: str) -> str: host = settings.DATABASES["default"]["HOST"] port = settings.DATABASES["default"]["PORT"] name = settings.DATABASES["default"]["NAME"] user = settings.DB_SUPER_USER password = settings.DB_SUPER_PASSWORD env = os.environ.copy() | {"PGPASSWORD": password} cmd = f"pg_dump -h {host} -p {port} -U {user} -d {name} -s -t {table} --restrict-key=test" popen = subprocess.run(cmd, env=env, text=True, shell=True, capture_output=True, check=True) return popen.stdout @contextlib.contextmanager def super_user_cursor(): host = settings.DATABASES["default"]["HOST"] port = settings.DATABASES["default"]["PORT"] name = settings.DATABASES["default"]["NAME"] user = settings.DB_SUPER_USER password = settings.DB_SUPER_PASSWORD conn = Database.connect(f"host={host} port={port} dbname={name} user={user} password={password}") try: conn.autocommit = True cursor = conn.cursor() try: yield cursor finally: cursor.close() finally: conn.close() def make_index_invalid(table: str, index: str): with super_user_cursor() as cursor: cursor.execute(""" UPDATE pg_index SET indisvalid = false WHERE indrelid = %s::regclass::oid AND indexrelid = %s::regclass::oid """, [table, index]) assert not is_valid_index(table, index) def is_valid_index(table: str, index: str) -> bool: with connection.cursor() as cursor: cursor.execute(""" SELECT indisvalid FROM pg_index WHERE indrelid = %s::regclass::oid AND indexrelid = %s::regclass::oid """, [table, index]) data = cursor.fetchone() if data is None: raise ValueError(f"index {index} not found for {table}") return data[0] def is_valid_constraint(table: str, constraint: str) -> bool: with connection.cursor() as cursor: cursor.execute(""" SELECT convalidated FROM pg_constraint WHERE conrelid = %s::regclass::oid AND conname = %s """, [table, constraint]) data = cursor.fetchone() if data is None: raise ValueError(f"constraint {constraint} not found for {table}") return data[0] ================================================ FILE: tests/integration/test_migrations.py ================================================ import os import textwrap import django from django.apps import apps from django.core.management import call_command from django.db import connection from django.test import modify_settings, override_settings import pytest from django_zero_downtime_migrations.backends.postgres.schema import ( UnsafeOperationException ) from tests import skip_for_default_django_backend from tests.integration import ( is_valid_constraint, is_valid_index, make_index_invalid, one_line_sql, pg_dump, split_sql_queries ) @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_sqlmigrate_with_idempotent_mode(): app_config = apps.get_app_config("good_flow_app") for migration in os.listdir(os.path.join(app_config.path, "migrations")): if not migration.startswith("_") and migration.endswith(".py"): call_command("sqlmigrate", "good_flow_app", migration[:4]) @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_alter_table_with_same_db_table"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow_alter_table_with_same_db_table(): # forward call_command("migrate", "good_flow_alter_table_with_same_db_table") # backward call_command("migrate", "good_flow_alter_table_with_same_db_table", "zero") @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow(): # forward call_command("migrate", "good_flow_app") # backward call_command("migrate", "good_flow_app", "zero") @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_app_concurrently"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow_create_and_drop_index_concurrently(): # forward call_command("migrate", "good_flow_app_concurrently") # backward call_command("migrate", "good_flow_app_concurrently", "zero") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_rollback_flow_drop_column_with_notnull_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_rollback_flow_drop_column_with_notnull(): # forward call_command("migrate", "bad_rollback_flow_drop_column_with_notnull_app") # backward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_rollback_flow_drop_column_with_notnull_app", "0001") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_rollback_flow_drop_column_with_notnull_default_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_rollback_flow_drop_column_with_notnull_default(): # forward call_command("migrate", "bad_rollback_flow_drop_column_with_notnull_default_app") # backward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_rollback_flow_drop_column_with_notnull_default_app", "0001") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_rollback_flow_change_char_type_that_safe_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_rollback_flow_change_char_type_that_safe(): # forward call_command("migrate", "bad_rollback_flow_change_char_type_that_safe_app") # backward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_rollback_flow_change_char_type_that_safe_app", "0001") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_flow_add_column_with_default_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_flow_add_column_with_default(): # forward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_flow_add_column_with_default_app") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_flow_add_column_with_notnull_default_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_flow_add_column_with_notnull_default(): # forward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_flow_add_column_with_notnull_default_app") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_flow_add_column_with_notnull_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_flow_add_column_with_notnull(): # forward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_flow_add_column_with_notnull_app") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.bad_flow_change_char_type_that_unsafe_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_bad_flow_change_char_type_that_unsafe(): # forward with pytest.raises(UnsafeOperationException): call_command("migrate", "bad_flow_change_char_type_that_unsafe_app") @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.decimal_to_float_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False) def test_decimal_to_float_app(): # forward call_command("migrate", "decimal_to_float_app") # backward call_command("migrate", "decimal_to_float_app", "zero") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_drop_table_with_constraints"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow_drop_table_with_constraints(): with override_settings(ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP=False): call_command("migrate", "good_flow_drop_table_with_constraints") drop_tbl_test_table_parent_schema = pg_dump("drop_tbl_test_table_parent") drop_tbl_test_table_child_schema = pg_dump("drop_tbl_test_table_child") call_command("migrate", "good_flow_drop_table_with_constraints", "zero") _drop_child_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_tbl_test_table__main_id_8a4874b6_fk_drop_tbl_" IMMEDIATE; ALTER TABLE "drop_tbl_test_table_child" DROP CONSTRAINT "drop_tbl_test_table__main_id_8a4874b6_fk_drop_tbl_"; """) _drop_main_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_tbl_test_table__parent_id_5c6ff8d9_fk_drop_tbl_" IMMEDIATE; ALTER TABLE "drop_tbl_test_table_main" DROP CONSTRAINT "drop_tbl_test_table__parent_id_5c6ff8d9_fk_drop_tbl_"; """) _drop_table_sql = one_line_sql(""" DROP TABLE "drop_tbl_test_table_main" CASCADE; """) call_command("migrate", "good_flow_drop_table_with_constraints", "0002") migration_sql = call_command("sqlmigrate", "good_flow_drop_table_with_constraints", "0003") assert split_sql_queries(migration_sql) == [ _drop_child_foreign_key_constraint_sql, _drop_main_foreign_key_constraint_sql, _drop_table_sql, ] call_command("migrate", "good_flow_drop_table_with_constraints") assert pg_dump("drop_tbl_test_table_parent") == drop_tbl_test_table_parent_schema assert pg_dump("drop_tbl_test_table_child") == drop_tbl_test_table_child_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_drop_column_with_constraints"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow_drop_column_with_constraints(): with override_settings(ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP=False): call_command("migrate", "good_flow_drop_column_with_constraints") drop_col_test_table_parent_schema = pg_dump("drop_col_test_table_parent") drop_col_test_table_main_schema = pg_dump("drop_col_test_table_main") drop_col_test_table_child_schema = pg_dump("drop_col_test_table_child") call_command("migrate", "good_flow_drop_column_with_constraints", "zero") call_command("migrate", "good_flow_drop_column_with_constraints", "0002") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0003") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i7";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i7" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0003") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0004") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i6";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i6" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0004") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0005") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i5";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i5" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0005") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0006") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i4";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i4" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0006") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0007") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i3";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i3" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0007") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0008") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i2";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i2" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0008") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0009") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i1";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i1" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0009") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0010") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u7";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u7" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0010") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0011") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u6";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u6" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0011") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0012") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u5";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u5" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0012") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0013") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u4";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u4" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0013") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0014") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u3";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u3" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0014") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0015") assert split_sql_queries(migration_sql) == [ 'ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_u2";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u2" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0015") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0016") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u1";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u1" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints", "0016") _drop_main_id_child_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_col_test_table__main_id_9da91a1c_fk_drop_col_" IMMEDIATE; ALTER TABLE "drop_col_test_table_child" DROP CONSTRAINT "drop_col_test_table__main_id_9da91a1c_fk_drop_col_"; """) _drop_main_id_field_unique_constraint = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table_main_main_id_key"; """) _drop_main_id_column_sql = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP COLUMN "main_id" CASCADE; """) migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0017") assert split_sql_queries(migration_sql) == [ _drop_main_id_child_foreign_key_constraint_sql, _drop_main_id_field_unique_constraint, _drop_main_id_column_sql, ] call_command("migrate", "good_flow_drop_column_with_constraints", "0017") _drop_parent_id_main_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_col_test_table__parent_id_55b0b5e6_fk_drop_col_" IMMEDIATE; ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table__parent_id_55b0b5e6_fk_drop_col_"; """) _drop_parent_id_field_unique_constraint = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table_main_parent_id_55b0b5e6_uniq"; """) _drop_parent_id_column_sql = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP COLUMN "parent_id" CASCADE; """) migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints", "0018") assert split_sql_queries(migration_sql) == [ _drop_parent_id_main_foreign_key_constraint_sql, _drop_parent_id_field_unique_constraint, _drop_parent_id_column_sql, ] call_command("migrate", "good_flow_drop_column_with_constraints", "0018") call_command("migrate", "good_flow_drop_column_with_constraints") assert pg_dump("drop_col_test_table_parent") == drop_col_test_table_parent_schema assert pg_dump("drop_col_test_table_main") == drop_col_test_table_main_schema assert pg_dump("drop_col_test_table_child") == drop_col_test_table_child_schema @skip_for_default_django_backend @pytest.mark.skipif( django.VERSION[:2] >= (4, 0), reason="django after 4.0 case", ) @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.good_flow_drop_column_with_constraints_old"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_good_flow_drop_column_with_constraints_old(): with override_settings(ZERO_DOWNTIME_MIGRATIONS_EXPLICIT_CONSTRAINTS_DROP=False): call_command("migrate", "good_flow_drop_column_with_constraints_old") drop_col_test_table_parent_schema = pg_dump("drop_col_test_table_parent") drop_col_test_table_main_schema = pg_dump("drop_col_test_table_main") drop_col_test_table_child_schema = pg_dump("drop_col_test_table_child") call_command("migrate", "good_flow_drop_column_with_constraints_old", "zero") call_command("migrate", "good_flow_drop_column_with_constraints_old", "0002") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0003") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i7";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i7" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0003") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0004") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i6";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i6" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0004") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0005") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i5";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i5" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0005") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0006") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i4";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i4" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0006") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0007") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i3";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i3" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0007") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0008") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i2";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i2" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0008") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0009") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_i1";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_i1" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0009") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0010") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u7";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u7" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0010") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0011") assert split_sql_queries(migration_sql) == [ 'DROP INDEX CONCURRENTLY IF EXISTS "drop_col_u5";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u5" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0011") migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0012") assert split_sql_queries(migration_sql) == [ 'ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_u2";', 'ALTER TABLE "drop_col_test_table_main" DROP COLUMN "field_u2" CASCADE;', ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0012") _drop_main_id_child_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_col_test_table__main_id_9da91a1c_fk_drop_col_" IMMEDIATE; ALTER TABLE "drop_col_test_table_child" DROP CONSTRAINT "drop_col_test_table__main_id_9da91a1c_fk_drop_col_"; """) _drop_main_id_field_unique_constraint = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table_main_main_id_key"; """) _drop_main_id_column_sql = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP COLUMN "main_id" CASCADE; """) migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0013") assert split_sql_queries(migration_sql) == [ _drop_main_id_child_foreign_key_constraint_sql, _drop_main_id_field_unique_constraint, _drop_main_id_column_sql, ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0013") _drop_parent_id_main_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "drop_col_test_table__parent_id_55b0b5e6_fk_drop_col_" IMMEDIATE; ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table__parent_id_55b0b5e6_fk_drop_col_"; """) _drop_parent_id_field_unique_constraint = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP CONSTRAINT "drop_col_test_table_main_parent_id_55b0b5e6_uniq"; """) _drop_parent_id_column_sql = one_line_sql(""" ALTER TABLE "drop_col_test_table_main" DROP COLUMN "parent_id" CASCADE; """) migration_sql = call_command("sqlmigrate", "good_flow_drop_column_with_constraints_old", "0014") assert split_sql_queries(migration_sql) == [ _drop_parent_id_main_foreign_key_constraint_sql, _drop_parent_id_field_unique_constraint, _drop_parent_id_column_sql, ] call_command("migrate", "good_flow_drop_column_with_constraints_old", "0014") call_command("migrate", "good_flow_drop_column_with_constraints_old") assert pg_dump("drop_col_test_table_parent") == drop_col_test_table_parent_schema assert pg_dump("drop_col_test_table_main") == drop_col_test_table_main_schema assert pg_dump("drop_col_test_table_child") == drop_col_test_table_child_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_create_table_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_create_table(): _create_table_sql = one_line_sql(""" CREATE TABLE "idempotency_create_table_app_relatedtesttable" ( "id" integer NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, "test_field_int" integer NULL, "test_model_id" integer NULL ); """) _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_create_table_app_relatedtesttable_uniq" ON "idempotency_create_table_app_relatedtesttable" ("test_model_id", "test_field_int"); """) _create_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_create_table_app_relatedtesttable" ADD CONSTRAINT "idempotency_create_table_app_relatedtesttable_uniq" UNIQUE USING INDEX "idempotency_create_table_app_relatedtesttable_uniq"; """) _create_foreign_key_sql = one_line_sql(""" ALTER TABLE "idempotency_create_table_app_relatedtesttable" ADD CONSTRAINT "idempotency_create_t_test_model_id_09b52f79_fk_idempoten" FOREIGN KEY ("test_model_id") REFERENCES "idempotency_create_table_app_testtable" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; """) _validate_foreign_key_sql = one_line_sql(""" ALTER TABLE "idempotency_create_table_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_create_t_test_model_id_09b52f79_fk_idempoten"; """) _create_index_sql = one_line_sql(""" CREATE INDEX CONCURRENTLY "idempotency_create_table_a_test_model_id_09b52f79" ON "idempotency_create_table_app_relatedtesttable" ("test_model_id"); """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_create_table_app_relatedtesttable" DROP CONSTRAINT "idempotency_create_table_app_relatedtesttable_uniq"; """) _drop_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "idempotency_create_t_test_model_id_09b52f79_fk_idempoten" IMMEDIATE; ALTER TABLE "idempotency_create_table_app_relatedtesttable" DROP CONSTRAINT "idempotency_create_t_test_model_id_09b52f79_fk_idempoten"; """) _drop_table_sql = one_line_sql(""" DROP TABLE "idempotency_create_table_app_relatedtesttable" CASCADE; """) # get target schema call_command("migrate", "idempotency_create_table_app", "0001") call_command("migrate", "idempotency_create_table_app") new_schema = pg_dump("idempotency_create_table_app_relatedtesttable") # migrate call_command("migrate", "idempotency_create_table_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_create_table_app", "0002") assert split_sql_queries(migration_sql) == [ _create_table_sql, _create_unique_index_sql, _create_unique_constraint_sql, _create_foreign_key_sql, _validate_foreign_key_sql, _create_index_sql, ] # migrate case 1 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema # migrate case 2.1 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_create_table_app_relatedtesttable", "idempotency_create_table_app_relatedtesttable_uniq", ) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_create_table_app_relatedtesttable", "idempotency_create_table_app_relatedtesttable_uniq", ) # migrate case 2.2 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_create_table_app_relatedtesttable", "idempotency_create_table_app_relatedtesttable_uniq", ) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_create_table_app_relatedtesttable", "idempotency_create_table_app_relatedtesttable_uniq", ) # migrate case 3 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema # migrate case 4 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_sql) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema # migrate case 5 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_sql) cursor.execute(_validate_foreign_key_sql) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema # migrate case 6 call_command("migrate", "idempotency_create_table_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_table_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_sql) cursor.execute(_validate_foreign_key_sql) cursor.execute(_create_index_sql) call_command("migrate", "idempotency_create_table_app") assert pg_dump("idempotency_create_table_app_relatedtesttable") == new_schema # rollback (covers drop table case) call_command("migrate", "idempotency_create_table_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_create_table_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_unique_constraint_sql, _drop_foreign_key_constraint_sql, _drop_table_sql, ] # rollback case 1 call_command("migrate", "idempotency_create_table_app") with connection.cursor() as cursor: cursor.execute(_drop_unique_constraint_sql) call_command("migrate", "idempotency_create_table_app", "0001") # rollback case 2 call_command("migrate", "idempotency_create_table_app") with connection.cursor() as cursor: cursor.execute(_drop_unique_constraint_sql) cursor.execute(_drop_table_sql) call_command("migrate", "idempotency_create_table_app", "0001") @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_column_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_column(): _add_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_app_relatedtesttable" ADD COLUMN "test_field_str" varchar(10) NULL; """) _drop_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_app_relatedtesttable" DROP COLUMN "test_field_str" CASCADE; """) # get target schema call_command("migrate", "idempotency_add_column_app", "0001") old_schema = pg_dump("idempotency_add_column_app_relatedtesttable") call_command("migrate", "idempotency_add_column_app") new_schema = pg_dump("idempotency_add_column_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_column_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_column_app", "0002") assert split_sql_queries(migration_sql) == [ _add_column_sql ] # migrate case 1 call_command("migrate", "idempotency_add_column_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) call_command("migrate", "idempotency_add_column_app") assert pg_dump("idempotency_add_column_app_relatedtesttable") == new_schema # rollback (covers drop column case) call_command("migrate", "idempotency_add_column_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_column_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_column_sql ] # rollback case 1 call_command("migrate", "idempotency_add_column_app") with connection.cursor() as cursor: cursor.execute(_drop_column_sql) call_command("migrate", "idempotency_add_column_app", "0001") assert pg_dump("idempotency_add_column_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_column_foreign_key_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_column_foreign_key(): _add_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_foreign_key_app_relatedtesttable" ADD COLUMN "test_model_id" integer NULL; """) _create_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_foreign_key_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_colu_test_model_id_99eba75b_fk_idempoten" FOREIGN KEY ("test_model_id") REFERENCES "idempotency_add_column_foreign_key_app_testtable" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; """) _validate_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_foreign_key_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_add_colu_test_model_id_99eba75b_fk_idempoten"; """) _create_index_sql = one_line_sql(""" CREATE INDEX CONCURRENTLY "idempotency_add_column_for_test_model_id_99eba75b" ON "idempotency_add_column_foreign_key_app_relatedtesttable" ("test_model_id"); """) _drop_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_column_for_test_model_id_99eba75b"; """) _drop_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "idempotency_add_colu_test_model_id_99eba75b_fk_idempoten" IMMEDIATE; ALTER TABLE "idempotency_add_column_foreign_key_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_colu_test_model_id_99eba75b_fk_idempoten"; """) _drop_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_foreign_key_app_relatedtesttable" DROP COLUMN "test_model_id" CASCADE; """) # get target schema call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") old_schema = pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") call_command("migrate", "idempotency_add_column_foreign_key_app") new_schema = pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_column_foreign_key_app", "0002") assert split_sql_queries(migration_sql) == [ _add_column_sql, _create_foreign_key_constraint_sql, _validate_foreign_key_constraint_sql, _create_index_sql, ] # migrate case 1 call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) call_command("migrate", "idempotency_add_column_foreign_key_app") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == new_schema # migrate case 2 call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_foreign_key_app") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == new_schema # migrate case 3 call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_foreign_key_app") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == new_schema # migrate case 4.1 call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) cursor.execute(_create_index_sql) assert is_valid_index( "idempotency_add_column_foreign_key_app_relatedtesttable", "idempotency_add_column_for_test_model_id_99eba75b", ) call_command("migrate", "idempotency_add_column_foreign_key_app") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_column_foreign_key_app_relatedtesttable", "idempotency_add_column_for_test_model_id_99eba75b", ) # migrate case 4.2 call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) cursor.execute(_create_index_sql) make_index_invalid( "idempotency_add_column_foreign_key_app_relatedtesttable", "idempotency_add_column_for_test_model_id_99eba75b", ) call_command("migrate", "idempotency_add_column_foreign_key_app") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_column_foreign_key_app_relatedtesttable", "idempotency_add_column_for_test_model_id_99eba75b", ) # rollback (covers drop column case) call_command("migrate", "idempotency_add_column_foreign_key_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_column_foreign_key_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_foreign_key_constraint_sql, _drop_index_sql, _drop_column_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_column_foreign_key_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_column_foreign_key_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) cursor.execute(_drop_column_sql) call_command("migrate", "idempotency_add_column_foreign_key_app", "0001") assert pg_dump("idempotency_add_column_foreign_key_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_column_one_to_one_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_column_one_to_one(): _add_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" ADD COLUMN "test_model_id" integer NULL; """) _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_add_column_o_test_model_id_3c5a49fe_uniq" ON "idempotency_add_column_one_to_one_app_relatedtesttable" ("test_model_id"); """) _create_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_column_o_test_model_id_3c5a49fe_uniq" UNIQUE USING INDEX "idempotency_add_column_o_test_model_id_3c5a49fe_uniq"; """) _create_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_colu_test_model_id_3c5a49fe_fk_idempoten" FOREIGN KEY ("test_model_id") REFERENCES "idempotency_add_column_one_to_one_app_testtable" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; """) _validate_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_add_colu_test_model_id_3c5a49fe_fk_idempoten"; """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_column_o_test_model_id_3c5a49fe_uniq"; """) _drop_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "idempotency_add_colu_test_model_id_3c5a49fe_fk_idempoten" IMMEDIATE; ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_colu_test_model_id_3c5a49fe_fk_idempoten"; """) _drop_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_column_one_to_one_app_relatedtesttable" DROP COLUMN "test_model_id" CASCADE; """) # get target schema call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") old_schema = pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") call_command("migrate", "idempotency_add_column_one_to_one_app") new_schema = pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_column_one_to_one_app", "0002") assert split_sql_queries(migration_sql) == [ _add_column_sql, _create_unique_index_sql, _create_unique_constraint_sql, _create_foreign_key_constraint_sql, _validate_foreign_key_constraint_sql, ] # migrate case 1 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema # migrate case 2.1 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_add_column_one_to_one_app_relatedtesttable", "idempotency_add_column_o_test_model_id_3c5a49fe_uniq", ) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_column_one_to_one_app_relatedtesttable", "idempotency_add_column_o_test_model_id_3c5a49fe_uniq", ) # migrate case 2.2 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_add_column_one_to_one_app_relatedtesttable", "idempotency_add_column_o_test_model_id_3c5a49fe_uniq", ) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_column_one_to_one_app_relatedtesttable", "idempotency_add_column_o_test_model_id_3c5a49fe_uniq", ) # migrate case 3 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema # migrate case 4 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema # migrate case 5 call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_add_column_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_one_to_one_app") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == new_schema # rollback (covers drop column case) call_command("migrate", "idempotency_add_column_one_to_one_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_column_one_to_one_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_foreign_key_constraint_sql, _drop_unique_constraint_sql, _drop_column_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_column_one_to_one_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_column_one_to_one_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) cursor.execute(_drop_column_sql) call_command("migrate", "idempotency_add_column_one_to_one_app", "0001") assert pg_dump("idempotency_add_column_one_to_one_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_set_not_null_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_set_not_null(): _create_check_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_set_not_null_app_relatedtesttable" ADD CONSTRAINT "idempotency_set_not_nu_test_field_int_76dfbad6_notnull" CHECK ("test_field_int" IS NOT NULL) NOT VALID; """) _validate_check_constraint_sql = one_line_sql( """ ALTER TABLE "idempotency_set_not_null_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_set_not_nu_test_field_int_76dfbad6_notnull"; """) _set_column_not_null_sql = one_line_sql(""" ALTER TABLE "idempotency_set_not_null_app_relatedtesttable" ALTER COLUMN "test_field_int" SET NOT NULL; """) _drop_check_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_set_not_null_app_relatedtesttable" DROP CONSTRAINT "idempotency_set_not_nu_test_field_int_76dfbad6_notnull"; """) _drop_column_not_null_sql = one_line_sql(""" ALTER TABLE "idempotency_set_not_null_app_relatedtesttable" ALTER COLUMN "test_field_int" DROP NOT NULL; """) # get target schema call_command("migrate", "idempotency_set_not_null_app", "0001") old_schema = pg_dump("idempotency_set_not_null_app_relatedtesttable") call_command("migrate", "idempotency_set_not_null_app") new_schema = pg_dump("idempotency_set_not_null_app_relatedtesttable") # migrate call_command("migrate", "idempotency_set_not_null_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_set_not_null_app", "0002") assert split_sql_queries(migration_sql) == [ _create_check_constraint_sql, _validate_check_constraint_sql, _set_column_not_null_sql, _drop_check_constraint_sql, ] # migrate case 1 call_command("migrate", "idempotency_set_not_null_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) call_command("migrate", "idempotency_set_not_null_app") assert pg_dump("idempotency_set_not_null_app_relatedtesttable") == new_schema # migrate case 2 call_command("migrate", "idempotency_set_not_null_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) cursor.execute(_validate_check_constraint_sql) call_command("migrate", "idempotency_set_not_null_app") assert pg_dump("idempotency_set_not_null_app_relatedtesttable") == new_schema # migrate case 3 call_command("migrate", "idempotency_set_not_null_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) cursor.execute(_validate_check_constraint_sql) cursor.execute(_set_column_not_null_sql) call_command("migrate", "idempotency_set_not_null_app") assert pg_dump("idempotency_set_not_null_app_relatedtesttable") == new_schema # migrate case 4 call_command("migrate", "idempotency_set_not_null_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) cursor.execute(_validate_check_constraint_sql) cursor.execute(_set_column_not_null_sql) cursor.execute(_drop_check_constraint_sql) call_command("migrate", "idempotency_set_not_null_app") assert pg_dump("idempotency_set_not_null_app_relatedtesttable") == new_schema # rollback (covers drop not null case) call_command("migrate", "idempotency_set_not_null_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_set_not_null_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_column_not_null_sql ] # rollback case 1 call_command("migrate", "idempotency_set_not_null_app") with connection.cursor() as cursor: cursor.execute(_drop_column_not_null_sql) call_command("migrate", "idempotency_set_not_null_app", "0001") assert pg_dump("idempotency_set_not_null_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_check_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_check(): _create_check_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_check_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_check_app_relatedtesttable_check" CHECK ("test_field_int" > 0) NOT VALID; """) _validate_check_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_check_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_add_check_app_relatedtesttable_check"; """) _drop_check_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_check_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_check_app_relatedtesttable_check"; """) # get target schema call_command("migrate", "idempotency_add_check_app", "0001") old_schema = pg_dump("idempotency_add_check_app_relatedtesttable") call_command("migrate", "idempotency_add_check_app") new_schema = pg_dump("idempotency_add_check_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_check_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_check_app", "0002") assert split_sql_queries(migration_sql) == [ _create_check_constraint_sql, _validate_check_constraint_sql, ] # migrate case 1 call_command("migrate", "idempotency_add_check_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) call_command("migrate", "idempotency_add_check_app") assert pg_dump("idempotency_add_check_app_relatedtesttable") == new_schema # migrate case 2 call_command("migrate", "idempotency_add_check_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_check_constraint_sql) cursor.execute(_validate_check_constraint_sql) call_command("migrate", "idempotency_add_check_app") assert pg_dump("idempotency_add_check_app_relatedtesttable") == new_schema # rollback (covers drop check case) call_command("migrate", "idempotency_add_check_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_check_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_check_constraint_sql ] # rollback case 1 call_command("migrate", "idempotency_add_check_app") with connection.cursor() as cursor: cursor.execute(_drop_check_constraint_sql) call_command("migrate", "idempotency_add_check_app", "0001") assert pg_dump("idempotency_add_check_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_foreign_key_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_foreign_key(): _create_index_sql = one_line_sql(""" CREATE INDEX CONCURRENTLY "idempotency_add_foreign_ke_test_field_int_fa01ee40" ON "idempotency_add_foreign_key_app_relatedtesttable" ("test_field_int"); """) _create_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_foreign_key_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_fore_test_field_int_fa01ee40_fk_idempoten" FOREIGN KEY ("test_field_int") REFERENCES "idempotency_add_foreign_key_app_testtable" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; """) _validate_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_foreign_key_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_add_fore_test_field_int_fa01ee40_fk_idempoten"; """) _drop_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "idempotency_add_fore_test_field_int_fa01ee40_fk_idempoten" IMMEDIATE; ALTER TABLE "idempotency_add_foreign_key_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_fore_test_field_int_fa01ee40_fk_idempoten"; """) _drop_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_foreign_ke_test_field_int_fa01ee40"; """) # get target schema call_command("migrate", "idempotency_add_foreign_key_app", "0001") old_schema = pg_dump("idempotency_add_foreign_key_app_relatedtesttable") call_command("migrate", "idempotency_add_foreign_key_app") new_schema = pg_dump("idempotency_add_foreign_key_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_foreign_key_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_foreign_key_app", "0002") assert split_sql_queries(migration_sql) == [ _create_index_sql, _create_foreign_key_constraint_sql, _validate_foreign_key_constraint_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) assert is_valid_index( "idempotency_add_foreign_key_app_relatedtesttable", "idempotency_add_foreign_ke_test_field_int_fa01ee40", ) call_command("migrate", "idempotency_add_foreign_key_app") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_foreign_key_app_relatedtesttable", "idempotency_add_foreign_ke_test_field_int_fa01ee40", ) # migrate case 1.2 call_command("migrate", "idempotency_add_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) make_index_invalid( "idempotency_add_foreign_key_app_relatedtesttable", "idempotency_add_foreign_ke_test_field_int_fa01ee40", ) call_command("migrate", "idempotency_add_foreign_key_app") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_foreign_key_app_relatedtesttable", "idempotency_add_foreign_ke_test_field_int_fa01ee40", ) # migrate case 2 call_command("migrate", "idempotency_add_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) cursor.execute(_create_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_foreign_key_app") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == new_schema # migrate case 3 call_command("migrate", "idempotency_add_foreign_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_foreign_key_app") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == new_schema # rollback (covers drop foreign key case) call_command("migrate", "idempotency_add_foreign_key_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_foreign_key_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_foreign_key_constraint_sql, _drop_index_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_foreign_key_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_foreign_key_app", "0001") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_foreign_key_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) cursor.execute(_drop_index_sql) call_command("migrate", "idempotency_add_foreign_key_app", "0001") assert pg_dump("idempotency_add_foreign_key_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_one_to_one_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_one_to_one(): _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_add_one_to_o_test_field_int_8ebac681_uniq" ON "idempotency_add_one_to_one_app_relatedtesttable" ("test_field_int"); """) _create_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_one_to_one_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_one_to_o_test_field_int_8ebac681_uniq" UNIQUE USING INDEX "idempotency_add_one_to_o_test_field_int_8ebac681_uniq"; """) _create_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_one_to_one_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_one__test_field_int_8ebac681_fk_idempoten" FOREIGN KEY ("test_field_int") REFERENCES "idempotency_add_one_to_one_app_testtable" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID; """) _validate_foreign_key_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_one_to_one_app_relatedtesttable" VALIDATE CONSTRAINT "idempotency_add_one__test_field_int_8ebac681_fk_idempoten"; """) _drop_foreign_key_constraint_sql = one_line_sql(""" SET CONSTRAINTS "idempotency_add_one__test_field_int_8ebac681_fk_idempoten" IMMEDIATE; ALTER TABLE "idempotency_add_one_to_one_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_one__test_field_int_8ebac681_fk_idempoten"; """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_one_to_one_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_one_to_o_test_field_int_8ebac681_uniq"; """) _drop_like_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_one_to_o_test_field_int_8ebac681_like"; """) # get target schema call_command("migrate", "idempotency_add_one_to_one_app", "0001") old_schema = pg_dump("idempotency_add_one_to_one_app_relatedtesttable") call_command("migrate", "idempotency_add_one_to_one_app") new_schema = pg_dump("idempotency_add_one_to_one_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_one_to_one_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_one_to_one_app", "0002") assert split_sql_queries(migration_sql) == [ _create_unique_index_sql, _create_unique_constraint_sql, _create_foreign_key_constraint_sql, _validate_foreign_key_constraint_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_add_one_to_one_app_relatedtesttable", "idempotency_add_one_to_o_test_field_int_8ebac681_uniq", ) call_command("migrate", "idempotency_add_one_to_one_app") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_one_to_one_app_relatedtesttable", "idempotency_add_one_to_o_test_field_int_8ebac681_uniq", ) # migrate case 1.2 call_command("migrate", "idempotency_add_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_add_one_to_one_app_relatedtesttable", "idempotency_add_one_to_o_test_field_int_8ebac681_uniq", ) call_command("migrate", "idempotency_add_one_to_one_app") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_one_to_one_app_relatedtesttable", "idempotency_add_one_to_o_test_field_int_8ebac681_uniq", ) # migrate case 2 call_command("migrate", "idempotency_add_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) call_command("migrate", "idempotency_add_one_to_one_app") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == new_schema # migrate case 3 call_command("migrate", "idempotency_add_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_one_to_one_app") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == new_schema # migrate case 4 call_command("migrate", "idempotency_add_one_to_one_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) cursor.execute(_create_foreign_key_constraint_sql) cursor.execute(_validate_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_one_to_one_app") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == new_schema # rollback (covers drop one to one case) call_command("migrate", "idempotency_add_one_to_one_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_one_to_one_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_foreign_key_constraint_sql, _drop_unique_constraint_sql, _drop_like_index_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_one_to_one_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) call_command("migrate", "idempotency_add_one_to_one_app", "0001") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_one_to_one_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) cursor.execute(_drop_unique_constraint_sql) call_command("migrate", "idempotency_add_one_to_one_app", "0001") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == old_schema # rollback case 3 call_command("migrate", "idempotency_add_one_to_one_app") with connection.cursor() as cursor: cursor.execute(_drop_foreign_key_constraint_sql) cursor.execute(_drop_unique_constraint_sql) cursor.execute(_drop_like_index_sql) call_command("migrate", "idempotency_add_one_to_one_app", "0001") assert pg_dump("idempotency_add_one_to_one_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_index_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_index(): _create_index_sql = one_line_sql(""" CREATE INDEX CONCURRENTLY "idempotency_add_index_app__test_field_int_ecc428b5" ON "idempotency_add_index_app_relatedtesttable" ("test_field_int"); """) _drop_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_index_app__test_field_int_ecc428b5"; """) # get target schema call_command("migrate", "idempotency_add_index_app", "0001") old_schema = pg_dump("idempotency_add_index_app_relatedtesttable") call_command("migrate", "idempotency_add_index_app") new_schema = pg_dump("idempotency_add_index_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_index_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_index_app", "0002") assert split_sql_queries(migration_sql) == [ _create_index_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_index_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) assert is_valid_index( "idempotency_add_index_app_relatedtesttable", "idempotency_add_index_app__test_field_int_ecc428b5", ) call_command("migrate", "idempotency_add_index_app") assert pg_dump("idempotency_add_index_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_index_app_relatedtesttable", "idempotency_add_index_app__test_field_int_ecc428b5", ) # migrate case 1.2 call_command("migrate", "idempotency_add_index_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) make_index_invalid( "idempotency_add_index_app_relatedtesttable", "idempotency_add_index_app__test_field_int_ecc428b5", ) call_command("migrate", "idempotency_add_index_app") assert pg_dump("idempotency_add_index_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_index_app_relatedtesttable", "idempotency_add_index_app__test_field_int_ecc428b5", ) # rollback (covers drop index case) call_command("migrate", "idempotency_add_index_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_index_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_index_sql ] # rollback case 1 call_command("migrate", "idempotency_add_index_app") with connection.cursor() as cursor: cursor.execute(_drop_index_sql) call_command("migrate", "idempotency_add_index_app", "0001") assert pg_dump("idempotency_add_index_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_index_meta_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_index_meta(): _create_index_sql = one_line_sql(""" CREATE INDEX CONCURRENTLY "relatedtesttable_idx" ON "idempotency_add_index_meta_app_relatedtesttable" ("test_field_int", "test_field_str"); """) _drop_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "relatedtesttable_idx"; """) # get target schema call_command("migrate", "idempotency_add_index_meta_app", "0001") old_schema = pg_dump("idempotency_add_index_meta_app_relatedtesttable") call_command("migrate", "idempotency_add_index_meta_app") new_schema = pg_dump("idempotency_add_index_meta_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_index_meta_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_index_meta_app", "0002") assert split_sql_queries(migration_sql) == [ _create_index_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_index_meta_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) assert is_valid_index( "idempotency_add_index_meta_app_relatedtesttable", "relatedtesttable_idx", ) call_command("migrate", "idempotency_add_index_meta_app") assert pg_dump("idempotency_add_index_meta_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_index_meta_app_relatedtesttable", "relatedtesttable_idx", ) # migrate case 1.2 call_command("migrate", "idempotency_add_index_meta_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_index_sql) make_index_invalid( "idempotency_add_index_meta_app_relatedtesttable", "relatedtesttable_idx", ) call_command("migrate", "idempotency_add_index_meta_app") assert pg_dump("idempotency_add_index_meta_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_index_meta_app_relatedtesttable", "relatedtesttable_idx", ) # rollback (covers drop index case) call_command("migrate", "idempotency_add_index_meta_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_index_meta_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_index_sql ] # rollback case 1 call_command("migrate", "idempotency_add_index_meta_app") with connection.cursor() as cursor: cursor.execute(_drop_index_sql) call_command("migrate", "idempotency_add_index_meta_app", "0001") assert pg_dump("idempotency_add_index_meta_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_unique_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_unique(): _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq" ON "idempotency_add_unique_app_relatedtesttable" ("test_field_int"); """) _create_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_unique_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq" UNIQUE USING INDEX "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq"; """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_unique_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq"; """) _drop_like_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_unique_a_test_field_int_01c4f0c0_like"; """) # get target schema call_command("migrate", "idempotency_add_unique_app", "0001") old_schema = pg_dump("idempotency_add_unique_app_relatedtesttable") call_command("migrate", "idempotency_add_unique_app") new_schema = pg_dump("idempotency_add_unique_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_unique_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_unique_app", "0002") assert split_sql_queries(migration_sql) == [ _create_unique_index_sql, _create_unique_constraint_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_unique_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_add_unique_app_relatedtesttable", "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq", ) call_command("migrate", "idempotency_add_unique_app") assert pg_dump("idempotency_add_unique_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_unique_app_relatedtesttable", "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq", ) # migrate case 1.2 call_command("migrate", "idempotency_add_unique_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_add_unique_app_relatedtesttable", "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq", ) call_command("migrate", "idempotency_add_unique_app") assert pg_dump("idempotency_add_unique_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_unique_app_relatedtesttable", "idempotency_add_unique_a_test_field_int_01c4f0c0_uniq", ) # migrate case 2 call_command("migrate", "idempotency_add_unique_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) call_command("migrate", "idempotency_add_unique_app") assert pg_dump("idempotency_add_unique_app_relatedtesttable") == new_schema # rollback (covers drop unique case) call_command("migrate", "idempotency_add_unique_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_unique_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_unique_constraint_sql, _drop_like_index_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_unique_app") with connection.cursor() as cursor: cursor.execute(_drop_unique_constraint_sql) call_command("migrate", "idempotency_add_unique_app", "0001") assert pg_dump("idempotency_add_unique_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_unique_app") with connection.cursor() as cursor: cursor.execute(_drop_unique_constraint_sql) cursor.execute(_drop_like_index_sql) call_command("migrate", "idempotency_add_unique_app", "0001") assert pg_dump("idempotency_add_unique_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_unique_meta_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_unique_meta(): _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "relatedtesttable_uniq" ON "idempotency_add_unique_meta_app_relatedtesttable" ("test_field_int", "test_field_str"); """) _create_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_unique_meta_app_relatedtesttable" ADD CONSTRAINT "relatedtesttable_uniq" UNIQUE USING INDEX "relatedtesttable_uniq"; """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_unique_meta_app_relatedtesttable" DROP CONSTRAINT "relatedtesttable_uniq"; """) # get target schema call_command("migrate", "idempotency_add_unique_meta_app", "0001") old_schema = pg_dump("idempotency_add_unique_meta_app_relatedtesttable") call_command("migrate", "idempotency_add_unique_meta_app") new_schema = pg_dump("idempotency_add_unique_meta_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_unique_meta_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_unique_meta_app", "0002") assert split_sql_queries(migration_sql) == [ _create_unique_index_sql, _create_unique_constraint_sql, ] # migrate case 1.1 call_command("migrate", "idempotency_add_unique_meta_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_add_unique_meta_app_relatedtesttable", "relatedtesttable_uniq", ) call_command("migrate", "idempotency_add_unique_meta_app") assert pg_dump("idempotency_add_unique_meta_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_unique_meta_app_relatedtesttable", "relatedtesttable_uniq", ) # migrate case 1.2 call_command("migrate", "idempotency_add_unique_meta_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_add_unique_meta_app_relatedtesttable", "relatedtesttable_uniq", ) call_command("migrate", "idempotency_add_unique_meta_app") assert pg_dump("idempotency_add_unique_meta_app_relatedtesttable") == new_schema assert is_valid_index( "idempotency_add_unique_meta_app_relatedtesttable", "relatedtesttable_uniq", ) # migrate case 2 call_command("migrate", "idempotency_add_unique_meta_app", "0001") with connection.cursor() as cursor: cursor.execute(_create_unique_index_sql) cursor.execute(_create_unique_constraint_sql) call_command("migrate", "idempotency_add_unique_meta_app") assert pg_dump("idempotency_add_unique_meta_app_relatedtesttable") == new_schema # rollback (covers drop unique case) call_command("migrate", "idempotency_add_unique_meta_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_unique_meta_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_unique_constraint_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_unique_meta_app") with connection.cursor() as cursor: cursor.execute(_drop_unique_constraint_sql) call_command("migrate", "idempotency_add_unique_meta_app", "0001") assert pg_dump("idempotency_add_unique_meta_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_primary_key_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_primary_key(): _drop_column_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" DROP COLUMN "id" CASCADE; """) _create_unique_index_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_add_primary_k_test_field_int_e9cebf24_pk" ON "idempotency_add_primary_key_app_relatedtesttable" ("test_field_int"); """) _create_primary_key_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_primary_k_test_field_int_e9cebf24_pk" PRIMARY KEY USING INDEX "idempotency_add_primary_k_test_field_int_e9cebf24_pk"; """) _drop_unique_constraint_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_primary__test_field_int_e9cebf24_uniq"; """) _drop_primary_key_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" DROP CONSTRAINT "idempotency_add_primary_k_test_field_int_e9cebf24_pk"; """) _drop_unique_index_sql = one_line_sql(""" DROP INDEX CONCURRENTLY IF EXISTS "idempotency_add_primary__test_field_int_e9cebf24_like"; """) _add_column_for_rollback_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" ADD COLUMN "id" integer NOT NULL GENERATED BY DEFAULT AS IDENTITY; """) if django.VERSION[:2] < (4, 1): _add_column_for_rollback_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" ADD COLUMN "id" serial NOT NULL; """) _create_unique_index_for_rollback_sql = one_line_sql(""" CREATE UNIQUE INDEX CONCURRENTLY "idempotency_add_primary_key_app_relatedtesttable_id_d0e5667c_pk" ON "idempotency_add_primary_key_app_relatedtesttable" ("id"); """) _create_unique_constraint_for_rollback_sql = one_line_sql(""" ALTER TABLE "idempotency_add_primary_key_app_relatedtesttable" ADD CONSTRAINT "idempotency_add_primary_key_app_relatedtesttable_id_d0e5667c_pk" PRIMARY KEY USING INDEX "idempotency_add_primary_key_app_relatedtesttable_id_d0e5667c_pk"; """) # get target schema call_command("migrate", "idempotency_add_primary_key_app", "0001") old_schema = pg_dump("idempotency_add_primary_key_app_relatedtesttable") call_command("migrate", "idempotency_add_primary_key_app") new_schema = pg_dump("idempotency_add_primary_key_app_relatedtesttable") # migrate with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_primary_key_app", "0002") assert split_sql_queries(migration_sql) == [ _drop_column_sql, _create_unique_index_sql, _create_primary_key_sql, ] # migrate case 1 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_drop_column_sql) call_command("migrate", "idempotency_add_primary_key_app") assert pg_dump("idempotency_add_primary_key_app_relatedtesttable") == new_schema # migrate case 2.1 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_drop_column_sql) cursor.execute(_create_unique_index_sql) assert is_valid_index( "idempotency_add_primary_key_app_relatedtesttable", "idempotency_add_primary_k_test_field_int_e9cebf24_pk", ) call_command("migrate", "idempotency_add_primary_key_app") assert pg_dump("idempotency_add_primary_key_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_primary_key_app_relatedtesttable", "idempotency_add_primary_k_test_field_int_e9cebf24_pk", ) # migrate case 2.2 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_drop_column_sql) cursor.execute(_create_unique_index_sql) make_index_invalid( "idempotency_add_primary_key_app_relatedtesttable", "idempotency_add_primary_k_test_field_int_e9cebf24_pk", ) call_command("migrate", "idempotency_add_primary_key_app") assert pg_dump("idempotency_add_primary_key_app_relatedtesttable") == new_schema assert is_valid_constraint( "idempotency_add_primary_key_app_relatedtesttable", "idempotency_add_primary_k_test_field_int_e9cebf24_pk", ) # migrate case 3 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") with connection.cursor() as cursor: cursor.execute(_drop_column_sql) cursor.execute(_create_unique_index_sql) cursor.execute(_create_primary_key_sql) call_command("migrate", "idempotency_add_primary_key_app") assert pg_dump("idempotency_add_primary_key_app_relatedtesttable") == new_schema # rollback (covers drop primary key case) call_command("migrate", "idempotency_add_primary_key_app") with override_settings( ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False, ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False, ): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_primary_key_app", "0002") if django.VERSION[:2] < (4, 1): assert split_sql_queries(rollback_sql) == [ _drop_unique_constraint_sql, _drop_primary_key_sql, _drop_unique_index_sql, _add_column_for_rollback_sql, _create_unique_index_for_rollback_sql, _create_unique_constraint_for_rollback_sql, ] else: assert split_sql_queries(rollback_sql) == [ _drop_primary_key_sql, _drop_unique_index_sql, _add_column_for_rollback_sql, _create_unique_index_for_rollback_sql, _create_unique_constraint_for_rollback_sql, ] def old_schema_compatible(dump: str) -> str: """ django creates different name for primary key constraint than postgres rolling back drop index can be reason of columns order changes """ return dump.replace( "idempotency_add_primary_key_app_relatedtesttable_id_d0e5667c_pk", "idempotency_add_primary_key_app_relatedtesttable_pkey", ).replace( "test_field_int integer NOT NULL,\n id integer NOT NULL", "id integer NOT NULL,\n test_field_int integer NOT NULL", ) # rollback case 1 call_command("migrate", "idempotency_add_primary_key_app") with connection.cursor() as cursor: cursor.execute(_drop_primary_key_sql) with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") assert old_schema_compatible(pg_dump("idempotency_add_primary_key_app_relatedtesttable")) == old_schema # rollback case 2 call_command("migrate", "idempotency_add_primary_key_app") with connection.cursor() as cursor: cursor.execute(_drop_primary_key_sql) cursor.execute(_drop_unique_index_sql) with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") assert old_schema_compatible(pg_dump("idempotency_add_primary_key_app_relatedtesttable")) == old_schema # rollback case 3 call_command("migrate", "idempotency_add_primary_key_app") with connection.cursor() as cursor: cursor.execute(_drop_primary_key_sql) cursor.execute(_drop_unique_index_sql) cursor.execute(_add_column_for_rollback_sql) with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") assert old_schema_compatible(pg_dump("idempotency_add_primary_key_app_relatedtesttable")) == old_schema # rollback case 4 call_command("migrate", "idempotency_add_primary_key_app") with connection.cursor() as cursor: cursor.execute(_drop_primary_key_sql) cursor.execute(_drop_unique_index_sql) cursor.execute(_add_column_for_rollback_sql) cursor.execute(_create_unique_index_for_rollback_sql) with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") assert old_schema_compatible(pg_dump("idempotency_add_primary_key_app_relatedtesttable")) == old_schema # rollback case 5 call_command("migrate", "idempotency_add_primary_key_app") with connection.cursor() as cursor: cursor.execute(_drop_primary_key_sql) cursor.execute(_drop_unique_index_sql) cursor.execute(_add_column_for_rollback_sql) cursor.execute(_create_unique_index_for_rollback_sql) cursor.execute(_create_unique_constraint_for_rollback_sql) with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_primary_key_app", "0001") assert old_schema_compatible(pg_dump("idempotency_add_primary_key_app_relatedtesttable")) == old_schema @skip_for_default_django_backend @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_auto_field_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_auto_field(): _set_type_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" TYPE integer; """) _set_identity_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" ADD GENERATED BY DEFAULT AS IDENTITY; """) _drop_identity_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" DROP IDENTITY IF EXISTS; """) _set_type_for_rollback_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" TYPE integer; """) _sql_drop_sequence_sql = one_line_sql(""" DROP SEQUENCE IF EXISTS "idempotency_add_auto_field_app_relatedtestta_test_field_int_seq" CASCADE; """) # get target schema call_command("migrate", "idempotency_add_auto_field_app", "0001") old_schema = pg_dump("idempotency_add_auto_field_app_relatedtesttable") call_command("migrate", "idempotency_add_auto_field_app") new_schema = pg_dump("idempotency_add_auto_field_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_auto_field_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_auto_field_app", "0002") assert split_sql_queries(migration_sql) == [ _set_type_sql, _set_identity_sql, ] # migrate case 1 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) call_command("migrate", "idempotency_add_auto_field_app") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == new_schema # migrate case 2 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_set_identity_sql) call_command("migrate", "idempotency_add_auto_field_app") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == new_schema # rollback (covers drop auto field case) call_command("migrate", "idempotency_add_auto_field_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_auto_field_app", "0002") assert split_sql_queries(rollback_sql) == [ _drop_identity_sql, _set_type_for_rollback_sql, _sql_drop_sequence_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_auto_field_app") with connection.cursor() as cursor: cursor.execute(_drop_identity_sql) call_command("migrate", "idempotency_add_auto_field_app", "0001") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_auto_field_app") with connection.cursor() as cursor: cursor.execute(_drop_identity_sql) cursor.execute(_set_type_for_rollback_sql) call_command("migrate", "idempotency_add_auto_field_app", "0001") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == old_schema # rollback case 3 call_command("migrate", "idempotency_add_auto_field_app") with connection.cursor() as cursor: cursor.execute(_drop_identity_sql) cursor.execute(_set_type_for_rollback_sql) cursor.execute(_sql_drop_sequence_sql) call_command("migrate", "idempotency_add_auto_field_app", "0001") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == old_schema @skip_for_default_django_backend @pytest.mark.skipif( django.VERSION[:2] >= (4, 1), reason="django before 4.1 case", ) @pytest.mark.django_db(transaction=True) @modify_settings(INSTALLED_APPS={"append": "tests.apps.idempotency_add_auto_field_app"}) @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) @override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=True) def test_idempotency_add_auto_field_old(): _set_type_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" TYPE integer USING "test_field_int"::integer; """) _drop_sequence_sql = one_line_sql(""" DROP SEQUENCE IF EXISTS "idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq" CASCADE; """) _create_sequence_sql = one_line_sql(""" CREATE SEQUENCE "idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq"; """) _set_default_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" SET DEFAULT nextval(\'"idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq"\'); """) _set_max_value_sql = one_line_sql(""" SELECT setval('"idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq"', MAX("test_field_int")) FROM "idempotency_add_auto_field_app_relatedtesttable"; """) _set_owner_sql = one_line_sql(""" ALTER SEQUENCE "idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq" OWNED BY "idempotency_add_auto_field_app_relatedtesttable"."test_field_int"; """) _set_type_for_rollback_sql = one_line_sql(""" ALTER TABLE "idempotency_add_auto_field_app_relatedtesttable" ALTER COLUMN "test_field_int" TYPE integer USING "test_field_int"::integer; """) _drop_sequence_sql = one_line_sql(""" DROP SEQUENCE IF EXISTS "idempotency_add_auto_field_app_relatedtesttable_test_field_int_seq" CASCADE; """) # get target schema call_command("migrate", "idempotency_add_auto_field_app", "0001") old_schema = pg_dump("idempotency_add_auto_field_app_relatedtesttable") call_command("migrate", "idempotency_add_auto_field_app") new_schema = pg_dump("idempotency_add_auto_field_app_relatedtesttable") # migrate call_command("migrate", "idempotency_add_auto_field_app", "0001") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): migration_sql = call_command("sqlmigrate", "idempotency_add_auto_field_app", "0002") assert split_sql_queries(migration_sql) == [ _set_type_sql, _drop_sequence_sql, _create_sequence_sql, _set_default_sql, _set_max_value_sql, _set_owner_sql, ] def schema_compatible_sequence_alter(dump: str) -> str: return dump.replace( textwrap.dedent(""" CREATE TABLE public.idempotency_add_auto_field_app_relatedtesttable ( test_field_int integer DEFAULT nextval('public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_'::regclass) NOT NULL ); ALTER TABLE public.idempotency_add_auto_field_app_relatedtesttable OWNER TO test; """).replace("\n nextval", " nextval"), textwrap.dedent(""" CREATE TABLE public.idempotency_add_auto_field_app_relatedtesttable ( test_field_int integer NOT NULL ); ALTER TABLE public.idempotency_add_auto_field_app_relatedtesttable OWNER TO test; -- -- Name: idempotency_add_auto_field_app_relatedtesttable_test_field_int_; -- Type: SEQUENCE; Schema: public; Owner: test -- CREATE SEQUENCE public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_ START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; ALTER SEQUENCE public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_ OWNER TO test; -- -- Name: idempotency_add_auto_field_app_relatedtesttable_test_field_int_; -- Type: SEQUENCE OWNED BY; Schema: public; Owner: test -- ALTER SEQUENCE public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_ OWNED BY public.idempotency_add_auto_field_app_relatedtesttable.test_field_int; -- -- Name: idempotency_add_auto_field_app_relatedtesttable test_field_int; -- Type: DEFAULT; Schema: public; Owner: test -- ALTER TABLE ONLY public.idempotency_add_auto_field_app_relatedtesttable ALTER COLUMN test_field_int SET DEFAULT nextval('public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_'::regclass); """).replace("\n-- Type", " Type").replace("\nOWNED", " OWNED").replace("\nSET", " SET"), ) def schema_compatible_explicit_type(dump: str) -> str: return dump.replace( textwrap.dedent(""" CREATE SEQUENCE public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_ AS integer START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; """), textwrap.dedent(""" CREATE SEQUENCE public.idempotency_add_auto_field_app_relatedtesttable_test_field_int_ START WITH 1 INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1; """), ) # migrate case 1 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) call_command("migrate", "idempotency_add_auto_field_app") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == new_schema # migrate case 2 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_drop_sequence_sql) call_command("migrate", "idempotency_add_auto_field_app") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == new_schema # migrate case 3 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_drop_sequence_sql) cursor.execute(_create_sequence_sql) call_command("migrate", "idempotency_add_auto_field_app") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == new_schema # migrate case 4 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_drop_sequence_sql) cursor.execute(_create_sequence_sql) cursor.execute(_set_default_sql) call_command("migrate", "idempotency_add_auto_field_app") assert schema_compatible_sequence_alter( pg_dump("idempotency_add_auto_field_app_relatedtesttable") ) == new_schema # migrate case 5 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_drop_sequence_sql) cursor.execute(_create_sequence_sql) cursor.execute(_set_default_sql) cursor.execute(_set_max_value_sql) call_command("migrate", "idempotency_add_auto_field_app") assert schema_compatible_sequence_alter( pg_dump("idempotency_add_auto_field_app_relatedtesttable") ) == new_schema # migrate case 6 with override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=False): call_command("migrate", "idempotency_add_auto_field_app", "0001") with connection.cursor() as cursor: cursor.execute(_set_type_sql) cursor.execute(_drop_sequence_sql) cursor.execute(_create_sequence_sql) cursor.execute(_set_default_sql) cursor.execute(_set_max_value_sql) cursor.execute(_set_owner_sql) call_command("migrate", "idempotency_add_auto_field_app") assert schema_compatible_explicit_type(pg_dump("idempotency_add_auto_field_app_relatedtesttable")) == new_schema # rollback (covers drop auto field case) call_command("migrate", "idempotency_add_auto_field_app") with override_settings(ZERO_DOWNTIME_MIGRATIONS_IDEMPOTENT_SQL=False): rollback_sql = call_command("sqlmigrate", "--backwards", "idempotency_add_auto_field_app", "0002") assert split_sql_queries(rollback_sql) == [ _set_type_for_rollback_sql, _drop_sequence_sql, ] # rollback case 1 call_command("migrate", "idempotency_add_auto_field_app") with connection.cursor() as cursor: cursor.execute(_set_type_for_rollback_sql) call_command("migrate", "idempotency_add_auto_field_app", "0001") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == old_schema # rollback case 2 call_command("migrate", "idempotency_add_auto_field_app") with connection.cursor() as cursor: cursor.execute(_drop_sequence_sql) call_command("migrate", "idempotency_add_auto_field_app", "0001") assert pg_dump("idempotency_add_auto_field_app_relatedtesttable") == old_schema ================================================ FILE: tests/settings.py ================================================ """ Django settings for test project. Generated by 'django-admin startproject' using Django 2.0.6. For more information on this file, see https://docs.djangoproject.com/en/2.0/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.0/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '56wnjyh@bo044_p7o_ckx37*^vv1^c0a5u(c1gt9!6t4+96$nm' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'django_zero_downtime_migrations', 'tests', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] MIGRATION_MODULES = { 'admin': None, 'auth': None, 'contenttypes': None, 'sessions': None, 'messages': None, 'staticfiles': None, } TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'tests2.wsgi.application' # Database # https://docs.djangoproject.com/en/2.0/ref/settings/#databases DATABASES = { 'default': { # 'ENGINE': 'django.db.backends.postgresql', 'ENGINE': os.getenv('DB_ENGINE', 'django_zero_downtime_migrations.backends.postgres'), 'NAME': 'test', 'USER': os.getenv('DB_USER', 'postgres'), 'PASSWORD': os.getenv('DB_PASSWORD', os.getenv('DB_USER', 'postgres')), 'HOST': os.getenv('DB_HOST', 'localhost'), 'PORT': os.getenv('DB_PORT', '5432'), }, } DB_SUPER_USER = os.getenv('DB_SUPER_USER', DATABASES['default']['USER']) DB_SUPER_PASSWORD = os.getenv('DB_SUPER_PASSWORD', os.getenv('DB_SUPER_USER', DATABASES['default']['PASSWORD'])) # Password validation # https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/2.0/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.0/howto/static-files/ STATIC_URL = '/static/' ================================================ FILE: tests/settings_make_migrations.py ================================================ from .settings import * # noqa: F401, F403 INSTALLED_APPS += [ # noqa: F405 'tests.apps.good_flow_alter_table_with_same_db_table', 'tests.apps.good_flow_app', 'tests.apps.good_flow_app_concurrently', 'tests.apps.bad_rollback_flow_drop_column_with_notnull_default_app', 'tests.apps.bad_rollback_flow_drop_column_with_notnull_app', 'tests.apps.bad_rollback_flow_change_char_type_that_safe_app', 'tests.apps.bad_flow_add_column_with_default_app', 'tests.apps.bad_flow_add_column_with_notnull_default_app', 'tests.apps.bad_flow_add_column_with_notnull_app', 'tests.apps.bad_flow_change_char_type_that_unsafe_app', 'tests.apps.old_notnull_check_constraint_migration_app', 'tests.apps.good_flow_drop_table_with_constraints', 'tests.apps.good_flow_drop_column_with_constraints', 'tests.apps.idempotency_create_table_app', 'tests.apps.idempotency_add_column_app', 'tests.apps.idempotency_add_column_foreign_key_app', 'tests.apps.idempotency_add_column_one_to_one_app', 'tests.apps.idempotency_set_not_null_app', 'tests.apps.idempotency_add_check_app', 'tests.apps.idempotency_add_foreign_key_app', 'tests.apps.idempotency_add_one_to_one_app', 'tests.apps.idempotency_add_index_app', 'tests.apps.idempotency_add_index_meta_app', 'tests.apps.idempotency_add_unique_app', 'tests.apps.idempotency_add_unique_meta_app', 'tests.apps.idempotency_add_primary_key_app', 'tests.apps.idempotency_add_auto_field_app', ] ================================================ FILE: tests/unit/__init__.py ================================================ ================================================ FILE: tests/unit/test_schema.py ================================================ from functools import partial import django from django.conf import settings from django.contrib.postgres.constraints import ExclusionConstraint from django.contrib.postgres.indexes import ( BrinIndex, BTreeIndex, GinIndex, GistIndex, HashIndex, SpGistIndex ) from django.db import connection, models from django.db.backends.postgresql.schema import ( DatabaseSchemaEditor as CoreDatabaseSchemaEditor ) from django.test import override_settings from django.utils.module_loading import import_string import pytest from django_zero_downtime_migrations.backends.postgres.schema import ( UnsafeOperationException, UnsafeOperationWarning ) DatabaseSchemaEditor = import_string(settings.DATABASES['default']['ENGINE'] + '.schema.DatabaseSchemaEditor') START_TIMEOUTS = [ 'SET statement_timeout TO \'0\';', 'SET lock_timeout TO \'0\';', ] END_TIMEOUTS = [ 'SET statement_timeout TO \'0ms\';', 'SET lock_timeout TO \'0ms\';', ] START_FLEXIBLE_STATEMENT_TIMEOUT = [ 'SET statement_timeout TO \'0ms\';', ] END_FLEXIBLE_STATEMENT_TIMEOUT = [ 'SET statement_timeout TO \'0ms\';', ] def timeouts(statements): if isinstance(statements, str): statements = [statements] return START_TIMEOUTS + statements + END_TIMEOUTS def flexible_statement_timeout(statements): if isinstance(statements, str): statements = [statements] return START_FLEXIBLE_STATEMENT_TIMEOUT + statements + END_FLEXIBLE_STATEMENT_TIMEOUT class Model(models.Model): field1 = models.IntegerField() field2 = models.IntegerField() class Model2(models.Model): pass schema_editor = partial(DatabaseSchemaEditor, connection=connection, collect_sql=True) class cmp_schema_editor: schema_editor = DatabaseSchemaEditor core_schema_editor = CoreDatabaseSchemaEditor def __enter__(self): self.editor = self.schema_editor(connection=connection, collect_sql=True).__enter__() self.core_editor = self.core_schema_editor(connection=connection, collect_sql=True, atomic=False).__enter__() return self def __exit__(self, exc_type, exc_value, traceback): self.core_editor.__exit__(exc_type, exc_value, traceback) self.editor.__exit__(exc_type, exc_value, traceback) def __getattr__(self, item): self.method = item value = getattr(self.editor, self.method) if callable(value): return self return value def __call__(self, *args, **kwargs): getattr(self.core_editor, self.method)(*args, **kwargs) return getattr(self.editor, self.method)(*args, **kwargs) @property def django_sql(self): return self.core_editor.collected_sql @pytest.fixture(autouse=True) def zero_timeouts(): with override_settings(ZERO_DOWNTIME_MIGRATIONS_LOCK_TIMEOUT=0): with override_settings(ZERO_DOWNTIME_MIGRATIONS_STATEMENT_TIMEOUT=0): yield @pytest.fixture(autouse=True) def cursor(mocker): with connection.cursor() as cursor: mocker.patch.object(connection, 'cursor')().__enter__.return_value = cursor yield cursor @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_create_model__ok(): with cmp_schema_editor() as editor: editor.create_model(Model) assert editor.collected_sql == editor.django_sql assert editor.django_sql == [ 'CREATE TABLE "tests_model" ' '("id" integer NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY, ' '"field1" integer NOT NULL, "field2" integer NOT NULL);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_model__ok(): with cmp_schema_editor() as editor: editor.delete_model(Model) assert editor.collected_sql == editor.django_sql assert editor.django_sql == [ 'DROP TABLE "tests_model" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_model__drop_foreign_key__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').return_value = [ ('tests_model_model2_id_fk', 'f', 'tests_model', 'tests_model2', ['model2_id'], ['id']) ] with cmp_schema_editor() as editor: editor.delete_model(Model) assert editor.collected_sql == timeouts( 'SET CONSTRAINTS "tests_model_model2_id_fk" IMMEDIATE; ' 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_model2_id_fk";', ) + [ 'DROP TABLE "tests_model" CASCADE;' ] assert editor.django_sql == [ 'DROP TABLE "tests_model" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_model__drop_foreign_key_backref__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').return_value = [ ('tests_model2_model_id_fk', 'f', 'tests_model2', 'tests_model', ['model_id'], ['id']) ] with cmp_schema_editor() as editor: editor.delete_model(Model) assert editor.collected_sql == timeouts( 'SET CONSTRAINTS "tests_model2_model_id_fk" IMMEDIATE; ' 'ALTER TABLE "tests_model2" DROP CONSTRAINT "tests_model2_model_id_fk";', ) + [ 'DROP TABLE "tests_model" CASCADE;' ] assert editor.django_sql == [ 'DROP TABLE "tests_model" CASCADE;', ] @pytest.mark.django_db def test_rename_model__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER TABLE RENAME is unsafe operation'): editor.alter_db_table(Model, 'old_name', 'new_name') assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "old_name" RENAME TO "new_name";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_rename_model__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER TABLE RENAME is unsafe operation'): editor.alter_db_table(Model, 'old_name', 'new_name') assert editor.django_sql == [ 'ALTER TABLE "old_name" RENAME TO "new_name";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_rename_model_with_same_db_table__ok(): with cmp_schema_editor() as editor: editor.alter_db_table(Model, 'same_table', 'same_table') assert editor.collected_sql == editor.django_sql assert editor.django_sql == [] @pytest.mark.django_db def test_change_model_tablespace__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER TABLE SET TABLESPACE is unsafe operation'): editor.alter_db_tablespace(Model, 'old_tablespace', 'new_tablespace') assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" SET TABLESPACE "new_tablespace";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_change_model_tablespace__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER TABLE SET TABLESPACE is unsafe operation'): editor.alter_db_tablespace(Model, 'old_tablespace', 'new_tablespace') assert editor.django_sql == [ 'ALTER TABLE "tests_model" SET TABLESPACE "new_tablespace";', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (4, 2), reason='functionality provided in django 4.2') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_change_model_comment__ok(): with cmp_schema_editor() as editor: editor.alter_db_table_comment(Model, 'old_comment', 'new_comment') assert editor.collected_sql == editor.django_sql assert editor.django_sql == ['COMMENT ON TABLE "tests_model" IS \'new_comment\';'] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, null=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL;' ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_code_default_null__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, default='test', null=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NULL;', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NULL;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db def test_add_field_with_code_default_not_null__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, default='test', null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;' ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_code_default_not_null__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, default='test', null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] >= (5, 0), reason='setting deprecated for django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT=True) def test_add_field_with_code_default_not_null__keep_default__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, default='test', null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='setting deprecated in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_KEEP_DEFAULT=True) def test_add_field_with_code_default_not_null__keep_default__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, default='test', null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_db_default_null__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, db_default='test', null=True) field.set_attributes_from_name('field') field.model = Model editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NULL;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_db_default_not_null__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, db_default='test', null=False) field.set_attributes_from_name('field') field.model = Model editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_code_default_db_default_not_null__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, db_default='test', default='test2', null=False) field.set_attributes_from_name('field') field.model = Model editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) DEFAULT \'test\' NOT NULL;', ] @pytest.mark.django_db def test_add_field_with_not_null__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_field_with_not_null__with_flexible_timeout__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_not_null__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ADD COLUMN NOT NULL is unsafe operation'): field = models.CharField(max_length=40, null=False) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_foreign_key__ok(): with cmp_schema_editor() as editor: field = models.ForeignKey(Model2, null=True, on_delete=models.CASCADE) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field_id" integer NULL;', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID;', ) + [ 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id";', ] + [ 'CREATE INDEX CONCURRENTLY "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field_id" integer NULL ' 'CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'REFERENCES "tests_model2"("id") DEFERRABLE INITIALLY DEFERRED; ' 'SET CONSTRAINTS "tests_model_field_id_0166400c_fk_tests_model2_id" IMMEDIATE;', 'CREATE INDEX "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_field_with_foreign_key__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: field = models.ForeignKey(Model2, null=True, on_delete=models.CASCADE) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field_id" integer NULL;', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID;', ) + flexible_statement_timeout( 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id";', ) + flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field_id" integer NULL ' 'CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'REFERENCES "tests_model2"("id") DEFERRABLE INITIALLY DEFERRED; ' 'SET CONSTRAINTS "tests_model_field_id_0166400c_fk_tests_model2_id" IMMEDIATE;', 'CREATE INDEX "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_primary_key__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, null=True, primary_key=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL;', ) + [ 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_pk" ON "tests_model" ("field");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" ' 'PRIMARY KEY USING INDEX "tests_model_field_0a53d95f_pk";', ) + [ 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL PRIMARY KEY;', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_field_with_primary_key__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, null=True, primary_key=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL;', ) + flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_pk" ON "tests_model" ("field");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" ' 'PRIMARY KEY USING INDEX "tests_model_field_0a53d95f_pk";', ) + flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL PRIMARY KEY;', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_field_with_unique__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, null=True, unique=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL;', ) + [ 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_uniq" ON "tests_model" ("field");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" ' 'UNIQUE USING INDEX "tests_model_field_0a53d95f_uniq";', ) + [ 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ' 'ON "tests_model" ("field" varchar_pattern_ops);', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL UNIQUE;', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_field_with_unique__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40, null=True, unique=True) field.set_attributes_from_name('field') editor.add_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL;', ) + flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_uniq" ON "tests_model" ("field");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" ' 'UNIQUE USING INDEX "tests_model_field_0a53d95f_uniq";', ) + flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ' 'ON "tests_model" ("field" varchar_pattern_ops);', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD COLUMN "field" varchar(40) NULL UNIQUE;', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_integer_to_integer_identity__ok(): with cmp_schema_editor() as editor: old_field = models.IntegerField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.AutoField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer;', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" ADD GENERATED BY DEFAULT AS IDENTITY;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" ADD GENERATED BY DEFAULT AS IDENTITY;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_integer_identity_to_integer__ok(): with cmp_schema_editor() as editor: old_field = models.AutoField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.IntegerField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) if django.VERSION[:2] >= (4, 1): assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP IDENTITY IF EXISTS;', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP IDENTITY IF EXISTS;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer;', ] else: assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer USING "field"::integer;', ) + timeouts( 'DROP SEQUENCE IF EXISTS "tests_model_field_seq" CASCADE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE integer USING "field"::integer;', 'DROP SEQUENCE IF EXISTS "tests_model_field_seq" CASCADE;', ] @pytest.mark.django_db def test_alter_field_type_integer_to_bigint_identity__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.IntegerField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigAutoField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" ADD GENERATED BY DEFAULT AS IDENTITY;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" ADD GENERATED BY DEFAULT AS IDENTITY;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_integer_to_bigint_identity__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.IntegerField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigAutoField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) if django.VERSION[:2] >= (4, 1): assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" ADD GENERATED BY DEFAULT AS IDENTITY;' ] else: assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'DROP SEQUENCE IF EXISTS "tests_model_field_seq" CASCADE;', 'CREATE SEQUENCE "tests_model_field_seq";', 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET DEFAULT nextval(\'"tests_model_field_seq"\');', 'SELECT setval(\'"tests_model_field_seq"\', MAX("field")) FROM "tests_model";', 'ALTER SEQUENCE "tests_model_field_seq" OWNED BY "tests_model"."field";', ] @pytest.mark.django_db def test_alter_field_type_integer_identity_to_bigint__warning(mocker): mocker.patch.object(connection.introspection, 'get_sequences').return_value = [{ 'column': 'field', 'name': 'field_seq', 'table': 'tests_model', }] with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.AutoField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigIntegerField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP IDENTITY IF EXISTS;', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', ) + timeouts( 'DROP SEQUENCE IF EXISTS "field_seq" CASCADE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP IDENTITY IF EXISTS;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'DROP SEQUENCE IF EXISTS "field_seq" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_integer_identity_to_bigint__raise(mocker): mocker.patch.object(connection.introspection, 'get_sequences').return_value = [{ 'column': 'field', 'name': 'field_seq', 'table': 'tests_model', }] with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.AutoField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigIntegerField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) if django.VERSION[:2] >= (4, 1): assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP IDENTITY IF EXISTS;', 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'DROP SEQUENCE IF EXISTS "field_seq" CASCADE;', ] else: assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'DROP SEQUENCE IF EXISTS "tests_model_field_seq" CASCADE;', ] @pytest.mark.django_db def test_alter_field_type_integer_identity_to_bigint_identity__warning(mocker): mocker.patch.object(connection.introspection, 'get_sequences').return_value = [{ 'column': 'field', 'name': 'field_seq', 'table': 'tests_model', }] with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.AutoField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigAutoField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', ) + timeouts( 'ALTER SEQUENCE IF EXISTS "field_seq" AS bigint;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'ALTER SEQUENCE IF EXISTS "field_seq" AS bigint;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_integer_identity_to_bigint_identity__raise(mocker): mocker.patch.object(connection.introspection, 'get_sequences').return_value = [{ 'column': 'field', 'name': 'field_seq', 'table': 'tests_model', }] with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.AutoField(primary_key=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.BigAutoField(primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) if django.VERSION[:2] >= (4, 1): assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'ALTER SEQUENCE IF EXISTS "field_seq" AS bigint;', ] else: assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE bigint USING "field"::bigint;', 'DROP SEQUENCE IF EXISTS "tests_model_field_seq" CASCADE;', 'CREATE SEQUENCE "tests_model_field_seq";', 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET DEFAULT nextval(\'"tests_model_field_seq"\');', 'SELECT setval(\'"tests_model_field_seq"\', MAX("field")) FROM "tests_model";', 'ALTER SEQUENCE "tests_model_field_seq" OWNED BY "tests_model"."field";', ] @pytest.mark.django_db def test_alter_field_type_varchar40_to_varchar20__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=20) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE varchar(20);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_varchar40_to_varchar20_error(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=20) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE varchar(20);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_varchar40_to_varchar80__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=80) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE varchar(80);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_varchar40_to_text__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.TextField() new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE text USING "field"::text;', ] @pytest.mark.django_db def test_alter_field_type_decimal10_2_to_decimal5_2__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=5, decimal_places=2) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(5, 2);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_decimal10_2_to_decimal5_2__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=5, decimal_places=2) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(5, 2);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_decimal10_2_to_decimal20_2__ok(): with cmp_schema_editor() as editor: old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=20, decimal_places=2) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(20, 2);', ] @pytest.mark.django_db def test_alter_field_type_decimal10_2_to_decimal10_3__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=10, decimal_places=3) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(10, 3);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_decimal10_2_to_decimal10_3__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=10, decimal_places=3) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(10, 3);', ] @pytest.mark.django_db def test_alter_field_type_decimal10_2_to_decimal10_1__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=10, decimal_places=1) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(10, 1);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_type_decimal10_2_to_decimal10_1__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER COLUMN TYPE is unsafe operation'): old_field = models.DecimalField(max_digits=10, decimal_places=2) old_field.set_attributes_from_name('field') new_field = models.DecimalField(max_digits=10, decimal_places=1) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" TYPE numeric(10, 1);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_set_not_null__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, null=True) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, null=False) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_notnull" ' 'CHECK ("field" IS NOT NULL) NOT VALID;', ) + [ 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_0a53d95f_notnull";', ] + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET NOT NULL;' ) + timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_notnull";' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_alter_field_set_not_null__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, null=True) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, null=False) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_notnull" ' 'CHECK ("field" IS NOT NULL) NOT VALID;', ) + flexible_statement_timeout( 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_0a53d95f_notnull";', ) + timeouts( 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET NOT NULL;' ) + timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_notnull";' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_filed_drop_not_null__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchone').return_value = None with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, null=False) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, null=True) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP NOT NULL;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_set_code_default__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, default='test') new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) # no sql executed because django doesn't use database defaults assert editor.collected_sql == editor.django_sql assert editor.django_sql == [] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_code_default__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, default='test') old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) # no sql executed because django doesn't use database defaults assert editor.collected_sql == editor.django_sql assert editor.django_sql == [] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_set_db_default__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.CharField(max_length=40, db_default='test') new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" SET DEFAULT \'test\';', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_db_default__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, db_default='test') old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ALTER COLUMN "field" DROP DEFAULT;', ] @pytest.mark.django_db def test_rename_field__warning(): with cmp_schema_editor() as editor: with pytest.warns(UnsafeOperationWarning, match='ALTER TABLE RENAME COLUMN is unsafe operation'): old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('old_field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('new_field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" RENAME COLUMN "old_field" TO "new_field";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_rename_field__raise(): with cmp_schema_editor() as editor: with pytest.raises(UnsafeOperationException, match='ALTER TABLE RENAME COLUMN is unsafe operation'): old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('old_field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('new_field') editor.alter_field(Model, old_field, new_field) assert editor.django_sql == [ 'ALTER TABLE "tests_model" RENAME COLUMN "old_field" TO "new_field";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_field__ok(): with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_field_with_foreign_key__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').side_effect = [ [ ('tests_model_field_fk', 'f', 'tests_model', 'tests_model2', ['field'], ['id']), ('tests_model_field2_fk', 'f', 'tests_model', 'tests_model2', ['field2'], ['id']), ], [ ('tests_model_field2_fk', 'f', 'tests_model', 'tests_model2', ['field2'], ['id']), ], [], ] with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == timeouts( 'SET CONSTRAINTS "tests_model_field_fk" IMMEDIATE; ' 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_fk";' ) + timeouts( 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_field_with_foreign_key_backref__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').side_effect = [ [ ('tests_model2_model_field_fk', 'f', 'tests_model2', 'tests_model', ['model_field'], ['field']), ('tests_model2_model_field2_fk', 'f', 'tests_model2', 'tests_model', ['model_field2'], ['field2']), ], [ ('tests_model2_model_field2_fk', 'f', 'tests_model2', 'tests_model', ['model_field2'], ['field2']), ], [], ] with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == timeouts( 'SET CONSTRAINTS "tests_model2_model_field_fk" IMMEDIATE; ' 'ALTER TABLE "tests_model2" DROP CONSTRAINT "tests_model2_model_field_fk";' ) + timeouts( 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;' ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_field_with_unique_constraint__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').side_effect = [ [ ('tests_model_field2_field_uniq', 'u', 'tests_model', None, ['field2', 'field'], []), ('tests_model_field2_field3_uniq', 'u', 'tests_model', None, ['field2', 'field3'], []), ], [ ('tests_model_field2_field_uniq', 'u', 'tests_model', None, ['field2', 'field'], []), ('tests_model_field2_field3_uniq', 'u', 'tests_model', None, ['field2', 'field3'], []), ], [], ] with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field2_field_uniq";', ) + timeouts( 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_field_with_index__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').side_effect = [ [], [], [ ('tests_model_field2_field_idx', 'tests_model', ['field2', 'field']), ('tests_model_field2_field3_idx', 'tests_model', ['field2', 'field3']), ], ] with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field2_field_idx";', ] + timeouts( 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_remove_field_with_index__with_flexible_timeout__ok(cursor, mocker): mocker.patch.object(cursor, 'execute') mocker.patch.object(cursor, 'fetchall').side_effect = [ [], [], [ ('tests_model_field2_field_idx', 'tests_model', ['field2', 'field']), ('tests_model_field2_field3_idx', 'tests_model', ['field2', 'field3']), ], ] with cmp_schema_editor() as editor: field = models.CharField(max_length=40) field.set_attributes_from_name('field') editor.remove_field(Model, field) assert editor.collected_sql == flexible_statement_timeout( 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field2_field_idx";', ) + timeouts( 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP COLUMN "field" CASCADE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_add_constraint_check__ok(): with cmp_schema_editor() as editor: old_field = models.IntegerField() old_field.set_attributes_from_name('field') new_field = models.PositiveIntegerField() new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_check" ' 'CHECK ("field" >= 0) NOT VALID;', ) + [ 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_0a53d95f_check";', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_check" CHECK ("field" >= 0);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_alter_field_add_constraint_check__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: old_field = models.IntegerField() old_field.set_attributes_from_name('field') new_field = models.PositiveIntegerField() new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_check" ' 'CHECK ("field" >= 0) NOT VALID;', ) + flexible_statement_timeout( 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_0a53d95f_check";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_check" CHECK ("field" >= 0);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_constraint_check__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_0a53d95f_check': { 'columns': ['field'], 'primary_key': False, 'unique': False, 'foreign_key': None, 'check': True, 'index': False, 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: old_field = models.PositiveIntegerField() old_field.set_attributes_from_name('field') new_field = models.IntegerField() new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_check";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_filed_add_constraint_foreign_key__ok(): with cmp_schema_editor() as editor: old_field = models.IntegerField() old_field.set_attributes_from_name('field_id') new_field = models.ForeignKey(Model2, on_delete=models.CASCADE) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID;', ) + [ 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id";', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_alter_filed_add_constraint_foreign_key__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: old_field = models.IntegerField() old_field.set_attributes_from_name('field_id') new_field = models.ForeignKey(Model2, on_delete=models.CASCADE) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED NOT VALID;', ) + flexible_statement_timeout( 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id";', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field_id_0166400c" ON "tests_model" ("field_id");', 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_id_0166400c_fk_tests_model2_id" ' 'FOREIGN KEY ("field_id") REFERENCES "tests_model2" ("id") DEFERRABLE INITIALLY DEFERRED;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_constraint_foreign_key__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_0a53d95f_fk': { 'columns': ['field_id'], 'primary_key': False, 'unique': False, 'foreign_key': (Model2._meta.db_table, 'id'), 'check': False, 'index': False, 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: old_field = models.ForeignKey(Model2, on_delete=models.CASCADE) old_field.set_attributes_from_name('field') new_field = models.IntegerField() new_field.set_attributes_from_name('field_id') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'SET CONSTRAINTS "tests_model_field_0a53d95f_fk" IMMEDIATE; ' 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_fk";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_add_constraint_primary_key__ok(mocker): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, unique=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.CharField(max_length=40, primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_pk" ON "tests_model" ("field");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" ' 'PRIMARY KEY USING INDEX "tests_model_field_0a53d95f_pk";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" PRIMARY KEY ("field");', ] @pytest.mark.skipif(django.VERSION < (5, 2), reason='Composite PK were added in Django 5.2') @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_composite_primary_key_field__ok(mocker): with cmp_schema_editor() as editor: field_a = models.IntegerField(unique=True) field_a.set_attributes_from_name('field') field_a.model = Model field_b = models.IntegerField(unique=True) field_b.set_attributes_from_name('field') field_b.model = Model composite_pk = models.CompositePrimaryKey("product_id", "order_id") composite_pk.set_attributes_from_name('field') composite_pk.model = Model editor.add_field(Model, composite_pk) # Django does not support migrating to a composite primary key: # https://docs.djangoproject.com/en/5.2/topics/composite-primary-key/#migrating-to-a-composite-primary-key assert editor.collected_sql == [] assert editor.django_sql == [] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_alter_field_add_constraint_primary_key__with_flexible_timeout__ok(mocker): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, unique=True) old_field.set_attributes_from_name('field') old_field.model = Model new_field = models.CharField(max_length=40, primary_key=True) new_field.set_attributes_from_name('field') new_field.model = Model editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_pk" ON "tests_model" ("field");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" ' 'PRIMARY KEY USING INDEX "tests_model_field_0a53d95f_pk";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_pk" PRIMARY KEY ("field");', ] @pytest.mark.skipif(django.VERSION < (5, 2), reason='Composite PK were added in Django 5.2') @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_composite_primary_key_field__with_flexible_timeout__ok(mocker): with cmp_schema_editor() as editor: field_a = models.IntegerField(unique=True) field_a.set_attributes_from_name('field') field_a.model = Model field_b = models.IntegerField(unique=True) field_b.set_attributes_from_name('field') field_b.model = Model composite_pk = models.CompositePrimaryKey("product_id", "order_id") composite_pk.set_attributes_from_name('field') composite_pk.model = Model editor.add_field(Model, composite_pk) # Django does not support migrating to a composite primary key: # https://docs.djangoproject.com/en/5.2/topics/composite-primary-key/#migrating-to-a-composite-primary-key assert editor.collected_sql == [] assert editor.django_sql == [] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_constraint_primary_key__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_0a53d95f_pk': { 'columns': ['field'], 'primary_key': True, 'unique': True, 'foreign_key': None, 'check': False, 'index': False, 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, primary_key=True) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_pk";', ) + [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field_0a53d95f_like";', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_pk";', 'DROP INDEX IF EXISTS "tests_model_field_0a53d95f_like";', ] @pytest.mark.skipif(django.VERSION < (5, 2), reason='Composite PK were added in Django 5.2') @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_composite_primary_key_field__ok(mocker): with cmp_schema_editor() as editor: field_a = models.IntegerField(unique=True) field_a.set_attributes_from_name('field') field_a.model = Model field_b = models.IntegerField(unique=True) field_b.set_attributes_from_name('field') field_b.model = Model composite_pk = models.CompositePrimaryKey("product_id", "order_id") composite_pk.set_attributes_from_name('field') composite_pk.model = Model editor.remove_field(Model, composite_pk) # Django does not support migrating from a composite primary key: # https://docs.djangoproject.com/en/5.2/topics/composite-primary-key/#migrating-to-a-composite-primary-key assert editor.collected_sql == [] assert editor.django_sql == [] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_add_constraint_unique__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, unique=True) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_uniq" ON "tests_model" ("field");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" ' 'UNIQUE USING INDEX "tests_model_field_0a53d95f_uniq";', ) + [ 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ' 'ON "tests_model" ("field" varchar_pattern_ops);', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" UNIQUE ("field");', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_alter_field_add_constraint_unique__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, unique=True) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field_0a53d95f_uniq" ON "tests_model" ("field");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" ' 'UNIQUE USING INDEX "tests_model_field_0a53d95f_uniq";', ) + flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ' 'ON "tests_model" ("field" varchar_pattern_ops);', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field_0a53d95f_uniq" UNIQUE ("field");', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_alter_field_drop_constraint_unique__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_0a53d95f_uniq': { 'columns': ['field'], 'primary_key': False, 'unique': True, 'foreign_key': None, 'check': False, 'index': False, 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, unique=True) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_uniq";', ) + [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field_0a53d95f_like";', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_0a53d95f_uniq";', 'DROP INDEX IF EXISTS "tests_model_field_0a53d95f_like";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_index__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, db_index=True) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f" ON "tests_model" ("field");', 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field_0a53d95f" ON "tests_model" ("field");', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40, db_index=True) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f" ON "tests_model" ("field");', ) + flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field_0a53d95f" ON "tests_model" ("field");', 'CREATE INDEX "tests_model_field_0a53d95f_like" ON "tests_model" ("field" varchar_pattern_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_index__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_idx': { 'columns': ['field'], 'orders': ['ASC'], 'primary_key': False, 'unique': False, 'foreign_key': None, 'check': False, 'index': True, 'type': 'idx', 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: old_field = models.CharField(max_length=40, db_index=True) old_field.set_attributes_from_name('field') new_field = models.CharField(max_length=40) new_field.set_attributes_from_name('field') editor.alter_field(Model, old_field, new_field) assert editor.collected_sql == [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field_idx";', ] assert editor.django_sql == [ 'DROP INDEX IF EXISTS "tests_model_field_idx";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_unique_together__ok(mocker): with cmp_schema_editor() as editor: editor.alter_unique_together(Model, [], [['field1', 'field2']]) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field1_field2_51878e08_uniq" ' 'ON "tests_model" ("field1", "field2");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field1_field2_51878e08_uniq" ' 'UNIQUE USING INDEX "tests_model_field1_field2_51878e08_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field1_field2_51878e08_uniq" ' 'UNIQUE ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_unique_together__with_flexible_timeout__ok(mocker): with cmp_schema_editor() as editor: editor.alter_unique_together(Model, [], [['field1', 'field2']]) assert editor.collected_sql == flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "tests_model_field1_field2_51878e08_uniq" ' 'ON "tests_model" ("field1", "field2");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field1_field2_51878e08_uniq" ' 'UNIQUE USING INDEX "tests_model_field1_field2_51878e08_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "tests_model_field1_field2_51878e08_uniq" ' 'UNIQUE ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_unique_together__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_idx': { 'columns': ['field1', 'field2'], 'primary_key': False, 'unique': True, 'foreign_key': None, 'check': False, 'index': False, 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: editor.alter_unique_together(Model, [['field1', 'field2']], []) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "tests_model_field_idx";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_index_together__ok(mocker): with cmp_schema_editor() as editor: editor.alter_index_together(Model, [], [['field1', 'field2']]) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_field2_51878e08_idx" ' 'ON "tests_model" ("field1", "field2");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_field2_51878e08_idx" ON "tests_model" ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_index_together__with_flexible_timeout__ok(mocker): with cmp_schema_editor() as editor: editor.alter_index_together(Model, [], [['field1', 'field2']]) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_field2_51878e08_idx" ' 'ON "tests_model" ("field1", "field2");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_field2_51878e08_idx" ON "tests_model" ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_remove_index_together__ok(mocker): mocker.patch.object(connection.introspection, 'get_constraints').return_value = { 'tests_model_field_idx': { 'columns': ['field1', 'field2'], 'orders': ['ASC', 'ASC'], 'primary_key': False, 'unique': False, 'foreign_key': None, 'check': False, 'index': True, 'type': 'idx', 'definition': None, 'options': None, } } with cmp_schema_editor() as editor: editor.alter_index_together(Model, [['field1', 'field2']], []) assert editor.collected_sql == [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field_idx";', ] assert editor.django_sql == [ 'DROP INDEX IF EXISTS "tests_model_field_idx";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_check_constraint__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.CheckConstraint(check=models.Q(field1__gt=0), name='field1_gt_0')) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_gt_0" ' 'CHECK ("field1" > 0) NOT VALID;', ) + [ 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "field1_gt_0";', ] assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_gt_0" CHECK ("field1" > 0);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_check_constraint__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.CheckConstraint(check=models.Q(field1__gt=0), name='field1_gt_0')) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_gt_0" ' 'CHECK ("field1" > 0) NOT VALID;', ) + flexible_statement_timeout( 'ALTER TABLE "tests_model" VALIDATE CONSTRAINT "field1_gt_0";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_gt_0" CHECK ("field1" > 0);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_meta_check_constraint__ok(): with cmp_schema_editor() as editor: editor.remove_constraint(Model, models.CheckConstraint(check=models.Q(field1__gt=0), name='field1_gt_0')) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "field1_gt_0";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "field1_gt_0";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq')) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" ' 'UNIQUE USING INDEX "field1_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" UNIQUE ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_unique_constraint__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1");', ) + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" ' 'UNIQUE USING INDEX "field1_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" UNIQUE ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_multicolumn_unique_constraint__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.UniqueConstraint(fields=('field1', 'field2'), name='field1_field2_uniq')) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_field2_uniq" ON "tests_model" ("field1", "field2");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_field2_uniq" ' 'UNIQUE USING INDEX "field1_field2_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_field2_uniq" UNIQUE ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_conditional_unique_constraint__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.UniqueConstraint( fields=('field1',), name='field1_uniq', condition=models.Q(field1__gt=0))) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") WHERE "field1" > 0;', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1") WHERE "field1" > 0;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_conditional_multicolumn_unique_constraint__ok(): with cmp_schema_editor() as editor: editor.add_constraint(Model, models.UniqueConstraint( fields=('field1', 'field2'), name='field1_field2_uniq', condition=models.Q(field1=models.F('field2')))) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_field2_uniq" ON "tests_model" ("field1", "field2") ' 'WHERE "field1" = ("field2");', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_field2_uniq" ON "tests_model" ("field1", "field2") ' 'WHERE "field1" = ("field2");', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_nulls_distinct_fields__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', nulls_distinct=True), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") NULLS DISTINCT;', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" ' 'UNIQUE USING INDEX "field1_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" UNIQUE NULLS DISTINCT ("field1");', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_nulls_not_distinct_fields__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', nulls_distinct=False), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") NULLS NOT DISTINCT;', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" ' 'UNIQUE USING INDEX "field1_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" UNIQUE NULLS NOT DISTINCT ("field1");', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_nulls_distinct_expression__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(models.F('field1'), name='field1_uniq', nulls_distinct=True), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") NULLS DISTINCT;', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1") NULLS DISTINCT;', ] @pytest.mark.django_db @pytest.mark.skipif(django.VERSION[:2] < (5, 0), reason='functionality provided in django 5.0') @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_nulls_not_distinct_expression__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(models.F('field1'), name='field1_uniq', nulls_distinct=False), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") NULLS NOT DISTINCT;', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1") NULLS NOT DISTINCT;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_deferrable_deferred__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', deferrable=models.Deferrable.DEFERRED), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" ' 'UNIQUE USING INDEX "field1_uniq" DEFERRABLE INITIALLY DEFERRED;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_uniq" UNIQUE ("field1") DEFERRABLE INITIALLY DEFERRED;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_deferrable_immediate__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field2',), name='field2_uniq', deferrable=models.Deferrable.IMMEDIATE), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field2_uniq" ON "tests_model" ("field2");', ] + timeouts( 'ALTER TABLE "tests_model" ADD CONSTRAINT "field2_uniq" ' 'UNIQUE USING INDEX "field2_uniq" DEFERRABLE INITIALLY IMMEDIATE;', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field2_uniq" UNIQUE ("field2") DEFERRABLE INITIALLY IMMEDIATE;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_include__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', include=['field2']), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") INCLUDE ("field2");', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1") INCLUDE ("field2");' ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_opclasses__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', opclasses=['int4_ops']), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1" int4_ops);', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1" int4_ops);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_unique_constraint_condition__ok(): with cmp_schema_editor() as editor: editor.add_constraint( Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq', condition=models.Q(field1__lt=1)), ) assert editor.collected_sql == [ 'CREATE UNIQUE INDEX CONCURRENTLY "field1_uniq" ON "tests_model" ("field1") WHERE "field1" < 1;', ] assert editor.django_sql == [ 'CREATE UNIQUE INDEX "field1_uniq" ON "tests_model" ("field1") WHERE "field1" < 1;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_meta_unique_constraint__ok(): with cmp_schema_editor() as editor: editor.remove_constraint(Model, models.UniqueConstraint(fields=('field1',), name='field1_uniq')) assert editor.collected_sql == timeouts( 'ALTER TABLE "tests_model" DROP CONSTRAINT "field1_uniq";', ) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "field1_uniq";', ] @pytest.mark.django_db def test_add_meta_exclusion_constraint__warning(): with pytest.warns(UnsafeOperationWarning, match='ADD CONSTRAINT EXCLUDE is unsafe operation'): with cmp_schema_editor() as editor: editor.add_constraint(Model, ExclusionConstraint(expressions=[('field1', '=')], name='field1_excluded')) assert editor.collected_sql == editor.django_sql assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_excluded" EXCLUDE USING GIST ("field1" WITH =);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_exclusion_constraint__raise(): with pytest.raises(UnsafeOperationException, match='ADD CONSTRAINT EXCLUDE is unsafe operation'): with cmp_schema_editor() as editor: editor.add_constraint(Model, ExclusionConstraint(expressions=[('field1', '=')], name='field1_excluded')) assert editor.django_sql == [ 'ALTER TABLE "tests_model" ADD CONSTRAINT "field1_excluded" EXCLUDE USING GIST ("field1" WITH =);', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_meta_exclusion_constraint__ok(): with cmp_schema_editor() as editor: editor.remove_constraint(Model, ExclusionConstraint(expressions=[('field1', '=')], name='field1_excluded')) assert editor.collected_sql == timeouts(editor.django_sql) assert editor.django_sql == [ 'ALTER TABLE "tests_model" DROP CONSTRAINT "field1_excluded";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ' 'ON "tests_model" ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ' 'ON "tests_model" ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_multicolumn_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(fields=['field1', 'field2'], name='tests_model_field1_45bc7f_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_45bc7f_idx" ' 'ON "tests_model" ("field1", "field2");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_45bc7f_idx" ON "tests_model" ("field1", "field2");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_conditional_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(condition=models.Q(field1__gt=0), fields=['field1'], name='field1_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "field1_idx" ON "tests_model" ("field1") WHERE "field1" > 0;', ] assert editor.django_sql == [ 'CREATE INDEX "field1_idx" ON "tests_model" ("field1") WHERE "field1" > 0;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_conditional_multicolumn_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(condition=models.Q(field1__gt=0), fields=['field1', 'field2'], name='field1_field2_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "field1_field2_idx" ON "tests_model" ("field1", "field2") WHERE "field1" > 0;', ] assert editor.django_sql == [ 'CREATE INDEX "field1_field2_idx" ON "tests_model" ("field1", "field2") WHERE "field1" > 0;', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_index_concurrently__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx'), concurrently=True) assert editor.collected_sql == editor.django_sql assert editor.django_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" ("field1");' ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_index_concurrently__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx'), concurrently=True) assert editor.collected_sql == flexible_statement_timeout(editor.django_sql) assert editor.django_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" ("field1");' ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_meta_index__ok(): with cmp_schema_editor() as editor: editor.remove_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field1_9b60dc_idx";', ] assert editor.django_sql == [ 'DROP INDEX IF EXISTS "tests_model_field1_9b60dc_idx";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_drop_meta_index_concurrently__ok(): with cmp_schema_editor() as editor: editor.remove_index(Model, models.Index(fields=['field1'], name='tests_model_field1_9b60dc_idx'), concurrently=True) assert editor.collected_sql == editor.django_sql assert editor.django_sql == [ 'DROP INDEX CONCURRENTLY IF EXISTS "tests_model_field1_9b60dc_idx";', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_brin_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, BrinIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING brin ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING brin ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_brin_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, BrinIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING brin ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING brin ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_btree_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, BTreeIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING btree ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING btree ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_btree_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, BTreeIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING btree ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING btree ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_gin_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, GinIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING gin ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING gin ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_gin_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, GinIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING gin ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING gin ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_gist_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, GistIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING gist ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING gist ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_gist_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, GistIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING gist ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING gist ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_hash_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, HashIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING hash ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING hash ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_hash_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, HashIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING hash ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING hash ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True) def test_add_meta_spgist_index__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, SpGistIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == [ 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING spgist ("field1");', ] assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING spgist ("field1");', ] @pytest.mark.django_db @override_settings(ZERO_DOWNTIME_MIGRATIONS_RAISE_FOR_UNSAFE=True, ZERO_DOWNTIME_MIGRATIONS_FLEXIBLE_STATEMENT_TIMEOUT=True) def test_add_meta_spgist_index__with_flexible_timeout__ok(): with cmp_schema_editor() as editor: editor.add_index(Model, SpGistIndex(fields=['field1'], name='tests_model_field1_9b60dc_idx')) assert editor.collected_sql == flexible_statement_timeout( 'CREATE INDEX CONCURRENTLY "tests_model_field1_9b60dc_idx" ON "tests_model" USING spgist ("field1");', ) assert editor.django_sql == [ 'CREATE INDEX "tests_model_field1_9b60dc_idx" ON "tests_model" USING spgist ("field1");', ] ================================================ FILE: tox.ini ================================================ [tox] envlist = py{3.10,3.11,3.12,3.13}-django{5.1,5.2}-psycopg{2,3} py{3.10,3.11,3.12}-django{5.0}-psycopg{2,3} py{3.8,3.9,3.10,3.11,3.12}-django{4.2}-psycopg{2,3} [testenv] usedevelop = True allowlist_externals = bash commands = # linters py{3.13}-django{5.2}-psycopg{3}: flake8 py{3.13}-django{5.2}-psycopg{3}: isort . --check --diff # unit tests py{3.8,3.9,3.10,3.11,3.12,3.13}-django{4.2,5.0,5.1,5.2}-psycopg{2,3}: bash -c "DB_HOST=pg17 DB_USER=test pytest tests/unit" py{3.8,3.9,3.10,3.11,3.12,3.13}-django{4.2,5.0,5.1,5.2}-psycopg{2,3}: bash -c "DB_HOST=postgis17 DB_USER=root DB_ENGINE=django_zero_downtime_migrations.backends.postgis pytest tests/unit" # integration tests py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=pg17 DB_USER=test DB_ENGINE=django.db.backends.postgresql pytest tests/integration" py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=pg17 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=postgis17 DB_USER=root DB_ENGINE=django_zero_downtime_migrations.backends.postgis pytest tests/integration" # old psycopg version support integration tests py{3.13}-django{5.2}-psycopg{2}: bash -c "DB_HOST=pg17 DB_USER=test DB_SUPER_USER=root pytest tests/integration" # old postgres version support integration tests py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=pg16 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=pg15 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.13}-django{5.2}-psycopg{3}: bash -c "DB_HOST=pg14 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.13}-django{5.1}-psycopg{3}: bash -c "DB_HOST=pg13 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.12}-django{5.0}-psycopg{3}: bash -c "DB_HOST=pg12 DB_USER=test DB_SUPER_USER=root pytest tests/integration" # old django version support integration tests py{3.13}-django{5.1}-psycopg{3}: bash -c "DB_HOST=pg17 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.12}-django{5.0}-psycopg{3}: bash -c "DB_HOST=pg17 DB_USER=test DB_SUPER_USER=root pytest tests/integration" py{3.12}-django{4.2}-psycopg{3}: bash -c "DB_HOST=pg17 DB_USER=test DB_SUPER_USER=root pytest tests/integration" deps = py{3.13}-django{5.2}-psycopg{3}: flake8 py{3.13}-django{5.2}-psycopg{3}: isort pytest pytest-django pytest-mock psycopg2: psycopg2-binary psycopg3: psycopg[binary] django4.2: django>=4.2,<5.0 django5.0: django>=5.0,<5.1 django5.1: django>=5.1,<5.2 django5.2: django>=5.2,<6.0