Commit d718bd15 authored by Michael Fladischer's avatar Michael Fladischer

importing python-django-pgtrigger_4.7.0.orig.tar.gz

version: 2.1
orbs:
opus10:
executors:
python-pg:
parameters:
pg_version:
type: "string"
default: "14.4"
working_directory: /code
docker:
- image: opus10/circleci-public-django-app:2023-06-04
environment:
# Ensure makefile commands are not wrapped in "docker-compose run"
EXEC_WRAPPER: ''
DATABASE_URL: postgres://root@localhost/circle_test?sslmode=disable
TOX_PARALLEL_NO_SPINNER: 1
- image: cimg/postgres:<<parameters.pg_version>>
environment:
POSTGRES_USER: root
POSTGRES_DB: circle_test
POSTGRES_PASSWORD: password
commands:
test:
steps:
- checkout
- restore_cache:
key: v4-{{ checksum "poetry.lock" }}
- run: make dependencies
- run: make full-test-suite
- save_cache:
key: v4-{{ checksum "poetry.lock" }}
paths:
- /home/circleci/.cache/pypoetry/
- /code/.venv
- /code/.tox
jobs:
test_pg_min:
executor:
name: opus10/python-pg
pg_version: "13.11"
steps:
- opus10/test
test_pg_max:
executor:
name: opus10/python-pg
pg_version: "15.2"
steps:
- opus10/test
lint:
executor: opus10/python-pg
steps:
- checkout
- restore_cache:
key: v4-{{ checksum "poetry.lock" }}
- run: make dependencies
- run: make lint
check_changelog:
executor: opus10/python-pg
steps:
- checkout
- restore_cache:
key: v4-{{ checksum "poetry.lock" }}
- run: make dependencies
- run: git tidy-log origin/master..
- run: make tidy-lint
deploy:
executor: opus10/python-pg
steps:
- checkout
- run: ssh-add -D
- run: echo "${GITHUB_DEVOPS_PRIVATE_SSH_KEY_BASE64}" | base64 --decode | ssh-add - > /dev/null
- restore_cache:
key: v4-{{ checksum "poetry.lock" }}
- run: make dependencies
- run: poetry run python devops.py deploy
workflows:
version: 2
on_commit:
jobs:
- test_pg_min
- test_pg_max
- lint
- check_changelog:
filters:
branches:
ignore: master
- deploy:
context: public-django-app
requires:
- test_pg_min
- test_pg_max
- lint
filters:
branches:
only: master
root = true
[*]
charset = utf-8
end_of_line = lf
indent_size = 4
indent_style = space
insert_final_newline = true
trim_trailing_whitespace = true
[*.{yaml,yml}]
indent_size = 2
[makefile]
indent_style = tab
# Remember - commit messages are used to generate release notes!
# Use the following template when writing a commit message or
# use "git tidy-commit" to commit a properly-formatted message.
#
# ---- Commit Message Format ----
#
# {{ schema.summary.help }}
#
# {{ schema.description.help }}
#
{% for entry in schema %}
{% if entry.label not in ['summary', 'description'] %}
# {{ entry.label.replace('_', '-').title() }}: {{ entry.help }}
{% endif %}
{% endfor %}
- label: type
name: Type
help: The type of change.
type: string
choices:
- api-break
- bug
- feature
- trivial
- label: summary
name: Summary
help: A high-level summary of the changes.
type: string
- label: description
name: Description
help: An in-depth description of the changes.
type: string
condition: ['!=', 'type', 'trivial']
multiline: True
required: False
{% if output == ':github/pr' %}
**Heads up!** This is what the release notes will look like based on the commits.
{% endif %}
{% if not range %}
# Changelog
{% endif %}
{% for tag, commits_by_tag in commits.exclude('summary', '.*\[skip ci\].*', match=True).group('tag').items() %}
## {{ tag|default('Unreleased', True) }} {% if tag.date %}({{ tag.date.date() }}){% endif %}
{% for type, commits_by_type in commits_by_tag.group('type', ascending_keys=True, none_key_last=True).items() %}
### {{ type|default('Other', True)|title }}
{% for commit in commits_by_type %}
{% if not commit.is_parsed %}
- {{ commit.sha[:7] }}: Commit could not be parsed.
{% else %}
- {{ commit.summary }} [{{ commit.author_name }}, {{ commit.sha[:7] }}]
{% if commit.description %}
{{ commit.description|indent(4) }}
{% endif %}
{% endif %}
{% endfor %}
{% endfor %}
{% endfor %}
# Remember - commit messages are used to generate release notes!
# Use the following template when writing a commit message or
# use "git tidy-commit" to commit a properly-formatted message.
#
# ---- Commit Message Format ----
#
# A high-level summary of the changes.
#
# An in-depth description of the changes.
#
# Type: The type of change.
version: 2
python:
version: 3.8
install:
- requirements: docs/requirements.txt
# Changelog
## 4.7.0 (2023-06-08)
### Feature
- Added Python 3.11, Django 4.2, and Psycopg 3 support [Wesley Kendall, 27dc243]
Python 3.11, Django 4.2, and Psycopg 3 are now supported and tested.
Django 2.2 support has been dropped.
## 4.6.0 (2022-10-07)
### Feature
- Added ``pgtrigger.Func`` for accessing model properties in function declarations. [Wesley Kendall, 4bd6abf]
When describing a trigger in ``Meta``, it's not possible to access model meta properties
like ``db_table``. ``pgtrigger.Func`` solves this by exposing ``meta``, ``fields``,
and ``columns`` variables that can be used in a format string.
See the trigger cookbook in the docs for an example.
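    As a rough sketch (hypothetical model; the ``meta``/``fields``/``columns`` variables are the ones described above):

    ```python
    import pgtrigger
    from django.db import models

    class Order(models.Model):
        status = models.CharField(max_length=32)

        class Meta:
            triggers = [
                pgtrigger.Trigger(
                    name="echo_table",
                    when=pgtrigger.After,
                    operation=pgtrigger.Insert,
                    level=pgtrigger.Row,
                    # "meta", "fields", and "columns" can be referenced in the format string
                    func=pgtrigger.Func("RAISE NOTICE 'inserting into {meta.db_table}'; RETURN NEW;"),
                )
            ]
    ```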
- Added ``ReadOnly`` trigger for uneditable models and fields [Wesley Kendall, 0a3c162]
The ``pgtrigger.ReadOnly`` trigger protects updates on models and takes
an optional ``fields`` or ``exclude`` argument to specify which fields are
read only. If no arguments are provided, the entire model is read only.
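    For example, a sketch with a hypothetical model:

    ```python
    import pgtrigger
    from django.db import models

    class Post(models.Model):
        body = models.TextField()
        created_at = models.DateTimeField(auto_now_add=True)

        class Meta:
            triggers = [
                # Only "created_at" is read only; omit "fields"/"exclude"
                # to make the entire model read only.
                pgtrigger.ReadOnly(name="read_only_created_at", fields=["created_at"])
            ]
    ```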
### Trivial
- Updated with latest Django template [Wesley Kendall, 84b46f1]
## 4.5.3 (2022-09-19)
### Trivial
- Fix typo in documentation [Francisco Couzo, def5432]
- Fix issues when using Django's dummy database. [Wesley Kendall, cc1cb95]
- Fixed minor documentation typos [Wes Kendall, dc473ff]
## 4.5.2 (2022-09-06)
### Trivial
- Add Soft-Delete Model Manager example to docs [Jason Oppel, 3a46ae7]
## 4.5.1 (2022-09-01)
### Trivial
- Remove unused migration code and restructure docs [Wes Kendall, a8793fc]
- Optimize test suite [Wes Kendall, 863fa93]
## 4.5.0 (2022-08-31)
### Bug
- Migrations properly serialize dynamic triggers and add better support for reverse migrations [Wes Kendall, 2eb3014]
Triggers that override ``get_func`` or otherwise generate dynamic SQL are properly reflected
in migrations when the underlying implementation changes. Along with this, migrations now serialize
SQL objects instead of trigger classes, making it more robust when reversing migrations or
updating underlying implementations of existing triggers.
This change updates the hashes of all triggers and thus re-creates all triggers when running
``makemigrations`` or when manually installing them.
## 4.4.0 (2022-08-27)
### Bug
- Pruning/installations fixed for Postgres versions 12 and under. [Wes Kendall, 22d60e9]
Partitioned table support introduced a bug in the trigger management
commands for Postgres 12 and under. This has been fixed.
### Trivial
- Local development enhancements [Wes Kendall, a4d3c9c]
## 4.3.4 (2022-08-26)
### Trivial
- Test against Django 4.1 and other CI improvements [Wes Kendall, 813f67e]
## 4.3.3 (2022-08-24)
### Trivial
- Fix ReadTheDocs builds [Wes Kendall, 3870643]
## 4.3.2 (2022-08-20)
### Trivial
- Fix release note rendering and code formatting changes [Wes Kendall, c834606]
## 4.3.1 (2022-08-19)
### Trivial
- Fixed ReadTheDocs builds [Wes Kendall, 2cd0c9e]
## 4.3.0 (2022-08-18)
### Feature
- Support for partitioned tables [Wes Kendall, 863b8cb]
Installation commands and all core trigger functionality works with partitioned tables.
Users will need to run
``python manage.py pgtrigger install`` to upgrade existing trigger installations,
otherwise they will appear as outdated when running ``python manage.py pgtrigger ls``.
Although outdated triggers will still run successfully for non-partitioned tables, this
backwards compatibility will be removed in version 5.
## 4.2.1 (2022-08-18)
### Trivial
- Do schema editor patching in ``App.ready()`` instead of module load [Wes Kendall, cce99ce]
## 4.2.0 (2022-08-18)
### Bug
- Ensure interoperability with other Postgres backends [Wes Kendall, 1c4f480]
``django-pgtrigger`` trigger migrations are interoperable with Postgis and
other Postgres-flavored database backends.
## 4.1.0 (2022-08-17)
### Bug
- Allow altering columns from trigger conditions [Wes Kendall, 1178457]
Previously if one changed the column type of a field used in a trigger condition,
installation would fail because Postgres doesn't allow this.
The schema editor was patched to allow for this behavior, dropping and recreating
triggers when column types are altered.
## 4.0.1 (2022-08-15)
### Trivial
- Fixed minor issue in settings preventing docs from being built [Wes Kendall, 5ad18f8]
## 4.0.0 (2022-08-15)
### Api-Break
- Multi-database and registry behavior changed [Wes Kendall, 0663807]
There were four key additions around multi-database and multi-schema
support:
1. When using a multi-database environment, ``django-pgtrigger``
now uses ``allow_migrate`` of the router rather than ``db_for_write``
to determine if a trigger should be installed for a model.
2. Management commands were changed to operate on one database at a time
to be consistent with Django management commands. Install, uninstall,
prune, disable, enable, and ls all take an optional ``--database``
argument.
3. ``pgtrigger.ignore``, ``pgtrigger.constraints``, and ``pgtrigger.schema``
were all updated to take a ``databases`` argument, defaulting to
working on every postgres database when used for dynamic runtime behavior.
4. The Postgres function used by ``pgtrigger.ignore`` is always installed
in the public schema by default. It is referenced using its fully-qualified
path. The schema can be changed with ``settings.PGTRIGGER_SCHEMA``. Setting
it to ``None`` will use the schema in the search path. Because of this
change, the SQL for installed triggers changes, which causes triggers to
appear as outdated when listing them. This can be fixed by running
``manage.py pgtrigger install`` to re-install triggers.
Along with this, there were a few other breaking changes to the API:
1. ``pgtrigger.get`` was renamed to ``pgtrigger.registered``.
2. ``manage.py pgtrigger ls`` shows the trigger status followed by the URI in
each line of output.
type: api-break
### Bug
- Reference ``UpdateSearchVector`` trigger columns correctly [Wes Kendall, 7d40894]
Columns configured in the ``UpdateSearchVector`` trigger were previously
referenced in SQL by their model field name and not their column name.
### Feature
- Added multi-schema support [Wes Kendall, 98342f2]
``django-pgtrigger`` didn't handle multiple schemas well, causing some issues for
legacy installation commands.
Multiple schema support is a first-class citizen. Depending on the database setup, you
can now take advantage of the ``--schema`` options for management commands to
dynamically set the schema.
Docs were added that overview multi-schema support.
### Trivial
- Added docs for using triggers in abstract models [Wes Kendall, cd215ac]
- Refactored project structure [Wes Kendall, 4d53eef]
## 3.4.0 (2022-08-11)
### Bug
- Fixed issues using ``pgtrigger.ignore`` with multiple databases [Wes Kendall, 557f0e1]
``pgtrigger.ignore`` now uses the connection of the database router
when ignoring triggers.
### Feature
- Add ``pgtrigger.UpdateSearchVector`` to keep search vectors updated [Wes Kendall, 671e8be]
When using Django's full-text search, one can keep a
``SearchVectorField`` updated with the relevant document fields
by using ``pgtrigger.UpdateSearchVector``.
An example was added to the trigger cookbook.
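    A sketch of the usage described above (hypothetical model and field names):

    ```python
    import pgtrigger
    from django.contrib.postgres.search import SearchVectorField
    from django.db import models

    class Article(models.Model):
        title = models.CharField(max_length=256)
        body = models.TextField()
        search_vector = SearchVectorField(null=True)

        class Meta:
            triggers = [
                # Keep "search_vector" in sync with "title" and "body"
                pgtrigger.UpdateSearchVector(
                    name="update_search_vector",
                    vector_field="search_vector",
                    document_fields=["title", "body"],
                )
            ]
    ```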
- Added ``pgtrigger.constraints`` for runtime configuration of deferrable triggers [Wes Kendall, 4b77b7b]
``pgtrigger.constraints`` mimics Postgres's ``SET CONSTRAINTS`` statement, allowing one
to dynamically modify when a deferrable trigger runs.
Documentation was also added for deferrable triggers with an example in the cookbook.
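    As a sketch, assuming a deferrable trigger named ``my_trigger`` registered on a hypothetical ``app_label.MyModel`` (triggers are referenced by ``app_label.Model:trigger_name`` URIs):

    ```python
    import pgtrigger
    from django.db import transaction

    with transaction.atomic():
        # Run the deferred trigger at the end of each statement for the rest
        # of this transaction (mimics "SET CONSTRAINTS ... IMMEDIATE").
        pgtrigger.constraints(pgtrigger.Immediate, "app_label.MyModel:my_trigger")
    ```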
- Added deferrable triggers [Wes Kendall, fe4f16e]
Triggers now have an optional ``timing`` argument. If set, triggers
will be created as "CONSTRAINT" triggers that can be deferred.
When ``timing`` is set to ``pgtrigger.Immediate``, the trigger will
run at the end of a statement. ``pgtrigger.Deferred`` will cause
the trigger to run at the end of the transaction.
Note that deferrable triggers must have both
``pgtrigger.After`` and ``pgtrigger.Row`` values set for the
``when`` and ``level`` attributes.
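    A sketch of a deferrable trigger on a hypothetical model:

    ```python
    import pgtrigger
    from django.db import models

    class Account(models.Model):
        class Meta:
            triggers = [
                pgtrigger.Protect(
                    name="protect_deletes_deferred",
                    operation=pgtrigger.Delete,
                    # Deferrable triggers must be row-level AFTER triggers
                    level=pgtrigger.Row,
                    when=pgtrigger.After,
                    # Run at the end of the transaction instead of immediately
                    timing=pgtrigger.Deferred,
                )
            ]
    ```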
## 3.3.0 (2022-08-10)
### Bug
- Fixes ignoring triggers with nested transactions [Wes Kendall, d32113d]
``pgtrigger.ignore`` avoids injecting SQL when transactions are in a failed
state, allowing for one to use nested transactions while ignoring triggers.
- Fixed issue re-installing triggers with different conditions. [Wes Kendall, 68e29d2]
Triggers with conditions that change were not successfully
re-installed with ``pgtrigger.install``. Note that this only affects
legacy installation and not installation with the new migration system.
## 3.2.0 (2022-08-08)
### Feature
- Support proxy models on default many-to-many "through" relationships. [Wes Kendall, 4cb0f65]
Previously one had to use an unmanaged model to declare triggers on default
many-to-many "through" relationships. Users can now define a proxy model
on these instead.
Support for unmanaged models was dropped.
## 3.1.0 (2022-08-08)
### Api-Break
- Integration with Django's migration system. [Wes Kendall, 6916c14]
Triggers are fully integrated with Django's migration system, and they are no longer
installed at the end of migrations by default. Users instead need to run
``python manage.py makemigrations`` to make trigger migrations for their applications.
Triggers for models in third-party apps are declared with proxy models. Triggers
for default many-to-many "through" models are declared with unmanaged models.
For instructions on upgrading or preserving legacy behavior, see the frequently
asked questions of the docs.
### Bug
- Fixed issues with proxy models and M2M "through" models. [Wes Kendall, 52aa81f]
Proxy models weren't creating migrations, and M2M "through" models are
handled by making an unmanaged model that points to the right DB table.
### Feature
- Remove dependency on ``django-pgconnection``. [Wes Kendall, af0c908]
Users no longer have to wrap ``settings.DATABASES`` with
``django-pgconnection`` in order to use the ``pgtrigger.ignore``
function.
## 2.5.1 (2022-07-31)
### Trivial
- Updated with latest Django template, fixing doc builds [Wes Kendall, 4b175a4]
## 2.5.0 (2022-07-30)
### Bug
- Ignore non-postgres databases in global operations [Wes Kendall, a1aff5d]
Some operations, such as pruning triggers, would iterate over all databases
in a project, including non-postgres ones. This fix ignores non-postgres
databases.
- Fixes transaction leak when using ``pgtrigger.ignore()`` [Wes Kendall, 1501d7e]
``pgtrigger.ignore()`` would continue to ignore triggers until the end of the
transaction once the context manager exited. This is now fixed.
- Fixed more issues related to custom table names [Wes Kendall, a0e1f6d]
Fixes and test cases were added for custom table names that collide
with reserved words.
- Wrap table names to avoid SQL command conflicts [Zac Miller, 86ee983]
Prevents models/tables with names like Order from causing syntax errors
and adds the PyCharm .idea/ folder to .gitignore.
### Feature
- Triggers can be specified in model Meta options [Wes Kendall, 5c1cfec]
Triggers can now be specified with the ``triggers`` attribute of a model's Meta
options. This still works alongside the old method of using ``pgtrigger.register``.
## 2.4.1 (2022-02-24)
### Trivial
- Updated with the latest template, dropped Python 3.6 support, added Docker-based development [Wes Kendall, 25e0f0d]
## 2.4.0 (2021-08-15)
### Bug
- Ensure that generated postgres IDs are lowercase [Wes Kendall, 5c12f66]
django-pgtrigger now ensures that generated postgres IDs are
lowercase. Postgres IDs are case insensitive, and django-pgtrigger
had issues dealing with names that had a mix of cases.
### Feature
- Add the "declare" portion of a trigger as a top-level attribute [Wes Kendall, cd18512]
Previously one had to subclass a trigger and override ``get_declare`` in
order to change how the "DECLARE" fragment of a trigger was rendered.
Users can now provide ``declare`` to the instantiation of a trigger.
The documentation was updated to reflect this change.
### Trivial
- Fix broken code examples in docs [Wes Kendall, 372719c]
## 2.3.3 (2021-08-15)
### Trivial
- Adjusted max length of trigger names to 47 characters [Wes Kendall, 528140f]
- Updated to the latest Django app template [Wes Kendall, d2d5328]
- Change "Delete" to "Update" in tutorial docs [Rich Rauenzahn, 2839a78]
## 2.3.2 (2021-05-30)
### Trivial
- Fixing tags after organization migration [Wes Kendall, 0ba84d2]
## 2.3.1 (2021-05-29)
### Bug
- Throw errors on invalid trigger definitions. [Wes Kendall, 28f1329]
Previously triggers were installed with a broad try/except in order to ignore
errors when installing duplicate triggers. This caused invalid triggers to
not be installed with no errors thrown.
The code was updated to catch the specific exception for duplicate triggers
and allow other trigger errors to surface. A failing test case was
added.
- Fix for wrong argument supplied at _get_database fn call [arpit o.O, 2f7cea1]
### Trivial
- Updated with the latest django app template [Wes Kendall, 9a71227]
- Fix incorrect name in example [Simon Willison, 069e05a]
## 2.2.1 (2021-02-23)
### Trivial
- Optionally change "other" DB name if set at all [Tómas Árni Jónasson, 5b24058]
## 2.2.0 (2021-02-09)
### Feature
- Multiple database support [Wes Kendall, b09ba73]
Supports multiple-database functionality in all core functions and management commands.
By default, all functions and management commands operate over all databases in a
multi-database setup. This behavior can be overridden with the ``--database`` flag.
When calling ``manage.py migrate``, only the database being migrated will have
relevant triggers installed. This fits into how Django supports multi-database
migrations.
## 2.1.0 (2020-10-20)
### Bug
- Fixed possibility of duplicate trigger function names [Wes Kendall, b9b1552]
django-pgtrigger previously only enforced that trigger names were unique
per model. However, the name of the trigger function being called is
global and must also be unique.
django-pgtrigger now adds a hash to the trigger function and
installed trigger name based on the registered model. This
prevents a global collision for trigger functions.
Note that this change will make it appear like no triggers
are installed. Upgrading to this version will involve dropping
and re-creating existing triggers.
## 2.0.0 (2020-10-12)
### Api-Break
- Trigger management commands [Wes Kendall, be26d33]
Adds the ability to manage triggers by name
with the ``manage.py pgtrigger`` management command. This
change includes the following subcommands:
- ``manage.py pgtrigger ls``: List all triggers, their installation
status, and whether they are enabled or disabled.
- ``manage.py pgtrigger install``: Install triggers.
- ``manage.py pgtrigger uninstall``: Uninstall triggers.
- ``manage.py pgtrigger enable``: Enable triggers.
- ``manage.py pgtrigger disable``: Disable triggers.
- ``manage.py pgtrigger prune``: Prune triggers.
Because of this change, names are now enforced for every trigger
and must be unique for every model. Users that wish to
upgrade to this version must now supply a ``name`` keyword
argument to their triggers.
Docs were updated with references to the new management commands.
## 1.3.0 (2020-07-23)
### Feature
- Extend the ``pgtrigger.SoftDelete`` trigger to support more field types. [Wes Kendall, 4dd8cf8]
``pgtrigger.SoftDelete`` takes an optional "value" argument to assign to
the soft-deleted attribute upon deletion. This allows for more flexibility
in soft-delete models that might, for example, set a ``CharField`` to
"inactive".
- ``pgtrigger.FSM`` enforces a finite state machine on a field. [Wes Kendall, bd3980e]
The ``pgtrigger.FSM`` trigger allows a user to configure a field and
a set of valid transitions for the field. An error will be raised
if any transitions happen that are not part of the valid transitions
list.
The docs were updated with an example of how to use ``pgtrigger.FSM``.
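    A sketch with a hypothetical model:

    ```python
    import pgtrigger
    from django.db import models

    class Publication(models.Model):
        status = models.CharField(max_length=32, default="unpublished")

        class Meta:
            triggers = [
                # Only the listed transitions are allowed; anything else raises an error
                pgtrigger.FSM(
                    name="status_fsm",
                    field="status",
                    transitions=[
                        ("unpublished", "published"),
                        ("published", "inactive"),
                    ],
                )
            ]
    ```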
### Trivial
- Added trigger cookbook example for how to track history and model changes. [Wes Kendall, 114a70a]
- Add "versioning" example to trigger cookbook. [Wes Kendall, 842ad5b]
- Added trigger cookbook example of freezing a published model [Wes Kendall, 994e9da]
## 1.2.0 (2020-07-23)
### Feature
- Added ``pgtrigger.ignore`` for dynamically ignoring triggers. [Wes Kendall, b3557bb]
``pgtrigger.ignore`` can be used to ignore triggers per thread of
execution. Docs were updated with examples of how to use
``pgtrigger.ignore`` and how to utilize it to create
"official" interfaces.
- Allow custom naming of triggers [Wes Kendall, 864d653]
Triggers can be given a "name" attribute that is used when generating
the trigger and obtaining it from the registry. This will not only
make trigger management in the future easier, but it will also make
it possible to dynamically ignore specific triggers registered to
models.
## 1.1.0 (2020-07-21)
### Feature
- Added "Referencing" construct for statement-level triggers. [Wes Kendall, 20d958e]
The ``pgtrigger.Referencing`` construct allows one to reference
transition tables in statement-level triggers.
- Added statement-level triggers. [Wes Kendall, c0cc365]
django-pgtrigger now has a "level" construct for specifying
row and statement-level triggers. All triggers default to being
row-level triggers.
### Trivial
- Support the "INSTEAD OF" construct for views on SQL triggers. [Wes Kendall, 79f9d54]
- Updated docs and added a quick start section [Wes Kendall, 9ce7b29]
## 1.0.1 (2020-06-29)
### Trivial
- Updated README and updated with the latest public django app template. [Wes Kendall, 001ef68]
## 1.0.0 (2020-06-27)
### Api-Break
- Initial release of django-pgtrigger. [Wes Kendall, 1f737f0]
``django-pgtrigger`` provides primitives for configuring
`Postgres triggers <https://www.postgresql.org/docs/current/sql-createtrigger.html>`__
on Django models.
Models can be decorated with `pgtrigger.register` and supplied with
`pgtrigger.Trigger` objects. These will automatically be installed after
migrations. Users can use Django idioms such as ``Q`` and ``F`` objects to
declare trigger conditions, alleviating the need to write raw SQL for a large
amount of use cases.
``django-pgtrigger`` comes built with some derived triggers for expressing
common patterns. For example, `pgtrigger.Protect` can protect operations
on a model, such as deletions or updates (e.g. an append-only model). The
`pgtrigger.Protect` trigger can even target protecting operations on
specific updates of fields (e.g. don't allow updates if ``is_active`` is
``False`` on a model). Another derived trigger, `pgtrigger.SoftDelete`,
can soft-delete models by setting a field to ``False`` when a deletion
happens on the model.
Contributing Guide
==================
This project was created using footing.
For more information about footing, go to the
`footing docs <https://github.com/Opus10/footing>`_.
Setup
~~~~~
Set up your development environment with::
git clone git@github.com:Opus10/django-pgtrigger.git
cd django-pgtrigger
make docker-setup
``make docker-setup`` will set up a development environment managed by Docker.
Install docker `here <https://www.docker.com/get-started>`_ and be sure
it is running when executing any of the commands below.
If you prefer a native development environment,
``make conda-setup`` will set up a development environment managed
by `Conda <https://conda.io>`__. The database must be run manually.
Testing and Validation
~~~~~~~~~~~~~~~~~~~~~~
Run the tests on one Python version with::
make test
Run the full test suite against all supported Python versions with::
make full-test-suite
Validate the code with::
make lint
If your code fails the ``black`` check, automatically format your code with::
make format
Committing
~~~~~~~~~~
This project uses `git-tidy <https://github.com/Opus10/git-tidy>`_ to produce structured
commits with git trailers. Information from commit messages is used to generate release
notes and bump the version properly.
To do a structured commit with ``git-tidy``, do::
make tidy-commit
All commits in a pull request must be tidy commits that encapsulate a
change. Ideally entire features or bug fixes are encapsulated in a
single commit. Squash all of your commits into a tidy commit with::
make tidy-squash
To check if your commits pass linting, do::
make tidy-lint
Note, the above command lints every commit since branching from master.
You can also run ``make shell`` and run ``git tidy`` commands inside
the docker environment to do other flavors of ``git tidy`` commands.
Documentation
~~~~~~~~~~~~~
`Sphinx <http://www.sphinx-doc.org/>`_ documentation can be built with::
make docs
The static HTML files are stored in the ``docs/_build/html`` directory.
A shortcut for opening them (on OSX) is::
make open-docs
Releases and Versioning
~~~~~~~~~~~~~~~~~~~~~~~
Anything that is merged into the master branch will be automatically deployed
to PyPI. Documentation will be published to ReadTheDocs at
``https://django-pgtrigger.readthedocs.io/``.
The following files will be generated and should *not* be edited by a user:
* ``CHANGELOG.md`` - Contains an automatically-generated change log for
each release.
This project uses `Semantic Versioning <http://semver.org>`_ by analyzing
``Type:`` trailers on git commit messages (trailers are added when using
``git tidy-commit``). In order to bump the minor
version, use "feature" or "bug" as the type.
In order to bump the major version, use "api-break". The patch version
will be updated automatically if none of these tags are present.
LICENSE
Copyright (c) 2022, Opus 10
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL OPUS 10 BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Makefile
# Makefile for packaging and testing django-pgtrigger
#
# This Makefile has the following targets:
#
# setup - Sets up the development environment
# dependencies - Installs dependencies
# clean-docs - Clean the documentation folder
# open-docs - Open any docs generated with "make docs"
# docs - Generated sphinx docs
# lint - Run code linting and static checks
# format - Format code using black
# test - Run tests using pytest
# full-test-suite - Run full test suite using tox
# shell - Run a shell in a virtualenv
# docker-teardown - Spin down docker resources
OS = $(shell uname -s)
PACKAGE_NAME=django-pgtrigger
MODULE_NAME=pgtrigger
SHELL=bash
DATABASE_URL?=postgres://postgres:postgres@db:5432/postgres
ifeq (${OS}, Linux)
DOCKER_CMD?=sudo docker
DOCKER_RUN_ARGS?=-v /home:/home -v $(shell pwd):/code -e EXEC_WRAPPER="" -u "$(shell id -u):$(shell id -g)" -v /etc/passwd:/etc/passwd
# The user can be passed to docker exec commands in Linux.
# For example, "make shell user=root" for access to apt-get commands
user?=$(shell id -u)
group?=$(shell id ${user} -g)
EXEC_WRAPPER?=$(DOCKER_CMD) exec --user="$(user):$(group)" -it $(PACKAGE_NAME)
else ifeq (${OS}, Darwin)
DOCKER_CMD?=docker
DOCKER_RUN_ARGS?=-v ~/:/home/circleci -v $(shell pwd):/code -e EXEC_WRAPPER=""
EXEC_WRAPPER?=$(DOCKER_CMD) exec -it $(PACKAGE_NAME)
endif
# Docker run mounts the local code directory, SSH (for git), and global git config information
DOCKER_RUN_CMD?=$(DOCKER_CMD)-compose run --name $(PACKAGE_NAME) $(DOCKER_RUN_ARGS) -d app
# Print usage of main targets when user types "make" or "make help"
.PHONY: help
help:
ifndef run
@echo "Please choose one of the following targets: \n"\
" docker-setup: Setup Docker development environment\n"\
" conda-setup: Setup Conda development environment\n"\
" lock: Lock dependencies\n"\
" dependencies: Install dependencies\n"\
" shell: Start a shell\n"\
" test: Run tests\n"\
" tox: Run tests against all versions of Python\n"\
" lint: Run code linting and static checks\n"\
" format: Format code using Black\n"\
" docs: Build Sphinx documentation\n"\
" open-docs: Open built documentation\n"\
" docker-teardown: Spin down docker resources\n"\
"\n"\
"View the Makefile for more documentation"
@exit 2
else
$(EXEC_WRAPPER) $(run)
endif
# Pull the latest container and start a detached run
.PHONY: docker-start
docker-start:
$(DOCKER_CMD)-compose pull
$(DOCKER_RUN_CMD)
# Lock dependencies
.PHONY: lock
lock:
$(EXEC_WRAPPER) poetry lock --no-update
# Install dependencies
.PHONY: dependencies
dependencies:
mkdir -p .venv
$(EXEC_WRAPPER) poetry install --no-ansi
.PHONY: multi-db-setup
multi-db-setup:
-$(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE DATABASE ${MODULE_NAME}_local_other WITH TEMPLATE ${MODULE_NAME}_local"
$(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS \"order\""
$(DOCKER_EXEC_WRAPPER) psql $(DATABASE_URL) -c "CREATE SCHEMA IF NOT EXISTS receipt;"
# Set up git configuration
.PHONY: git-setup
git-setup:
$(EXEC_WRAPPER) git tidy --template -o .gitcommit.tpl
$(EXEC_WRAPPER) git config --local commit.template .gitcommit.tpl
# Sets up the local database
.PHONY: db-setup
db-setup:
-psql postgres -c "CREATE USER postgres;"
-psql postgres -c "ALTER USER postgres SUPERUSER;"
-psql postgres -c "CREATE DATABASE ${MODULE_NAME}_local OWNER postgres;"
-psql postgres -c "GRANT ALL PRIVILEGES ON DATABASE ${MODULE_NAME}_local to postgres;"
$(EXEC_WRAPPER) python manage.py migrate
# Sets up a conda development environment
.PHONY: conda-create
conda-create:
-conda env create -f environment.yml --force
$(EXEC_WRAPPER) poetry config virtualenvs.create false --local
# Sets up a Conda development environment
.PHONY: conda-setup
conda-setup: EXEC_WRAPPER=conda run -n ${PACKAGE_NAME} --no-capture-output
conda-setup: conda-create lock dependencies git-setup db-setup
# Sets up a Docker development environment
.PHONY: docker-setup
docker-setup: docker-teardown docker-start lock dependencies git-setup
# Spin down docker resources
.PHONY: docker-teardown
docker-teardown:
$(DOCKER_CMD)-compose down --remove-orphans
# Run a shell
.PHONY: shell
shell:
$(EXEC_WRAPPER) /bin/bash
# Run pytest
.PHONY: test
test:
$(EXEC_WRAPPER) pytest
# Run full test suite
.PHONY: full-test-suite
full-test-suite:
$(EXEC_WRAPPER) tox
# Clean the documentation folder
.PHONY: clean-docs
clean-docs:
-$(EXEC_WRAPPER) bash -c 'cd docs && make clean'
# Open the build docs (only works on Mac)
.PHONY: open-docs
open-docs:
ifeq (${OS}, Darwin)
open docs/_build/html/index.html
else ifeq (${OS}, Linux)
xdg-open docs/_build/html/index.html
else
@echo "Open 'docs/_build/html/index.html' to view docs"
endif
# Build Sphinx autodocs
.PHONY: docs
docs: clean-docs # Ensure docs are clean, otherwise weird render errors can result
$(EXEC_WRAPPER) bash -c 'cd docs && make html'
# Run code linting and static analysis. Ensure docs can be built
.PHONY: lint
lint:
$(EXEC_WRAPPER) black . --check
$(EXEC_WRAPPER) flake8 -v ${MODULE_NAME}
$(EXEC_WRAPPER) footing update --check
$(EXEC_WRAPPER) bash -c 'cd docs && make html'
# Lint commit messages
.PHONY: tidy-lint
tidy-lint:
$(EXEC_WRAPPER) git tidy-lint origin/master..
# Perform a tidy commit
.PHONY: tidy-commit
tidy-commit:
$(EXEC_WRAPPER) git tidy-commit
# Perform a tidy squash
.PHONY: tidy-squash
tidy-squash:
$(EXEC_WRAPPER) git tidy-squash origin/master
# Format code with black
.PHONY: format
format:
$(EXEC_WRAPPER) black .
django-pgtrigger
################
``django-pgtrigger`` helps you write
`Postgres triggers <https://www.postgresql.org/docs/current/sql-createtrigger.html>`__
for your Django models.
Why should I use triggers?
==========================
Triggers can solve a variety of complex problems more reliably, performantly, and succinctly than application code.
For example,
* Protecting operations on rows or columns (``pgtrigger.Protect``).
* Making read-only models or fields (``pgtrigger.ReadOnly``).
* Soft-deleting models (``pgtrigger.SoftDelete``).
* Snapshotting and tracking model changes (`django-pghistory <https://django-pghistory.readthedocs.io/>`__).
* Enforcing field transitions (``pgtrigger.FSM``).
* Keeping a search vector updated for full-text search (``pgtrigger.UpdateSearchVector``).
* Building official interfaces
(e.g. enforcing use of ``User.objects.create_user`` and not
``User.objects.create``).
* Versioning models, mirroring fields, computing unique model hashes, and the list goes on...
All of these examples require no overridden methods, no base models, and no signal handling.
Quick start
===========
Install ``django-pgtrigger`` with ``pip3 install django-pgtrigger`` and
add ``pgtrigger`` to ``settings.INSTALLED_APPS``.
``pgtrigger.Trigger`` objects are added to ``triggers`` in model
``Meta``. ``django-pgtrigger`` comes with several trigger classes,
such as ``pgtrigger.Protect``. In the following, we're protecting
the model from being deleted:
.. code-block:: python
class ProtectedModel(models.Model):
"""This model cannot be deleted!"""
class Meta:
triggers = [
pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete)
]
When migrations are created and executed, ``ProtectedModel`` will raise an
exception anytime a deletion is attempted.
Let's extend this example further and only protect deletions on inactive objects.
In this example, the trigger conditionally runs when the row being deleted
(the ``OLD`` row in trigger terminology) is still active:
.. code-block:: python
class ProtectedModel(models.Model):
"""Active object cannot be deleted!"""
is_active = models.BooleanField(default=True)
class Meta:
triggers = [
pgtrigger.Protect(
name="protect_deletes",
operation=pgtrigger.Delete,
condition=pgtrigger.Q(old__is_active=True)
)
]
``django-pgtrigger`` uses ``pgtrigger.Q`` and ``pgtrigger.F`` objects to
conditionally execute triggers based on the ``OLD`` and ``NEW`` rows.
Combining these Django idioms with ``pgtrigger.Trigger`` objects
can solve a wide variety of problems without ever writing SQL. Users,
however, can still use raw SQL for complex cases.
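For example, here is a minimal sketch (hypothetical model and field) that uses
``pgtrigger.F`` to block any update that changes a ``status`` field:
.. code-block:: python

    class Order(models.Model):
        status = models.CharField(max_length=32)

        class Meta:
            triggers = [
                # Raise an error when "status" changes, i.e. when the OLD value
                # is distinct from the NEW value
                pgtrigger.Protect(
                    name="protect_status_changes",
                    operation=pgtrigger.Update,
                    condition=pgtrigger.Q(old__status__df=pgtrigger.F("new__status")),
                )
            ]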
Triggers are installed like other database objects. Run
``python manage.py makemigrations`` and ``python manage.py migrate`` to install triggers.
If triggers are new to you, don't worry.
The `pgtrigger docs <https://django-pgtrigger.readthedocs.io/>`__ cover triggers in
more detail and provide many examples.
Compatibility
=============
``django-pgtrigger`` is compatible with Python 3.7 - 3.11, Django 3.2 - 4.2, Psycopg 2 - 3, and Postgres 10 - 15.
Documentation
=============
`View the pgtrigger docs here <https://django-pgtrigger.readthedocs.io/>`__ to
learn more about:
* Trigger basics and motivation for using triggers.
* How to use the built-in triggers and how to build custom ones.
* Installing triggers on third-party models, many-to-many fields, and other
advanced scenarios.
* Ignoring triggers dynamically and deferring trigger execution.
* Multiple database, schema, and partitioning support.
* Frequently asked questions, common issues, and upgrading.
* The commands, settings, and module.
Installation
============
Install django-pgtrigger with::
pip3 install django-pgtrigger
After this, add ``pgtrigger`` to the ``INSTALLED_APPS``
setting of your Django project.
Other Material
==============
After you've read the docs, check out
`this tutorial <https://wesleykendall.github.io/django-pgtrigger-tutorial/>`__
with interactive examples from a Django meetup talk.
The `DjangoCon 2021 talk <https://www.youtube.com/watch?v=Tte3d4JjxCk>`__
also breaks down triggers and shows several examples.
Contributing Guide
==================
For information on setting up django-pgtrigger for development and
contributing changes, view `CONTRIBUTING.rst <CONTRIBUTING.rst>`_.
Primary Authors
===============
- @wesleykendall (Wes Kendall, wesleykendall@protonmail.com)
Other Contributors
==================
- @jzmiller1
- @rrauenza
- @ralokt
- @adamchainz
devops.py
#!/usr/bin/env python3
"""
Devops functions for this package. Includes functions for automated
package deployment, changelog generation, and changelog checking.
This script is generated by the template at
https://github.com/Opus10/public-django-app-template
Do not change this script! Any fixes or updates to this script should be made
to https://github.com/Opus10/public-django-app-template
"""
import os
import subprocess
import sys
import tempfile
from packaging import version
CIRCLECI_ENV_VAR = "CIRCLECI"
class Error(Exception):
"""Base exception for this script"""
class NotOnCircleCIError(Error):
"""Thrown when not running on CircleCI"""
def _check_git_version():
"""Verify git version"""
git_version = _shell_stdout("git --version | rev | cut -f 1 -d' ' | rev")
if version.parse(git_version) < version.parse("2.22.0"):
raise RuntimeError(f"Must have git version >= 2.22.0 (version = {git_version})")
def _shell(cmd, check=True, stdin=None, stdout=None, stderr=None): # pragma: no cover
"""Runs a subprocess shell with check=True by default"""
return subprocess.run(cmd, shell=True, check=check, stdin=stdin, stdout=stdout, stderr=stderr)
def _shell_stdout(cmd, check=True):
"""Runs a shell command and returns stdout"""
ret = _shell(cmd, stdout=subprocess.PIPE, check=check)
return ret.stdout.decode("utf-8").strip() if ret.stdout else ""
def _configure_git():
"""Configure git name/email and verify git version"""
_check_git_version()
_shell('git config --local user.email "wesleykendall@protonmail.com"')
_shell('git config --local user.name "Opus 10 Devops"')
_shell("git config push.default current")
def _find_latest_tag():
return _shell_stdout("git describe --tags --abbrev=0", check=False)
def _find_sem_ver_update():
"""
Find the semantic version string based on the commit log.
Defaults to returning "patch"
"""
sem_ver = "patch"
latest_tag = _find_latest_tag()
log_section = f"{latest_tag}..HEAD" if latest_tag else ""
cmd = (
f"git log {log_section} --pretty='%(trailers:key=type,valueonly)'"
" | grep -q {sem_ver_type}"
)
change_types_found = {
change_type: _shell(cmd.format(sem_ver_type=change_type), check=False).returncode == 0
for change_type in ["bug", "feature", "api-break"]
}
if change_types_found["api-break"]:
sem_ver = "major"
elif change_types_found["bug"] or change_types_found["feature"]:
sem_ver = "minor"
return sem_ver
def _update_package_version():
"""Apply semantic versioning to package based on git commit messages"""
# Obtain the current version
old_version = _shell_stdout("poetry version | rev | cut -f 1 -d' ' | rev")
if old_version == "0.0.0":
old_version = ""
latest_tag = _find_latest_tag()
if old_version and version.parse(old_version) != version.parse(latest_tag):
raise RuntimeError(
f'The latest tag "{latest_tag}" and the current version'
f' "{old_version}" do not match.'
)
# Find out the sem-ver tag to apply
sem_ver = _find_sem_ver_update()
_shell(f"poetry version {sem_ver}")
# Get the new version
new_version = _shell_stdout("poetry version | rev | cut -f 1 -d' ' | rev")
if new_version == old_version:
raise RuntimeError(f'Version update could not be applied (version = "{old_version}")')
return old_version, new_version
def _generate_changelog_and_tag(old_version, new_version):
"""Generates a change log using git-tidy and tags repo"""
# Tag the version temporarily so that changelog generation
# renders properly
_shell(f'git tag -f -a {new_version} -m "Version {new_version}"')
# Generate the full changelog
_shell("git tidy-log > CHANGELOG.md")
# Generate a requirements.txt for readthedocs.org
_shell("poetry export --dev --without-hashes -f requirements.txt > docs/requirements.txt")
_shell('echo "." >> docs/requirements.txt')
# Add all updated files
_shell("git add pyproject.toml CHANGELOG.md docs/requirements.txt")
# Use [skip ci] to ensure CircleCI doesn't recursively deploy
_shell(
'git commit --no-verify -m "Release version'
f' {new_version} [skip ci]" -m "Type: trivial"'
)
# Create release notes just for this release so that we can use them in
# the commit message
with tempfile.NamedTemporaryFile() as commit_msg_file:
_shell(f'echo "{new_version}\n" > {commit_msg_file.name}')
tidy_log_args = f"^{old_version} HEAD" if old_version else "HEAD"
_shell(f"git tidy-log {tidy_log_args} >> {commit_msg_file.name}")
# Update the tag so that it includes the latest release messages and
# the automated commit
_shell(f"git tag -d {new_version}")
_shell(f"git tag -f -a {new_version} -F {commit_msg_file.name}" " --cleanup=whitespace")
def _publish_to_pypi():
"""
Uses poetry to publish to pypi
"""
if "PYPI_USERNAME" not in os.environ or "PYPI_PASSWORD" not in os.environ:
raise RuntimeError("Must set PYPI_USERNAME and PYPI_PASSWORD env vars")
_shell("poetry config http-basic.pypi ${PYPI_USERNAME} ${PYPI_PASSWORD}")
_shell("poetry build")
_shell("poetry publish -vvv -n", stdout=subprocess.PIPE)
def _build_and_push_distribution():
"""
Builds and pushes distribution to PyPI, along with pushing the
tags back to the repo
"""
_publish_to_pypi()
# Push the code changes after successful PyPI deploy
_shell("git push --follow-tags")
def deploy():
"""Deploys the package and uploads documentation."""
# Ensure proper environment
if not os.environ.get(CIRCLECI_ENV_VAR): # pragma: no cover
raise NotOnCircleCIError("Must be on CircleCI to run this script")
_configure_git()
old_version, new_version = _update_package_version()
_generate_changelog_and_tag(old_version, new_version)
_build_and_push_distribution()
print(f"Deployment complete. Latest version is {new_version}")
if __name__ == "__main__":
if sys.argv[-1] == "deploy":
deploy()
else:
raise RuntimeError(f'Invalid subcommand "{sys.argv[-1]}"')
version: "3.3"
services:
db:
image: cimg/postgres:14.4
volumes:
- ./.db:/var/lib/postgresql/data
environment:
- POSTGRES_NAME=postgres
- POSTGRES_USER=postgres
- POSTGRES_PASSWORD=postgres
app:
image: opus10/circleci-public-django-app
environment:
- DATABASE_URL=postgres://postgres:postgres@db:5432/postgres
depends_on:
- db
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS = -W
SPHINXBUILD = sphinx-build
SPHINXPROJ = django-pgtrigger
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.. _advanced_db:
Advanced Database Setups
========================
Here we cover details about more advanced database setups that
might impact how you use triggers.
Multiple Databases
------------------
Triggers are migrated for multiple databases just like models. If you define a
custom router, triggers will be installed based on ``allow_migrate``.
See the `the Django docs on multiple databases <https://docs.djangoproject.com/en/4.1/topics/db/multi-db/>`__
for more info.
.. warning::
If you migrate triggers and afterwards change the behavior of the router's
``allow_migrate``, you risk having orphaned triggers installed on tables.
The management commands and core installation functions work the same way,
targeting an individual database like Django's ``migrate`` command.
Each command can be supplied with a ``-d`` or ``--database`` option.
For example, ``python manage.py pgtrigger install --database other`` will
install all of the triggers on the ``other`` database.
If ``allow_migrate`` ignores a particular model for a database, the
installation status will show as ``UNALLOWED`` when using
``python manage.py pgtrigger ls``.
.. note::
If you've configured ``settings.PGTRIGGER_INSTALL_ON_MIGRATE``, triggers will
be installed for the same database as the ``migrate`` command.
Dynamic runtime functions `pgtrigger.ignore`, `pgtrigger.schema`, and
`pgtrigger.constraints` operate on all postgres databases at once
unless the ``databases`` argument is provided.
Schemas
-------
There are two common ways of using Postgres schemas in Django, both of which
work with ``django-pgtrigger``:
1. Create a database in ``settings.DATABASES`` for each schema, configuring the
``search_path`` in the ``OPTIONS``.
2. Use an app like `django-tenants <https://github.com/django-tenants/django-tenants>`__
to dynamically set the ``search_path`` for a single database.
When using the first approach, use the multi-database support detailed in
the previous section. For the second approach, ``django-pgtrigger``
comes with the following functionality to dynamically set the ``search_path``:
1. Pass ``--schema`` (``-s``) arguments for management
commands. For example, this sets ``search_path`` to ``myschema,public``
and shows trigger installation status relative to those schemas::
python manage.py pgtrigger ls -s myschema -s public
2. Programmatically set the search path with `pgtrigger.schema`.
For example, this sets the ``search_path`` to ``myschema,public``::
with pgtrigger.schema("myschema", "public"):
# search_path is set to "myschema,public". Any nested invocations of
# pgtrigger.schema will append to the path if not currently
# present
.. note::
If you find yourself wrapping the ``django-pgtrigger`` API with `pgtrigger.schema`,
open an issue and let us know about your use case. We may consider making it a
first-class citizen in the API if it's common.
The final thing to keep in mind with multi-schema support is that `pgtrigger.ignore`
uses a special Postgres function for ignoring triggers that's installed under
the public schema. The function is always referenced with a fully-qualified name.
If you don't use the public schema, configure the schema with
``settings.PGTRIGGER_SCHEMA``. Setting this to ``None`` uses a relative path when
installing and calling the function.
Partitions
----------
``django-pgtrigger`` supports tables that use `Postgres table partitioning <https://www.postgresql.org/docs/current/ddl-partitioning.html>`__ with no additional configuration.
.. note::
Row-level triggers are only available for partitioned tables in Postgres 13 and above.
Triggers cannot be installed or uninstalled on a per-partition basis. Installing a trigger on a partitioned
table installs it for all partitions.
.. _advanced_installation:
Advanced Installation
=====================
Third-party models
------------------
Install triggers on third-party models by declaring them on a proxy model.
For example, here we protect Django's ``User`` model from being deleted:
.. code-block:: python
class UserProxy(User):
class Meta:
proxy = True
triggers = [
pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
]
Default many-to-many "through" models
-------------------------------------
Similar to third-party models, we can also install triggers against default
many-to-many "through" models by using a proxy model.
Here we protect Django ``User`` group relationships from being deleted:
.. code-block:: python
class UserGroupTriggers(User.groups.through):
class Meta:
proxy = True
triggers = [
pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
]
.. warning::
Django doesn't fully support making proxy models from default through relationships.
Reversing migrations can sometimes throw ``InvalidBases`` errors.
We recommend creating a custom through model when possible. See
the `Django docs on making custom "through" models <https://docs.djangoproject.com/en/4.0/topics/db/models/#extra-fields-on-many-to-many-relationships>`__.
Declaring triggers in base models
---------------------------------
Triggers can be declared in an abstract model and inherited. Here is a base model for
soft-delete models:
.. code-block:: python
class BaseSoftDelete(models.Model):
is_active = models.BooleanField(default=True)
class Meta:
abstract = True
triggers = [pgtrigger.SoftDelete(name="soft_delete", field="is_active")]
Keep in mind that ``Meta`` class inheritance follows standard Django convention. If
the child model defines a ``Meta`` class, you will need to inherit the parent's
``Meta`` class like so:
.. code-block:: python
class ChildModel(BaseSoftDelete):
class Meta(BaseSoftDelete.Meta):
ordering = ["is_active"]
Programmatically registering triggers
-------------------------------------
Triggers can be registered programmatically with `pgtrigger.register`.
It can be used as a decorator on a model or called like so:
.. code-block:: python
# Register a protection trigger for a model
pgtrigger.register(pgtrigger.Protect(...))(MyModel)
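The decorator form looks like this (a sketch with a hypothetical trigger and model):
.. code-block:: python

    @pgtrigger.register(
        pgtrigger.Protect(name="protect_deletes", operation=pgtrigger.Delete)
    )
    class MyModel(models.Model):
        ...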
.. warning::
Although triggers can be registered programmatically, we don't recommend doing
this except for advanced use cases. Registering a trigger
to a model of a third-party app will create migrations in that app. This could
result in migrations not being added to your codebase, which can result in triggers
not being installed.
.. _turning_off_migrations:
Turning off migration integration
---------------------------------
``django-pgtrigger`` patches Django's migration system so that triggers are installed
and updated in migrations. If this is undesirable, you can
disable the migration integration by setting ``settings.PGTRIGGER_MIGRATIONS`` to
``False``. After this, you are left with two options:
1. Manually install triggers with the commands detailed in the next section.
2. Run trigger installation after every ``python manage.py migrate`` by setting
``settings.PGTRIGGER_INSTALL_ON_MIGRATE`` to ``True``. Keep in mind that
reversing migrations can cause issues when installing triggers this way.
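As a minimal sketch, the second option corresponds to settings like these:
.. code-block:: python

    # settings.py
    PGTRIGGER_MIGRATIONS = False  # Don't integrate triggers with migrations
    PGTRIGGER_INSTALL_ON_MIGRATE = True  # Install triggers after "migrate" runs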
.. warning::
There are known issues with installing triggers after migrations that
cannot be fixed. For example, reversing migrations can result in trigger
installation errors, and race conditions can happen if triggers are
installed after the underlying tables have been migrated.
Manual installation, enabling, and disabling
--------------------------------------------
The following commands allow one to manually manage trigger installation
and are detailed more in the :ref:`commands` section:
.. danger::
These commands are global operations. Use them with extreme caution, especially if
the triggers are managed by migrations. If you need to temporarily ignore
a trigger inside your application, see the
:ref:`ignoring_triggers` section.
* ``python manage.py pgtrigger install``: Install triggers
* ``python manage.py pgtrigger uninstall``: Uninstall triggers
* ``python manage.py pgtrigger enable``: Enable triggers
* ``python manage.py pgtrigger disable``: Disable triggers
* ``python manage.py pgtrigger prune``: Uninstall triggers created by ``django-pgtrigger``
that are no longer in the codebase.
Showing installation status
---------------------------
Use ``python manage.py pgtrigger ls`` to see the installation status of individual triggers
or all triggers at once. View the :ref:`commands` section for descriptions of the different
installation states.
.. _basics:
Basics
======
The anatomy of a trigger
~~~~~~~~~~~~~~~~~~~~~~~~
Postgres triggers are database functions written in PL/pgSQL that execute based on events
and conditions.
The `pgtrigger.Trigger` object is the base class for all triggers in ``django-pgtrigger``.
Its attributes mirror the syntax required for
`making a Postgres trigger <https://www.postgresql.org/docs/current/sql-createtrigger.html>`__.
Here are the most common attributes you'll use:
* **name**
The identifying name of the trigger. It must be unique for every model and
be less than 48 characters.
* **operation**
The table operation that fires a trigger. Operations are `pgtrigger.Update`,
`pgtrigger.Insert`, `pgtrigger.Delete`,
`pgtrigger.Truncate`, or `pgtrigger.UpdateOf`.
They can be ``OR``-ed
together (e.g. ``pgtrigger.Insert | pgtrigger.Update``)
to configure triggers on a combination of operations.
.. note::
`pgtrigger.UpdateOf` fires when columns appear in an ``UPDATE``
statement. It will not fire if other triggers update the columns.
See the notes in the
`Postgres docs <https://www.postgresql.org/docs/12/sql-createtrigger.html>`__
for more information.
.. note::
Some conditions cannot be combined. For
example, `pgtrigger.UpdateOf` cannot be combined with other
operations.
* **when**
When the trigger should run in relation to the operation.
`pgtrigger.Before` executes the trigger before the operation, and
vice versa for `pgtrigger.After`. `pgtrigger.InsteadOf` is used for SQL views.
.. note::
`pgtrigger.Before` and `pgtrigger.After` can be used on SQL views
under some circumstances. See
`the Postgres docs <https://www.postgresql.org/docs/12/sql-createtrigger.html>`__
for a breakdown.
* **condition** *(optional)*
Conditionally execute the trigger based on the ``OLD``
or ``NEW`` rows.
`pgtrigger.Condition` objects accept `pgtrigger.Q` and `pgtrigger.F`
objects for constructing ``WHERE`` clauses with the ``OLD`` and ``NEW`` rows.
Conditions can also be created from raw SQL. See the :ref:`cookbook` for
more examples.
.. note::
Be sure to familiarize yourself with ``OLD`` and ``NEW`` rows when
writing conditions by consulting the `Postgres docs <https://www.postgresql.org/docs/current/plpgsql-trigger.html>`__.
For example, ``OLD`` is always ``NULL`` in `pgtrigger.Insert` triggers.
Here are attributes you'll need when writing more complex
triggers.
* **func**
The raw PL/pgSQL function that is executed.
.. note::
This is *not* the entire declared trigger function, but rather
the snippet of PL/pgSQL that is nested in the
``DECLARE ... BEGIN ... END`` portion of the trigger.
* **declare** *(optional)*
Define additional variable declarations as a list of ``(variable_name, variable_type)`` tuples.
For example ``declare=[('my_var_1', 'BOOLEAN'), ('my_var_2', 'JSONB')]``.
* **level** *(optional, default=pgtrigger.Row)*
Configures the trigger to fire once for every row (`pgtrigger.Row`) or once for
every statement (`pgtrigger.Statement`).
* **referencing** *(optional)*
References the ``OLD`` and ``NEW`` rows as transition tables in statement-level triggers.
For example, ``pgtrigger.Referencing(old='old_table_name', new='new_table_name')``
will make an ``old_table_name`` and ``new_table_name`` table available
as transition tables. See
`this StackExchange answer <https://dba.stackexchange.com/a/177468>`__ for additional
details, and see the :ref:`cookbook` for an example.
.. note::
The ``REFERENCING`` construct for statement-level triggers is only available
in Postgres 10 and up.
* **timing** *(optional)*
Create a deferrable ``CONSTRAINT`` trigger when set. Use `pgtrigger.Immediate` to
execute the trigger at the end of a statement and `pgtrigger.Deferred` to execute it
at the end of a transaction.
.. note::
Deferrable triggers must have the ``level`` set to `pgtrigger.Row` and ``when``
set to `pgtrigger.After`.
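To illustrate how these pieces fit together, here is a rough sketch (hypothetical
model and function body) of a statement-level trigger that uses ``func``, ``declare``,
and ``referencing``:
.. code-block:: python

    import pgtrigger
    from django.db import models


    class Entry(models.Model):
        class Meta:
            triggers = [
                pgtrigger.Trigger(
                    name="log_insert_count",
                    level=pgtrigger.Statement,
                    when=pgtrigger.After,
                    operation=pgtrigger.Insert,
                    # Expose the inserted rows as a "new_rows" transition table
                    referencing=pgtrigger.Referencing(new="new_rows"),
                    # Extra variables for the DECLARE block
                    declare=[("row_count", "INTEGER")],
                    func="""
                        SELECT COUNT(*) INTO row_count FROM new_rows;
                        RAISE NOTICE 'inserted % rows', row_count;
                        RETURN NULL;
                    """,
                )
            ]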
Defining and installing triggers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Triggers are defined in the ``triggers`` attribute of the model ``Meta``
class. For example, this trigger protects the model from being
deleted:
.. code-block:: python
from django.db import models
import pgtrigger
class CannotDelete(models.Model):
class Meta:
triggers = [
pgtrigger.Protect(name='protect_deletes', operation=pgtrigger.Delete)
]
Triggers are installed by first running ``python manage.py makemigrations`` and then ``python manage.py migrate``.
If you'd like to install a trigger on a model of a third-party app, see the
:ref:`advanced_installation` section. This section also covers how you can manually install,
enable, and disable triggers globally.
.. _advantages_of_triggers:
The advantages over signals and model methods
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are three key advantages to using triggers over implementing the logic
in a `Django signal handler <https://docs.djangoproject.com/en/4.1/topics/signals/>`__
or by overriding model methods:
1. **Reliability**: Unlike Python code, triggers run alongside queries in the database, ensuring that nothing
falls through the cracks. On the other hand, signals and model methods can provide a false sense of security.
For example, signals aren't fired for ``bulk_create``, and custom model methods aren't called in data
migrations by default. Third-party apps that bypass the ORM will also skip signals and model methods entirely.
2. **Complexity**: Complexity can balloon when trying to override models, managers, or querysets to accomplish the
same logic a trigger can support. Even simple routines such as conditionally running code based on a
changed field are difficult to implement correctly and prone to race conditions (see the sketch after this list).
3. **Performance**: Triggers can perform SQL queries without needing to do expensive round trips to the
database to fetch data. This can be a major performance enhancement for routines like history tracking
or data denormalization.
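As a hedged illustration of the second point, the sketch below resets a
(hypothetical) ``email_verified`` flag only when the ``email`` column actually
changes. The condition is evaluated in the database against the ``OLD`` and
``NEW`` rows, so it also covers bulk updates and code that bypasses the ORM;
the model and trigger names are made up for this example.

.. code-block:: python

    from django.db import models

    import pgtrigger


    class Profile(models.Model):
        # Hypothetical model used only for this sketch
        email = models.EmailField()
        email_verified = models.BooleanField(default=False)

        class Meta:
            triggers = [
                pgtrigger.Trigger(
                    name="reset_email_verification",
                    level=pgtrigger.Row,
                    when=pgtrigger.Before,
                    operation=pgtrigger.Update,
                    # Raw-SQL condition: only fire when the email changes
                    condition=pgtrigger.Condition(
                        "OLD.email IS DISTINCT FROM NEW.email"
                    ),
                    # Modify NEW before the row is written
                    func="NEW.email_verified := false; RETURN NEW;",
                ),
            ]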
.. _commands:
Commands
========
``django-pgtrigger`` comes with the ``python manage.py pgtrigger`` command,
which has several subcommands that are described below.
ls
--
List all triggers managed by ``django-pgtrigger``.
**Options**
[uris ...]
Trigger URIs to list.
-d, --database List triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
**Output**
The following installation status markers are displayed:
- ``INSTALLED``: The trigger is installed and up to date.
- ``OUTDATED``: The trigger is installed, but it has not been migrated
  to the current version.
- ``UNINSTALLED``: The trigger is not installed.
- ``PRUNE``: The trigger is no longer in the codebase but is still installed.
- ``UNALLOWED``: Trigger installation is not allowed for this database.
  Only applicable in a multi-database environment.
Note that every installed trigger, including ones that will be pruned,
will show whether they are enabled or disabled. Disabled triggers are
installed but do not run.
install
-------
Install triggers. If no arguments are
provided, all triggers are installed and orphaned triggers are pruned.
**Options**
[uris ...]
Trigger URIs to install.
-d, --database Install triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
uninstall
---------
Uninstall triggers. If no arguments are
provided, all triggers are uninstalled and orphaned triggers are pruned.
.. danger::
Running ``uninstall`` will globally uninstall triggers.
If you need to temporarily ignore a trigger, see the :ref:`ignoring_triggers` section.
**Options**
[uris ...]
Trigger URIs to uninstall.
-d, --database Uninstall triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
enable
------
Enable triggers.
**Options**
[uris ...]
Trigger URIs to enable.
-d, --database Enable triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
disable
-------
Disable triggers.
.. danger::
Running ``disable`` will globally disable the execution of triggers.
If you need to temporarily ignore a trigger, see the :ref:`ignoring_triggers` section.
**Options**
[uris ...]
Trigger URIs to disable.
-d, --database Disable triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
prune
-----
Uninstall any triggers managed by ``django-pgtrigger`` that are no longer in the codebase.
.. note::
Pruning happens automatically when doing ``python manage.py pgtrigger install``
or ``python manage.py pgtrigger uninstall``.
**Options**
-d, --database Prune triggers on this database.
-s, --schema Use this schema as the search path. Can be provided multiple times.
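The subcommands above can also be invoked programmatically, for example from a
deployment script, using Django's ``call_command``. This is a minimal sketch;
it assumes the subcommand and flags are forwarded the same way as the shell
invocations documented above.

.. code-block:: python

    from django.core.management import call_command

    # Equivalent to "python manage.py pgtrigger install":
    # install all triggers and prune orphaned ones
    call_command("pgtrigger", "install")

    # Equivalent to "python manage.py pgtrigger ls --database default"
    call_command("pgtrigger", "ls", "--database", "default")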
# -*- coding: utf-8 -*-
#
# django-pgtrigger documentation build configuration file, created by
# sphinx-quickstart on Tue Feb 28 09:45:59 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import os
import subprocess
import sys
import django
import sphinx_rtd_theme
import pgtrigger
sys.path.insert(0, os.path.abspath("."))
sys.path.insert(0, os.path.abspath(".."))
# Set these environment variables to ensure that ReadTheDocs builds work with
# our Django settings
os.environ["DJANGO_SETTINGS_MODULE"] = "settings"
os.environ["DATABASE_URL"] = "postgres://postgres:postgres@db:5432/postgres"
os.environ["SPHINX"] = "True"
django.setup()
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"myst_parser",
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "toc"
# default role for "`" (makes it attempt to match against references
# within project)
default_role = "any"
# General information about the project.
project = "django-pgtrigger"
copyright = "2022, Opus 10"
author = "Opus 10 Engineering"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = pgtrigger.__version__
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "django-pgtriggerdoc"
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"django-pgtrigger.tex",
"django-pgtrigger Documentation",
"Opus 10",
"manual",
)
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
"django-pgtrigger",
"django-pgtrigger Documentation",
[author],
1,
)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"django-pgtrigger",
"django-pgtrigger Documentation",
author,
"django-pgtrigger",
"Postgres trigger support integrated with Django models.",
"Miscellaneous",
)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"https://docs.python.org/": None}