pax_global_header 0000666 0000000 0000000 00000000064 15130200524 0014502 g ustar 00root root 0000000 0000000 52 comment=c09209a29848d412e4995b7526d2fd8d49ceaa81
collerek-ormar-c09209a/ 0000775 0000000 0000000 00000000000 15130200524 0014751 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/.codeclimate.yml 0000664 0000000 0000000 00000000545 15130200524 0020027 0 ustar 00root root 0000000 0000000 version: "2"
checks:
method-complexity:
config:
threshold: 8
argument-count:
config:
threshold: 6
method-count:
config:
threshold: 25
method-length:
config:
threshold: 35
file-lines:
config:
threshold: 500
engines:
bandit:
enabled: true
checks:
assert_used:
enabled: false
collerek-ormar-c09209a/.codecov.yml 0000664 0000000 0000000 00000000741 15130200524 0017176 0 ustar 00root root 0000000 0000000 coverage:
precision: 2
round: down
range: "80...100"
status:
project: yes
patch: yes
changes: yes
comment:
layout: "reach, diff, flags, files"
behavior: default
require_changes: false # if true: only post the comment if coverage changes
require_base: no # [yes :: must have a base report to post]
require_head: yes # [yes :: must have a head report to post]
branches: # branch names that can post comment
- "master" collerek-ormar-c09209a/.coveragerc 0000664 0000000 0000000 00000000202 15130200524 0017064 0 ustar 00root root 0000000 0000000 [run]
source = ormar, tests
omit = ./tests/test.db, *py.typed*
data_file = .coverage
[report]
omit = ./tests/test.db, *py.typed*
collerek-ormar-c09209a/.github/ 0000775 0000000 0000000 00000000000 15130200524 0016311 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/.github/FUNDING.yml 0000664 0000000 0000000 00000000020 15130200524 0020116 0 ustar 00root root 0000000 0000000 github: collerek collerek-ormar-c09209a/.github/ISSUE_TEMPLATE/ 0000775 0000000 0000000 00000000000 15130200524 0020474 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/.github/ISSUE_TEMPLATE/bug_report.md 0000664 0000000 0000000 00000001505 15130200524 0023167 0 ustar 00root root 0000000 0000000 ---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
(Note: this should be a complete and concise piece of code that allows reproduction of an issue)
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Versions (please complete the following information):**
- Database backend used (mysql/sqlite/postgress)
- Python version
- `ormar` version
- `pydantic` version
- if applicable `fastapi` version
**Additional context**
Add any other context about the problem here.
collerek-ormar-c09209a/.github/ISSUE_TEMPLATE/config.yml 0000664 0000000 0000000 00000000631 15130200524 0022464 0 ustar 00root root 0000000 0000000 contact_links:
- name: I have a question ❓
url: https://github.com/collerek/ormar/discussions
about: If you have any question about the usage of ormar, please open a discussion first.
- name: I want a new feature 🆕
url: https://github.com/collerek/ormar/discussions
about: If you would like to request or make a change/enhancement that is not trivial, please open a discussion first.
collerek-ormar-c09209a/.github/ISSUE_TEMPLATE/feature_request.md 0000664 0000000 0000000 00000001134 15130200524 0024220 0 ustar 00root root 0000000 0000000 ---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
collerek-ormar-c09209a/.github/dependabot.yml 0000664 0000000 0000000 00000000533 15130200524 0021142 0 ustar 00root root 0000000 0000000 # Basic set up
# https://help.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#package-ecosystem
version: 2
updates:
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
collerek-ormar-c09209a/.github/workflows/ 0000775 0000000 0000000 00000000000 15130200524 0020346 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/.github/workflows/auto-merge-dependabot.yml 0000664 0000000 0000000 00000003235 15130200524 0025244 0 ustar 00root root 0000000 0000000 # Based on https://docs.github.com/en/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/automating-dependabot-with-github-actions#enable-auto-merge-on-a-pull-request
name: Dependabot auto-approve and auto-merge
on: pull_request_target
permissions:
pull-requests: write
contents: write
jobs:
autoapprove:
name: Auto Approve a PR by dependabot
runs-on: ubuntu-latest
steps:
- name: Auto approve
uses: hmarr/auto-approve-action@v4.0.0
if: github.actor == 'dependabot[bot]'
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
dependabot:
runs-on: ubuntu-latest
if: ${{ github.actor == 'dependabot[bot]' }}
steps:
- name: Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v2.5.0
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Enable auto-merge for Dependabot PRs
# Automatically merge semver-patch and semver-minor PRs
# or black dependency upgrades
if: "${{ steps.metadata.outputs.update-type ==
'version-update:semver-minor' ||
steps.metadata.outputs.update-type ==
'version-update:semver-patch' ||
steps.metadata.outputs.dependency-names ==
'black' }}"
# https://cli.github.com/manual/gh_pr_merge
run: gh pr merge --auto --squash "$PR_URL"
env:
PR_URL: ${{github.event.pull_request.html_url}}
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
collerek-ormar-c09209a/.github/workflows/benchmark.yml 0000664 0000000 0000000 00000001736 15130200524 0023032 0 ustar 00root root 0000000 0000000 name: codspeed-benchmarks
on:
push:
branches: [ master, pydantic_v2 ]
pull_request:
branches: [ master, pydantic_v2 ]
# `workflow_dispatch` allows CodSpeed to trigger backtest
# performance analysis in order to generate initial data.
workflow_dispatch:
jobs:
benchmarks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: "3.11"
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Poetry details
run: |
poetry --version
poetry config --list
- name: Install dependencies
run: poetry install --extras "all"
- name: Run benchmarks
uses: CodSpeedHQ/action@v4
with:
mode: instrumentation
token: ${{ secrets.CODSPEED_TOKEN }}
run: poetry run pytest benchmarks/ --codspeed
collerek-ormar-c09209a/.github/workflows/deploy-docs.yml 0000664 0000000 0000000 00000001744 15130200524 0023321 0 ustar 00root root 0000000 0000000 name: Build Documentation using MkDocs
on:
push:
# Pattern matched against refs/tags
tags:
- '**'
jobs:
build:
name: Build and Deploy Documentation
runs-on: ubuntu-latest
steps:
- name: Checkout Master
uses: actions/checkout@v5
- name: Set up Python 3.8
uses: actions/setup-python@v6
with:
python-version: '3.8'
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Install dependencies
run: |
poetry install --extras "all"
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Set env
run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV
- name: Test
run: |
echo $RELEASE_VERSION
echo ${{ env.RELEASE_VERSION }}
- name: Deploy
run: |
mike deploy --push --update-aliases ${{ env.RELEASE_VERSION }} latest
collerek-ormar-c09209a/.github/workflows/lint.yml 0000664 0000000 0000000 00000002066 15130200524 0022043 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: lint
on:
push:
branches-ignore:
- 'gh-pages'
pull_request:
branches: [ master, pydantic_v2 ]
jobs:
lint:
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-latest
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'collerek/ormar'
steps:
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Poetry details
run: |
poetry --version
poetry config --list
- name: Install dependencies
run: poetry install --extras "all" --no-root
- name: Format
run: make fmt
- name: Lint
run: make lint
collerek-ormar-c09209a/.github/workflows/python-publish.yml 0000664 0000000 0000000 00000001625 15130200524 0024062 0 ustar 00root root 0000000 0000000 # This workflows will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
name: Upload Python Package
on:
release:
types: [created]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: '3.x'
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: true
virtualenvs-in-project: true
- name: Build and publish
run: |
poetry build -vvv
poetry publish -u ${{ secrets.PYPI_USERNAME }} -p ${{ secrets.PYPI_PASSWORD }}
collerek-ormar-c09209a/.github/workflows/test-package.yml 0000664 0000000 0000000 00000007026 15130200524 0023446 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: test
on:
push:
branches-ignore:
- 'gh-pages'
pull_request:
branches: [ master, pydantic_v2 ]
jobs:
tests:
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-latest
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'collerek/ormar'
strategy:
matrix:
python-version: [3.9, "3.10", 3.11, 3.12]
fail-fast: false
services:
mysql:
image: mysql:5.7
env:
MYSQL_USER: username
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
MYSQL_DATABASE: testsuite
ports:
- 3306:3306
options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3
postgres:
image: postgres:10.8
env:
POSTGRES_USER: username
POSTGRES_PASSWORD: password
POSTGRES_DB: testsuite
ports:
- 5432:5432
options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 --name postgres
steps:
- name: Checkout
uses: actions/checkout@v5
with:
submodules: false
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Poetry details
run: |
poetry --version
poetry config --list
- name: Install dependencies
run: poetry install --extras "all"
- name: Run mysql
env:
DATABASE_URL: "mysql://username:password@127.0.0.1:3306/testsuite"
run: bash scripts/test.sh
- name: Install postgresql-client
run: |
sudo apt-get update
sudo apt-get install --yes postgresql-client
- name: Connect to PostgreSQL with CLI
run: env PGPASSWORD=password psql -h localhost -U username -c 'SELECT VERSION();' testsuite
- name: Show max connections
run: env PGPASSWORD=password psql -h localhost -U username -c 'SHOW max_connections;' testsuite
- name: Alter max connections
run: |
docker exec -i postgres bash << EOF
sed -i -e 's/max_connections = 100/max_connections = 1000/' /var/lib/postgresql/data/postgresql.conf
sed -i -e 's/shared_buffers = 128MB/shared_buffers = 512MB/' /var/lib/postgresql/data/postgresql.conf
EOF
docker restart --time 0 postgres
sleep 5
- name: Show max connections
run: env PGPASSWORD=password psql -h localhost -U username -c 'SHOW max_connections;' testsuite
- name: Run postgres
env:
DATABASE_URL: "postgresql://username:password@localhost:5432/testsuite"
run: bash scripts/test.sh
- name: Run sqlite
env:
DATABASE_URL: "sqlite:///testsuite"
run: bash scripts/test.sh
- name: Upload coverage
uses: codecov/codecov-action@v5.5.2
with:
name: codecov-umbrella
token: ${{ secrets.CODCOV_TOKEN }}
verbose: true
- name: Test & publish code coverage
uses: paambaati/codeclimate-action@v9.0.0
if: github.event.pull_request.head.repo.full_name == 'collerek/ormar'
env:
CC_TEST_REPORTER_ID: ${{ secrets.CC_COVERAGE_TOKEN }}
collerek-ormar-c09209a/.github/workflows/test_docs.yml 0000664 0000000 0000000 00000001777 15130200524 0023074 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: test_docs
on:
push:
branches-ignore:
- 'gh-pages'
pull_request:
branches: [ master, pydantic_v2 ]
jobs:
tests_docs:
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-latest
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'collerek/ormar'
steps:
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Install dependencies
run: |
poetry install --extras "all"
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Test docs
run: bash scripts/test_docs.sh
collerek-ormar-c09209a/.github/workflows/type-check.yml 0000664 0000000 0000000 00000002040 15130200524 0023121 0 ustar 00root root 0000000 0000000 # This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
name: type_check
on:
push:
branches-ignore:
- 'gh-pages'
pull_request:
branches: [ master, pydantic_v2 ]
jobs:
lint:
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-latest
if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'collerek/ormar'
steps:
- uses: actions/checkout@v5
- name: Set up Python
uses: actions/setup-python@v6
with:
python-version: 3.11
- name: Install Poetry
uses: snok/install-poetry@v1.4
with:
version: 1.8.4
virtualenvs-create: false
- name: Poetry details
run: |
poetry --version
poetry config --list
- name: Install dependencies
run: poetry install --extras "all" --no-root
- name: Type check
run: make type_check
collerek-ormar-c09209a/.gitignore 0000664 0000000 0000000 00000000275 15130200524 0016745 0 ustar 00root root 0000000 0000000 p38venv
alembic
alembic.ini
build
.idea
.pytest_cache
.mypy_cache
*.coverage
*.pyc
*.log
test.db
.vscode/
dist
/ormar.egg-info/
site
profile.py
*.db
*.db-journal
*coverage.xml
.benchmarks/
collerek-ormar-c09209a/.pre-commit-config.yaml 0000664 0000000 0000000 00000000253 15130200524 0021232 0 ustar 00root root 0000000 0000000 repos:
- repo: local
hooks:
- id: pre-commit-local
name: format
entry: make pre-commit
language: python
pass_filenames: false
collerek-ormar-c09209a/LICENSE.md 0000664 0000000 0000000 00000002067 15130200524 0016362 0 ustar 00root root 0000000 0000000 MIT License
Copyright (c) 2020 Radosław Drążkiewicz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE. collerek-ormar-c09209a/Makefile 0000664 0000000 0000000 00000001727 15130200524 0016420 0 ustar 00root root 0000000 0000000 test_all: test_pg test_mysql test_sqlite
test_pg: export DATABASE_URL=postgresql://username:password@localhost:5432/testsuite
test_pg:
docker-compose -f scripts/docker-compose.yml up -d postgres
bash scripts/test.sh -svv
docker-compose -f scripts/docker-compose.yml stop postgres
test_mysql: export DATABASE_URL=mysql://username:password@127.0.0.1:3306/testsuite
test_mysql:
docker-compose -f "scripts/docker-compose.yml" up -d mysql
bash scripts/test.sh -svv
docker-compose -f scripts/docker-compose.yml stop mysql
test_sqlite:
bash scripts/test.sh -svv
test_docs:
bash scripts/test_docs.sh -svv
test:
pytest -svv tests/
coverage:
pytest --cov=ormar --cov=tests --cov-fail-under=100 --cov-report=term-missing tests
type_check:
mkdir -p .mypy_cache && poetry run python -m mypy ormar tests --ignore-missing-imports --install-types --non-interactive
lint:
poetry run python -m ruff check . --fix
fmt:
poetry run python -m black .
pre-commit: fmt lint type_check collerek-ormar-c09209a/README.md 0000664 0000000 0000000 00000060602 15130200524 0016234 0 ustar 00root root 0000000 0000000 # ormar
### Overview
The `ormar` package is an async mini ORM for Python, with support for **Postgres,
MySQL**, and **SQLite**.
The main benefits of using `ormar` are:
* getting an **async ORM that can be used with async frameworks** (fastapi, starlette etc.)
* getting just **one model to maintain** - you don't have to maintain pydantic and other orm models (sqlalchemy, peewee, gino etc.)
The goal was to create a simple ORM that can be **used directly (as request and response models) with [`fastapi`][fastapi]** that bases it's data validation on pydantic.
Ormar - apart from the obvious "ORM" in name - gets its name from _ormar_ in Swedish which means _snakes_, and _ormar_ in Croatian which means _cabinet_.
And what's a better name for python ORM than snakes cabinet :)
**If you like ormar remember to star the repository in [github](https://github.com/collerek/ormar)!**
The bigger community we build, the easier it will be to catch bugs and attract contributors ;)
### Documentation
Check out the [documentation][documentation] for details.
**Note that for brevity most of the documentation snippets omit the creation of the database
and scheduling the execution of functions for asynchronous run.**
If you want more real life examples than in the documentation you can see the [tests][tests] folder,
since they actually have to create and connect to a database in most of the tests.
Yet remember that those are - well - tests and not all solutions are suitable to be used in real life applications.
### Part of the `fastapi` ecosystem
As part of the fastapi ecosystem `ormar` is supported in libraries that somehow work with databases.
As of now `ormar` is supported by:
* [`fastapi-crudrouter`](https://github.com/awtkns/fastapi-crudrouter)
* [`fastapi-pagination`](https://github.com/uriyyo/fastapi-pagination)
If you maintain or use a different library and would like it to support `ormar` let us know how we can help.
### Dependencies
Ormar is built with:
* [`sqlalchemy core`][sqlalchemy-core] for query building.
* [`databases`][databases] for cross-database async support.
* [`pydantic`][pydantic] for data validation.
* `typing_extensions` for python 3.6 - 3.7
### License
`ormar` is built as open-sorce software and will remain completely free (MIT license).
As I write open-source code to solve everyday problems in my work or to promote and build strong python
community you can say thank you and buy me a coffee or sponsor me with a monthly amount to help ensure my work remains free and maintained.
Sponsor - Github Sponsors
### Migrating from `sqlalchemy` and existing databases
If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation
tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually.
**Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar)
or simply `pip install sqlalchemy-to-ormar`
`sqlalchemy-to-ormar` can be used in pair with `sqlacodegen` to auto-map/ generate `ormar` models from existing database, even if you don't use `sqlalchemy` for your project.
### Migrations & Database creation
Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide
database migrations (and you really should for production code).
For tests and basic applications the `sqlalchemy` is more than enough:
```python
# note this is just a partial snippet full working example below
# 1. Imports
import sqlalchemy
import databases
# 2. Initialization
DATABASE_URL = "sqlite:///db.sqlite"
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
# Define models here
# 3. Database creation and tables creation
engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.create_all(engine)
```
For a sample configuration of alembic and more information regarding migrations and
database creation visit [migrations][migrations] documentation section.
### Package versions
**ormar is still under development:**
We recommend pinning any dependencies (with i.e. `ormar~=0.9.1`)
`ormar` also follows the release numeration that breaking changes bump the major number,
while other changes and fixes bump minor number, so with the latter you should be safe to
update, yet always read the [releases][releases] docs before.
`example: (0.5.2 -> 0.6.0 - breaking, 0.5.2 -> 0.5.3 - non breaking)`.
### Asynchronous Python
Note that `ormar` is an asynchronous ORM, which means that you have to `await` the calls to
the methods, that are scheduled for execution in an event loop. Python has a builtin module
[`asyncio`][asyncio] that allows you to do just that.
Note that most "normal" python interpreters do not allow execution of `await`
outside of a function (because you actually schedule this function for delayed execution
and don't get the result immediately).
In a modern web framework (like `fastapi`), the framework will handle this for you, but if
you plan to do this on your own you need to perform this manually like described in the
quick start below.
### Quick Start
Note that you can find the same script in examples folder on github.
```python
from typing import Optional
import databases
import pydantic
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///db.sqlite"
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
engine=sqlalchemy.create_engine(DATABASE_URL),
)
# Note that all type hints are optional
# below is a perfectly valid model declaration
# class Author(ormar.Model):
# ormar_config = base_ormar_config.copy(tablename="authors")
#
# id = ormar.Integer(primary_key=True) # <= notice no field types
# name = ormar.String(max_length=100)
class Author(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="authors")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Book(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="books")
id: int = ormar.Integer(primary_key=True)
author: Optional[Author] = ormar.ForeignKey(Author)
title: str = ormar.String(max_length=100)
year: int = ormar.Integer(nullable=True)
# create the database
# note that in production you should use migrations
# note that this is not required if you connect to existing database
# just to be sure we clear the db before
base_ormar_config.metadata.drop_all(base_ormar_config.engine)
base_ormar_config.metadata.create_all(base_ormar_config.engine)
# all functions below are divided into functionality categories
# note how all functions are defined with async - hence can use await AND needs to
# be awaited on their own
async def create():
# Create some records to work with through QuerySet.create method.
# Note that queryset is exposed on each Model's class as objects
tolkien = await Author.objects.create(name="J.R.R. Tolkien")
await Book.objects.create(author=tolkien, title="The Hobbit", year=1937)
await Book.objects.create(author=tolkien, title="The Lord of the Rings", year=1955)
await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
# alternative creation of object divided into 2 steps
sapkowski = Author(name="Andrzej Sapkowski")
# do some stuff
await sapkowski.save()
# or save() after initialization
await Book(author=sapkowski, title="The Witcher", year=1990).save()
await Book(author=sapkowski, title="The Tower of Fools", year=2002).save()
# to read more about inserting data into the database
# visit: https://collerek.github.io/ormar/queries/create/
async def read():
# Fetch an instance, without loading a foreign key relationship on it.
# Django style
book = await Book.objects.get(title="The Hobbit")
# or python style
book = await Book.objects.get(Book.title == "The Hobbit")
book2 = await Book.objects.first()
# first() fetch the instance with lower primary key value
assert book == book2
# you can access all fields on loaded model
assert book.title == "The Hobbit"
assert book.year == 1937
# when no condition is passed to get()
# it behaves as last() based on primary key column
book3 = await Book.objects.get()
assert book3.title == "The Tower of Fools"
# When you have a relation, ormar always defines a related model for you
# even when all you loaded is a foreign key value like in this example
assert isinstance(book.author, Author)
# primary key is populated from foreign key stored in books table
assert book.author.pk == 1
# since the related model was not loaded all other fields are None
assert book.author.name is None
# Load the relationship from the database when you already have the related model
# alternatively see joins section below
await book.author.load()
assert book.author.name == "J.R.R. Tolkien"
# get all rows for given model
authors = await Author.objects.all()
assert len(authors) == 2
# to read more about reading data from the database
# visit: https://collerek.github.io/ormar/queries/read/
async def update():
# read existing row from db
tolkien = await Author.objects.get(name="J.R.R. Tolkien")
assert tolkien.name == "J.R.R. Tolkien"
tolkien_id = tolkien.id
# change the selected property
tolkien.name = "John Ronald Reuel Tolkien"
# call update on a model instance
await tolkien.update()
# confirm that object was updated
tolkien = await Author.objects.get(name="John Ronald Reuel Tolkien")
assert tolkien.name == "John Ronald Reuel Tolkien"
assert tolkien.id == tolkien_id
# alternatively update data without loading
await Author.objects.filter(name__contains="Tolkien").update(name="J.R.R. Tolkien")
# to read more about updating data in the database
# visit: https://collerek.github.io/ormar/queries/update/
async def delete():
silmarillion = await Book.objects.get(year=1977)
# call delete() on instance
await silmarillion.delete()
# alternatively delete without loading
await Book.objects.delete(title="The Tower of Fools")
# note that when there is no record ormar raises NoMatch exception
try:
await Book.objects.get(year=1977)
except ormar.NoMatch:
print("No book from 1977!")
# to read more about deleting data from the database
# visit: https://collerek.github.io/ormar/queries/delete/
# note that despite the fact that record no longer exists in database
# the object above is still accessible and you can use it (and i.e. save()) again.
tolkien = silmarillion.author
await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
async def joins():
# Tho join two models use select_related
# Django style
book = await Book.objects.select_related("author").get(title="The Hobbit")
# Python style
book = await Book.objects.select_related(Book.author).get(
Book.title == "The Hobbit"
)
# now the author is already prefetched
assert book.author.name == "J.R.R. Tolkien"
# By default you also get a second side of the relation
# constructed as lowercase source model name +'s' (books in this case)
# you can also provide custom name with parameter related_name
# Django style
author = await Author.objects.select_related("books").all(name="J.R.R. Tolkien")
# Python style
author = await Author.objects.select_related(Author.books).all(
Author.name == "J.R.R. Tolkien"
)
assert len(author[0].books) == 3
# for reverse and many to many relations you can also prefetch_related
# that executes a separate query for each of related models
# Django style
author = await Author.objects.prefetch_related("books").get(name="J.R.R. Tolkien")
# Python style
author = await Author.objects.prefetch_related(Author.books).get(
Author.name == "J.R.R. Tolkien"
)
assert len(author.books) == 3
# to read more about relations
# visit: https://collerek.github.io/ormar/relations/
# to read more about joins and subqueries
# visit: https://collerek.github.io/ormar/queries/joins-and-subqueries/
async def filter_and_sort():
# to filter the query you can use filter() or pass key-value pars to
# get(), all() etc.
# to use special methods or access related model fields use double
# underscore like to filter by the name of the author use author__name
# Django style
books = await Book.objects.all(author__name="J.R.R. Tolkien")
# python style
books = await Book.objects.all(Book.author.name == "J.R.R. Tolkien")
assert len(books) == 3
# filter can accept special methods also separated with double underscore
# to issue sql query ` where authors.name like "%tolkien%"` that is not
# case sensitive (hence small t in Tolkien)
# Django style
books = await Book.objects.filter(author__name__icontains="tolkien").all()
# python style
books = await Book.objects.filter(Book.author.name.icontains("tolkien")).all()
assert len(books) == 3
# to sort use order_by() function of queryset
# to sort decreasing use hyphen before the field name
# same as with filter you can use double underscores to access related fields
# Django style
books = (
await Book.objects.filter(author__name__icontains="tolkien")
.order_by("-year")
.all()
)
# python style
books = (
await Book.objects.filter(Book.author.name.icontains("tolkien"))
.order_by(Book.year.desc())
.all()
)
assert len(books) == 3
assert books[0].title == "The Silmarillion"
assert books[2].title == "The Hobbit"
# to read more about filtering and ordering
# visit: https://collerek.github.io/ormar/queries/filter-and-sort/
async def subset_of_columns():
# to exclude some columns from loading when querying the database
# you can use fields() method
hobbit = await Book.objects.fields(["title"]).get(title="The Hobbit")
# note that fields not included in fields are empty (set to None)
assert hobbit.year is None
assert hobbit.author is None
# selected field is there
assert hobbit.title == "The Hobbit"
# alternatively you can provide columns you want to exclude
hobbit = await Book.objects.exclude_fields(["year"]).get(title="The Hobbit")
# year is still not set
assert hobbit.year is None
# but author is back
assert hobbit.author is not None
# also you cannot exclude primary key column - it's always there
# even if you EXPLICITLY exclude it it will be there
# note that each model have a shortcut for primary_key column which is pk
# and you can filter/access/set the values by this alias like below
assert hobbit.pk is not None
# note that you cannot exclude fields that are not nullable
# (required) in model definition
try:
await Book.objects.exclude_fields(["title"]).get(title="The Hobbit")
except pydantic.ValidationError:
print("Cannot exclude non nullable field title")
# to read more about selecting subset of columns
# visit: https://collerek.github.io/ormar/queries/select-columns/
async def pagination():
# to limit number of returned rows use limit()
books = await Book.objects.limit(1).all()
assert len(books) == 1
assert books[0].title == "The Hobbit"
# to offset number of returned rows use offset()
books = await Book.objects.limit(1).offset(1).all()
assert len(books) == 1
assert books[0].title == "The Lord of the Rings"
# alternatively use paginate that combines both
books = await Book.objects.paginate(page=2, page_size=2).all()
assert len(books) == 2
# note that we removed one book of Sapkowski in delete()
# and recreated The Silmarillion - by default when no order_by is set
# ordering sorts by primary_key column
assert books[0].title == "The Witcher"
assert books[1].title == "The Silmarillion"
# to read more about pagination and number of rows
# visit: https://collerek.github.io/ormar/queries/pagination-and-rows-number/
async def aggregations():
# count:
assert 2 == await Author.objects.count()
# exists
assert await Book.objects.filter(title="The Hobbit").exists()
# maximum
assert 1990 == await Book.objects.max(columns=["year"])
# minimum
assert 1937 == await Book.objects.min(columns=["year"])
# average
assert 1964.75 == await Book.objects.avg(columns=["year"])
# sum
assert 7859 == await Book.objects.sum(columns=["year"])
# to read more about aggregated functions
# visit: https://collerek.github.io/ormar/queries/aggregations/
async def raw_data():
# extract raw data in a form of dicts or tuples
# note that this skips the validation(!) as models are
# not created from parsed data
# get list of objects as dicts
assert await Book.objects.values() == [
{"id": 1, "author": 1, "title": "The Hobbit", "year": 1937},
{"id": 2, "author": 1, "title": "The Lord of the Rings", "year": 1955},
{"id": 4, "author": 2, "title": "The Witcher", "year": 1990},
{"id": 5, "author": 1, "title": "The Silmarillion", "year": 1977},
]
# get list of objects as tuples
assert await Book.objects.values_list() == [
(1, 1, "The Hobbit", 1937),
(2, 1, "The Lord of the Rings", 1955),
(4, 2, "The Witcher", 1990),
(5, 1, "The Silmarillion", 1977),
]
# filter data - note how you always get a list
assert await Book.objects.filter(title="The Hobbit").values() == [
{"id": 1, "author": 1, "title": "The Hobbit", "year": 1937}
]
# select only wanted fields
assert await Book.objects.filter(title="The Hobbit").values(["id", "title"]) == [
{"id": 1, "title": "The Hobbit"}
]
# if you select only one column you could flatten it with values_list
assert await Book.objects.values_list("title", flatten=True) == [
"The Hobbit",
"The Lord of the Rings",
"The Witcher",
"The Silmarillion",
]
# to read more about extracting raw values
# visit: https://collerek.github.io/ormar/queries/aggregations/
async def with_connect(function):
# note that for any other backend than sqlite you actually need to
# connect to the database to perform db operations
async with base_ormar_config.database:
await function()
# note that if you use framework like `fastapi` you shouldn't connect
# in your endpoints but have a global connection pool
# check https://collerek.github.io/ormar/fastapi/ and section with db connection
# gather and execute all functions
# note - normally import should be at the beginning of the file
import asyncio
# note that normally you use gather() function to run several functions
# concurrently but we actually modify the data and we rely on the order of functions
for func in [
create,
read,
update,
delete,
joins,
filter_and_sort,
subset_of_columns,
pagination,
aggregations,
raw_data,
]:
print(f"Executing: {func.__name__}")
asyncio.run(with_connect(func))
# drop the database tables
base_ormar_config.metadata.drop_all(base_ormar_config.engine)
```
## Ormar Specification
### QuerySet methods
* `create(**kwargs) -> Model`
* `get(*args, **kwargs) -> Model`
* `get_or_none(*args, **kwargs) -> Optional[Model]`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
* `first(*args, **kwargs) -> Model`
* `update(each: bool = False, **kwargs) -> int`
* `update_or_create(**kwargs) -> Model`
* `bulk_create(objects: List[Model]) -> None`
* `bulk_update(objects: List[Model], columns: List[str] = None) -> None`
* `delete(*args, each: bool = False, **kwargs) -> int`
* `all(*args, **kwargs) -> List[Optional[Model]]`
* `iterate(*args, **kwargs) -> AsyncGenerator[Model]`
* `filter(*args, **kwargs) -> QuerySet`
* `exclude(*args, **kwargs) -> QuerySet`
* `select_related(related: Union[List, str]) -> QuerySet`
* `prefetch_related(related: Union[List, str]) -> QuerySet`
* `limit(limit_count: int) -> QuerySet`
* `offset(offset: int) -> QuerySet`
* `count(distinct: bool = True) -> int`
* `exists() -> bool`
* `max(columns: List[str]) -> Any`
* `min(columns: List[str]) -> Any`
* `avg(columns: List[str]) -> Any`
* `sum(columns: List[str]) -> Any`
* `fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `order_by(columns:Union[List, str]) -> QuerySet`
* `values(fields: Union[List, str, Set, Dict])`
* `values_list(fields: Union[List, str, Set, Dict])`
#### Relation types
* One to many - with `ForeignKey(to: Model)`
* Many to many - with `ManyToMany(to: Model, Optional[through]: Model)`
#### Model fields types
Available Model Fields (with required args - optional ones in docs):
* `String(max_length)`
* `Text()`
* `Boolean()`
* `Integer()`
* `Float()`
* `Date()`
* `Time()`
* `DateTime()`
* `JSON()`
* `BigInteger()`
* `SmallInteger()`
* `Decimal(scale, precision)`
* `UUID()`
* `LargeBinary(max_length)`
* `Enum(enum_class)`
* `Enum` like Field - by passing `choices` to any other Field type
* `EncryptedString` - by passing `encrypt_secret` and `encrypt_backend`
* `ForeignKey(to)`
* `ManyToMany(to)`
### Available fields options
The following keyword arguments are supported on all field types.
* `primary_key: bool`
* `nullable: bool`
* `default: Any`
* `server_default: Any`
* `index: bool`
* `unique: bool`
* `choices: typing.Sequence`
* `name: str`
All fields are required unless one of the following is set:
* `nullable` - Creates a nullable column. Sets the default to `False`. Read the fields common parameters for details.
* `sql_nullable` - Used to set different setting for pydantic and the database. Sets the default to `nullable` value. Read the fields common parameters for details.
* `default` - Set a default value for the field. **Not available for relation fields**
* `server_default` - Set a default value for the field on server side (like sqlalchemy's `func.now()`). **Not available for relation fields**
* `primary key` with `autoincrement` - When a column is set to primary key and autoincrement is set on this column.
Autoincrement is set by default on int primary keys.
### Available signals
Signals allow to trigger your function for a given event on a given Model.
* `pre_save`
* `post_save`
* `pre_update`
* `post_update`
* `pre_delete`
* `post_delete`
* `pre_relation_add`
* `post_relation_add`
* `pre_relation_remove`
* `post_relation_remove`
* `post_bulk_update`
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[databases]: https://github.com/encode/databases
[pydantic]: https://pydantic-docs.helpmanual.io/
[encode/orm]: https://github.com/encode/orm/
[alembic]: https://alembic.sqlalchemy.org/en/latest/
[fastapi]: https://fastapi.tiangolo.com/
[documentation]: https://collerek.github.io/ormar/
[migrations]: https://collerek.github.io/ormar/models/migrations/
[asyncio]: https://docs.python.org/3/library/asyncio.html
[releases]: https://collerek.github.io/ormar/releases/
[tests]: https://github.com/collerek/ormar/tree/master/tests
collerek-ormar-c09209a/benchmarks/ 0000775 0000000 0000000 00000000000 15130200524 0017066 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/benchmarks/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0021165 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/benchmarks/conftest.py 0000664 0000000 0000000 00000005174 15130200524 0021274 0 ustar 00root root 0000000 0000000 import asyncio
import random
import string
import time
import nest_asyncio
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
nest_asyncio.apply()
pytestmark = pytest.mark.asyncio
class Author(ormar.Model):
    """Minimal author model used as the main benchmark target."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # stored as an Integer column constrained to [0, 100]
    # (annotation corrected from `float` to match the Integer field)
    score: int = ormar.Integer(minimum=0, maximum=100)


class AuthorWithManyFields(Author):
    """Author variant with extra columns (wider-row benchmarks)."""

    year_born: int = ormar.Integer()
    year_died: int = ormar.Integer(nullable=True)
    birthplace: str = ormar.String(max_length=255)


class Publisher(ormar.Model):
    """Second relation target so Book carries two foreign keys."""

    ormar_config = base_ormar_config.copy(tablename="publishers")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    prestige: int = ormar.Integer(minimum=0, maximum=10)


class Book(ormar.Model):
    """Model with indexed foreign keys, used for relation benchmarks."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Author = ormar.ForeignKey(Author, index=True)
    publisher: Publisher = ormar.ForeignKey(Publisher, index=True)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
# (re)creates the test database schema for every test function
create_test_database = init_tests(base_ormar_config, scope="function")


@pytest_asyncio.fixture
async def author():
    # single persisted Author available to tests that need one
    author = await Author(name="Author", score=10).save()
    return author


@pytest_asyncio.fixture
async def publisher():
    # single persisted Publisher with a random prestige value
    publisher = await Publisher(name="Publisher", prestige=random.randint(0, 10)).save()
    return publisher


@pytest_asyncio.fixture
async def authors_in_db(num_models: int):
    """Bulk-insert `num_models` random authors and return them re-read
    from the database.

    `num_models` is supplied indirectly by each test's
    ``@pytest.mark.parametrize("num_models", ...)`` marker.
    """
    authors = [
        Author(
            name="".join(random.sample(string.ascii_letters, 5)),
            score=int(random.random() * 100),
        )
        for i in range(0, num_models)
    ]
    await Author.objects.bulk_create(authors)
    return await Author.objects.all()


@pytest_asyncio.fixture
@pytest.mark.benchmark(
    min_rounds=1, timer=time.process_time, disable_gc=True, warmup=False
)
async def aio_benchmark(benchmark, event_loop: asyncio.BaseEventLoop):
    """Fixture returning a decorator that adapts pytest-benchmark to
    coroutine functions.

    Usage inside a test::

        @aio_benchmark
        async def work(...): ...
        result = work(...)   # runs under `benchmark`, returns the result

    Coroutines are executed with ``event_loop.run_until_complete``;
    NOTE(review): this presumably relies on the module-level
    ``nest_asyncio.apply()`` to allow re-entering the already-running
    pytest-asyncio loop - confirm before changing.
    """

    def _fixture_wrapper(func):
        def _func_wrapper(*args, **kwargs):
            if asyncio.iscoroutinefunction(func):

                # benchmark the sync wrapper that drives the coroutine;
                # @benchmark both times the call and returns its result
                @benchmark
                def benchmarked_func():
                    a = event_loop.run_until_complete(func(*args, **kwargs))
                    return a

                return benchmarked_func
            else:
                # plain functions are benchmarked directly
                return benchmark(func, *args, **kwargs)

        return _func_wrapper

    return _fixture_wrapper
collerek-ormar-c09209a/benchmarks/test_benchmark_aggregate.py 0000664 0000000 0000000 00000002734 15130200524 0024445 0 ustar 00root root 0000000 0000000 from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_count(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark QuerySet.count() against a table of `num_models` rows."""

    @aio_benchmark
    async def count():
        return await Author.objects.count()

    # the wrapped callable runs the coroutine and returns its result
    # (see aio_benchmark in conftest.py)
    c = count()
    assert c == len(authors_in_db)


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_avg(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark QuerySet.avg() on the integer `score` column."""

    @aio_benchmark
    async def avg():
        return await Author.objects.avg("score")

    # scores are generated in [0, 100), so the mean must fall in range
    average = avg()
    assert 0 <= average <= 100


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_sum(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark QuerySet.sum() on the `score` column."""

    # trailing underscore avoids shadowing the builtin `sum`
    @aio_benchmark
    async def sum_():
        return await Author.objects.sum("score")

    s = sum_()
    assert 0 <= s <= 100 * num_models


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_min(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark QuerySet.min() on the `score` column."""

    @aio_benchmark
    async def min_():
        return await Author.objects.min("score")

    m = min_()
    assert 0 <= m <= 100


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_max(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark QuerySet.max() on the `score` column."""

    @aio_benchmark
    async def max_():
        return await Author.objects.max("score")

    m = max_()
    assert 0 <= m <= 100
collerek-ormar-c09209a/benchmarks/test_benchmark_bulk_create.py 0000664 0000000 0000000 00000001243 15130200524 0024771 0 ustar 00root root 0000000 0000000 import random
import string
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_making_and_inserting_models_in_bulk(aio_benchmark, num_models: int):
    """Benchmark building `num_models` Author instances and inserting
    them with a single bulk_create() call."""

    @aio_benchmark
    async def make_and_insert(num_models: int):
        # random 5-letter names; score is an int in [0, 100)
        authors = [
            Author(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            for i in range(0, num_models)
        ]
        assert len(authors) == num_models
        await Author.objects.bulk_create(authors)

    make_and_insert(num_models)
collerek-ormar-c09209a/benchmarks/test_benchmark_bulk_update.py 0000664 0000000 0000000 00000001301 15130200524 0025003 0 ustar 00root root 0000000 0000000 import random
import string
from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_updating_models_in_bulk(
    aio_benchmark, num_models: int, authors_in_db: List[Author]
):
    """Benchmark bulk_update() over all authors already in the database."""
    starting_first_name = authors_in_db[0].name

    @aio_benchmark
    async def update(authors: List[Author]):
        await Author.objects.bulk_update(authors)

    # mutate names in memory, then persist the whole list in one call
    for author in authors_in_db:
        author.name = "".join(random.sample(string.ascii_letters, 5))
    update(authors_in_db)

    # reload the first author and confirm the update really happened
    author = await Author.objects.get(id=authors_in_db[0].id)
    assert author.name != starting_first_name
collerek-ormar-c09209a/benchmarks/test_benchmark_create.py 0000664 0000000 0000000 00000005433 15130200524 0023761 0 ustar 00root root 0000000 0000000 import random
import string
import pytest
from benchmarks.conftest import Author, Book, Publisher
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_creating_models_individually(aio_benchmark, num_models: int):
    """Benchmark objects.create() called once per model (one INSERT each)."""

    @aio_benchmark
    async def create(num_models: int):
        authors = []
        for idx in range(0, num_models):
            author = await Author.objects.create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            authors.append(author)
        return authors

    # every created model should have its autoincrement pk populated
    authors = create(num_models)
    for author in authors:
        assert author.id is not None


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_creating_individually_with_related_models(
    aio_benchmark, num_models: int, author: Author, publisher: Publisher
):
    """Benchmark objects.create() for models carrying two foreign keys."""

    @aio_benchmark
    async def create_with_related_models(
        author: Author, publisher: Publisher, num_models: int
    ):
        books = []
        for idx in range(0, num_models):
            book = await Book.objects.create(
                author=author,
                publisher=publisher,
                title="".join(random.sample(string.ascii_letters, 5)),
                year=random.randint(0, 2000),
            )
            books.append(book)
        return books

    books = create_with_related_models(
        author=author, publisher=publisher, num_models=num_models
    )
    for book in books:
        assert book.id is not None


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_get_or_create_when_create(aio_benchmark, num_models: int):
    """Benchmark get_or_create() in the miss case (always creates)."""

    @aio_benchmark
    async def get_or_create(num_models: int):
        authors = []
        for idx in range(0, num_models):
            # random names make a pre-existing match effectively impossible
            author, created = await Author.objects.get_or_create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            assert created
            authors.append(author)
        return authors

    authors = get_or_create(num_models)
    for author in authors:
        assert author.id is not None


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_update_or_create_when_create(aio_benchmark, num_models: int):
    """Benchmark update_or_create() in the miss case (always creates)."""

    @aio_benchmark
    async def update_or_create(num_models: int):
        authors = []
        for idx in range(0, num_models):
            author = await Author.objects.update_or_create(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            authors.append(author)
        return authors

    authors = update_or_create(num_models)
    for author in authors:
        assert author.id is not None
collerek-ormar-c09209a/benchmarks/test_benchmark_delete.py 0000664 0000000 0000000 00000001543 15130200524 0023756 0 ustar 00root root 0000000 0000000 from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_deleting_all(
    aio_benchmark, num_models: int, authors_in_db: List[Author]
):
    """Benchmark removing every row with a single delete(each=True)."""

    @aio_benchmark
    async def delete_all():
        await Author.objects.delete(each=True)

    delete_all()
    num = await Author.objects.count()
    assert num == 0


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_deleting_individually(
    aio_benchmark, num_models: int, authors_in_db: List[Author]
):
    """Benchmark row-by-row deletion (one filtered DELETE per model)."""

    @aio_benchmark
    async def delete_one_by_one(authors: List[Author]):
        for author in authors:
            await Author.objects.filter(id=author.id).delete()

    delete_one_by_one(authors_in_db)
    num = await Author.objects.count()
    assert num == 0
collerek-ormar-c09209a/benchmarks/test_benchmark_get.py 0000664 0000000 0000000 00000006124 15130200524 0023273 0 ustar 00root root 0000000 0000000 import random
import string
from typing import List
import pytest
import pytest_asyncio
from benchmarks.conftest import Author, Book, Publisher
pytestmark = pytest.mark.asyncio
@pytest_asyncio.fixture()
async def books(author: Author, publisher: Publisher, num_models: int):
    # bulk-insert `num_models` books, all pointing at the same
    # author/publisher pair from the sibling fixtures
    books = [
        Book(
            author=author,
            publisher=publisher,
            title="".join(random.sample(string.ascii_letters, 5)),
            year=random.randint(0, 2000),
        )
        for _ in range(0, num_models)
    ]
    await Book.objects.bulk_create(books)
    return books


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_get_all(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark fetching the whole table with all()."""

    @aio_benchmark
    async def get_all(authors: List[Author]):
        # parameter is unused; it only feeds the benchmark call signature
        return await Author.objects.all()

    authors = get_all(authors_in_db)
    for idx, author in enumerate(authors_in_db):
        assert authors[idx].id == author.id


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_get_all_with_related_models(
    aio_benchmark, num_models: int, author: Author, books: List[Book]
):
    """Benchmark a select_related() join pulling the reverse relation."""

    @aio_benchmark
    async def get_with_related(author: Author):
        return await Author.objects.select_related("books").all(id=author.id)

    authors = get_with_related(author)
    assert len(authors[0].books) == num_models


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_get_one(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark get() by primary key."""

    @aio_benchmark
    async def get_one(authors: List[Author]):
        return await Author.objects.get(id=authors[0].id)

    author = get_one(authors_in_db)
    assert author == authors_in_db[0]


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_get_or_none(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark get_or_none() in the hit case."""

    @aio_benchmark
    async def get_or_none(authors: List[Author]):
        return await Author.objects.get_or_none(id=authors[0].id)

    author = get_or_none(authors_in_db)
    assert author == authors_in_db[0]


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_get_or_create_when_get(
    aio_benchmark, num_models: int, authors_in_db: List[Author]
):
    """Benchmark get_or_create() in the hit case (no insert happens)."""

    @aio_benchmark
    async def get_or_create(authors: List[Author]):
        author, created = await Author.objects.get_or_create(id=authors[0].id)
        assert not created
        return author

    author = get_or_create(authors_in_db)
    assert author == authors_in_db[0]


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_first(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark first()."""

    @aio_benchmark
    async def first():
        return await Author.objects.first()

    author = first()
    assert author == authors_in_db[0]


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_exists(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark a filtered exists() check."""

    @aio_benchmark
    async def check_exists(authors: List[Author]):
        return await Author.objects.filter(id=authors[0].id).exists()

    exists = check_exists(authors_in_db)
    assert exists
collerek-ormar-c09209a/benchmarks/test_benchmark_init.py 0000664 0000000 0000000 00000002700 15130200524 0023453 0 ustar 00root root 0000000 0000000 import random
import string
import pytest
from benchmarks.conftest import Author, Book, Publisher
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_initializing_models(aio_benchmark, num_models: int):
    """Benchmark pure model instantiation (no database access)."""

    @aio_benchmark
    async def initialize_models(num_models: int):
        authors = [
            Author(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            for i in range(0, num_models)
        ]
        assert len(authors) == num_models

    _ = initialize_models(num_models)


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_initializing_models_with_related_models(aio_benchmark, num_models: int):
    """Benchmark instantiation of models that carry foreign-key values."""

    @aio_benchmark
    async def initialize_models_with_related_models(
        author: Author, publisher: Publisher, num_models: int
    ):
        _ = [
            Book(
                author=author,
                publisher=publisher,
                title="".join(random.sample(string.ascii_letters, 5)),
                year=random.randint(0, 2000),
            )
            for i in range(0, num_models)
        ]

    # related instances are created inline here rather than via the
    # `author`/`publisher` fixtures
    author = await Author(name="Author", score=10).save()
    publisher = await Publisher(name="Publisher", prestige=random.randint(0, 10)).save()
    _ = initialize_models_with_related_models(
        author=author, publisher=publisher, num_models=num_models
    )
collerek-ormar-c09209a/benchmarks/test_benchmark_iterate.py 0000664 0000000 0000000 00000001134 15130200524 0024145 0 ustar 00root root 0000000 0000000 from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_iterate(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark async row streaming with objects.iterate()."""

    @aio_benchmark
    async def iterate_over_all(authors: List[Author]):
        # note: the parameter is immediately shadowed by the local list;
        # it only feeds the benchmark call below
        authors = []
        async for author in Author.objects.iterate():
            authors.append(author)
        return authors

    authors = iterate_over_all(authors_in_db)
    for idx, author in enumerate(authors_in_db):
        assert authors[idx].id == author.id
collerek-ormar-c09209a/benchmarks/test_benchmark_save.py 0000664 0000000 0000000 00000003334 15130200524 0023452 0 ustar 00root root 0000000 0000000 import random
import string
import pytest
from benchmarks.conftest import Author, Book, Publisher
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_saving_models_individually(aio_benchmark, num_models: int):
    """Benchmark Model.save() called once per pre-built instance."""

    @aio_benchmark
    async def make_and_insert(num_models: int):
        authors = [
            Author(
                name="".join(random.sample(string.ascii_letters, 5)),
                score=int(random.random() * 100),
            )
            for i in range(0, num_models)
        ]
        assert len(authors) == num_models
        # NOTE(review): `ids` actually collects the objects returned by
        # save(), not raw ids; the not-None assertion below still holds
        ids = []
        for author in authors:
            a = await author.save()
            ids.append(a)
        return ids

    ids = make_and_insert(num_models)
    for id in ids:
        assert id is not None


@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_saving_models_individually_with_related_models(
    aio_benchmark, num_models: int, author: Author, publisher: Publisher
):
    """Benchmark Model.save() for models with foreign keys set."""

    @aio_benchmark
    async def making_and_inserting_related_models_one_by_one(
        author: Author, publisher: Publisher, num_models: int
    ):
        books = [
            Book(
                author=author,
                publisher=publisher,
                title="".join(random.sample(string.ascii_letters, 5)),
                year=random.randint(0, 2000),
            )
            for i in range(0, num_models)
        ]
        ids = []
        for book in books:
            await book.save()
            ids.append(book.id)
        return ids

    ids = making_and_inserting_related_models_one_by_one(
        author=author, publisher=publisher, num_models=num_models
    )
    for id in ids:
        assert id is not None
collerek-ormar-c09209a/benchmarks/test_benchmark_update.py 0000664 0000000 0000000 00000001303 15130200524 0023770 0 ustar 00root root 0000000 0000000 import random
import string
from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [10, 20, 40])
async def test_updating_models_individually(
    aio_benchmark, num_models: int, authors_in_db: List[Author]
):
    """Benchmark update() called once per model (one UPDATE per row)."""
    starting_first_name = authors_in_db[0].name

    @aio_benchmark
    async def update(authors: List[Author]):
        for author in authors:
            _ = await author.update(
                name="".join(random.sample(string.ascii_letters, 5))
            )

    update(authors_in_db)
    # reload the first author and confirm the name actually changed
    author = await Author.objects.get(id=authors_in_db[0].id)
    assert author.name != starting_first_name
collerek-ormar-c09209a/benchmarks/test_benchmark_values.py 0000664 0000000 0000000 00000001670 15130200524 0024014 0 ustar 00root root 0000000 0000000 from typing import List
import pytest
from benchmarks.conftest import Author
pytestmark = pytest.mark.asyncio
@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_values(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark values() - raw rows returned as dicts (skips validation)."""

    @aio_benchmark
    async def get_all_values(authors: List[Author]):
        # parameter is unused; it only feeds the benchmark call signature
        return await Author.objects.values()

    authors_list = get_all_values(authors_in_db)
    for idx, author in enumerate(authors_in_db):
        assert authors_list[idx]["id"] == author.id


@pytest.mark.parametrize("num_models", [250, 500, 1000])
async def test_values_list(aio_benchmark, num_models: int, authors_in_db: List[Author]):
    """Benchmark values_list() - raw rows returned as tuples."""

    @aio_benchmark
    async def get_all_values_list(authors: List[Author]):
        return await Author.objects.values_list()

    authors_list = get_all_values_list(authors_in_db)
    for idx, author in enumerate(authors_in_db):
        # first tuple element is the primary key column
        assert authors_list[idx][0] == author.id
collerek-ormar-c09209a/docs/ 0000775 0000000 0000000 00000000000 15130200524 0015701 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/contributing.md 0000664 0000000 0000000 00000003715 15130200524 0020740 0 ustar 00root root 0000000 0000000 All contributions to *ormar* are welcomed!
## Issues
To make it as simple as possible for us to help you, please include the following:
* OS
* python version
* ormar version
* database backend (mysql, sqlite or postgresql)
Please try to always include the above unless you're unable to install *ormar* or **know** it's not relevant
to your question or feature request.
## Pull Requests
It should be quite straightforward to get started and create a Pull Request.
!!! note
Unless your change is trivial (typo, docs tweak etc.), please create an issue to discuss the change before
creating a pull request.
To make contributing as easy and fast as possible, you'll want to run tests and linting locally.
You'll need to have **python 3.6.2**, **3.7**, or **3.8**, **poetry**, and **git** installed.
```bash
# 1. clone your fork and cd into the repo directory
git clone git@github.com:/ormar.git
cd ormar
# 2. Install ormar, dependencies and test dependencies
poetry install -E dev
# 3. Checkout a new branch and make your changes
git checkout -b my-new-feature-branch
# make your changes...
# 4. Formatting and linting
# ormar uses black for formatting, flake8 for linting and mypy for type hints check
# run all of the following as all those calls will be run on travis after every push
black ormar tests
flake8 ormar
mypy ormar tests
# 5. Run tests
# on localhost all tests are run against the sqlite backend
# rest of the backends will be checked after push
pytest -svv --cov=ormar --cov=tests --cov-fail-under=100 --cov-report=term-missing
# 6. Build documentation
mkdocs build
# if you have changed the documentation make sure it builds successfully
# you can also use `mkdocs serve` to serve the documentation at localhost:8000
# ... commit, push, and create your pull request
```
!!!tip
For more information on how and why ormar works the way it works
please see the [API documentation][API documentation]
[API documentation]: ./api/index.md collerek-ormar-c09209a/docs/fastapi/ 0000775 0000000 0000000 00000000000 15130200524 0017330 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/fastapi/index.md 0000664 0000000 0000000 00000014446 15130200524 0020772 0 ustar 00root root 0000000 0000000 # Fastapi
The use of ormar with fastapi is quite simple.
Apart from connecting to databases at startup everything else
you need to do is substitute pydantic models with ormar models.
Here you can find a very simple sample application code.
!!!warning
This example assumes that you already have a database created. If that is not the case please visit [database initialization][database initialization] section.
!!!tip
The following example (all sections) should be put in one file.
It's divided into subsections for clarity.
!!!note
If you want to read more on how you can use ormar models in fastapi requests and
responses check the [responses](response.md) and [requests](requests.md) documentation.
## Quick Start
!!!note
Note that you can find the full quick start script in the [github](https://github.com/collerek/ormar) repo under examples.
### Imports and initialization
Define startup and shutdown procedures using FastAPI lifespan and use is in the
application.
```python
from typing import List, Optional, AsyncIterator
import databases
import sqlalchemy
from fastapi import FastAPI
import ormar
from contextlib import asynccontextmanager
from fastapi import FastAPI
def get_lifespan(config):
@asynccontextmanager
async def lifespan(_: FastAPI) -> AsyncIterator[None]:
if not config.database.is_connected:
await config.database.connect()
yield
if config.database.is_connected:
await config.database.disconnect()
return lifespan
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database("sqlite:///test.db"),
)
app = FastAPI(lifespan=get_lifespan(base_ormar_config))
```
!!!info
You can read more on connecting to databases in [fastapi][fastapi] documentation
### Models definition
Define ormar models with appropriate fields.
Those models will be used instead of pydantic ones.
```python
base_ormar_config = OrmarConfig(
metadata = metadata
database = database
)
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
```
!!!tip
You can read more on defining `Models` in [models][models] section.
### Fastapi endpoints definition
Define your desired endpoints, note how `ormar` models are used both
as `response_model` and as a requests parameters.
```python
@app.get("/items/", response_model=List[Item])
async def get_items():
items = await Item.objects.select_related("category").all()
return items
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
await item.save()
return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
await category.save()
return category
@app.put("/items/{item_id}")
async def get_item(item_id: int, item: Item):
item_db = await Item.objects.get(pk=item_id)
return await item_db.update(**item.model_dump())
@app.delete("/items/{item_id}")
async def delete_item(item_id: int, item: Item = None):
if item:
return {"deleted_rows": await item.delete()}
item_db = await Item.objects.get(pk=item_id)
return {"deleted_rows": await item_db.delete()}
```
!!!note
Note how ormar `Model` methods like save() are available straight out of the box after fastapi initializes it for you.
!!!note
Note that you can return a `Model` (or list of `Models`) directly - fastapi will jsonize it for you
### Test the application
#### Run fastapi
If you want to run this script and play with fastapi swagger install uvicorn first
`pip install uvicorn`
And launch the fastapi.
`uvicorn main:app --reload` (replace `main` with the name of the module that defines `app`)
Now you can navigate to your browser (by default fastapi address is `127.0.0.1:8000/docs`) and play with the api.
!!!info
You can read more about running fastapi in [fastapi][fastapi] docs.
#### Test with pytest
Here you have a sample test that will prove that everything works as intended.
Be sure to create the tables first. If you are using pytest you can use a fixture.
```python
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.create_all(engine)
yield
metadata.drop_all(engine)
```
```python
# here is a sample test to check the working of the ormar with fastapi
from starlette.testclient import TestClient
def test_all_endpoints():
# note that TestClient is only sync, don't use async here
client = TestClient(app)
# note that you need to connect to database manually
# or use client as contextmanager during tests
with client as client:
response = client.post("/categories/", json={"name": "test cat"})
category = response.json()
response = client.post(
"/items/", json={"name": "test", "id": 1, "category": category}
)
item = Item(**response.json())
assert item.pk is not None
response = client.get("/items/")
items = [Item(**item) for item in response.json()]
assert items[0] == item
item.name = "New name"
response = client.put(f"/items/{item.pk}", json=item.model_dump())
assert response.json() == item.model_dump()
response = client.get("/items/")
items = [Item(**item) for item in response.json()]
assert items[0].name == "New name"
response = client.delete(f"/items/{item.pk}", json=item.model_dump())
assert response.json().get("deleted_rows", "__UNDEFINED__") != "__UNDEFINED__"
response = client.get("/items/")
items = response.json()
assert len(items) == 0
```
!!!tip
If you want to see more test cases and how to test ormar/fastapi see [tests][tests] directory in the github repo
!!!info
You can read more on testing fastapi in [fastapi][fastapi] docs.
[fastapi]: https://fastapi.tiangolo.com/
[models]: ../models/index.md
[database initialization]: ../models/migrations.md
[tests]: https://github.com/collerek/ormar/tree/master/tests
collerek-ormar-c09209a/docs/fastapi/requests.md 0000664 0000000 0000000 00000014566 15130200524 0021541 0 ustar 00root root 0000000 0000000 # Request
You can use ormar Models in `fastapi` request `Body` parameters instead of pydantic models.
You can of course also mix `ormar.Model`s with `pydantic` ones if you need to.
One of the most common tasks in requests is excluding certain fields that you do not want to include in the payload you send to API.
This can be achieved in several ways in `ormar` so below you can review your options and select the one most suitable for your situation.
## Excluding fields in request
### Optional fields
Note that each field that is optional is not required, that means that Optional fields can be skipped both in response and in requests.
Field is not required if (any/many/all) of following:
* Field is marked with `nullable=True`
* Field has `default` value or function provided, i.e. `default="Test"`
* Field has a `server_default` value set
* Field is an `autoincrement=True` `primary_key` field (note that `ormar.Integer` `primary_key` is `autoincrement` by default)
Example:
```python
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database
)
class User(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
email: str = ormar.String(max_length=255)
password: str = ormar.String(max_length=255)
first_name: str = ormar.String(max_length=255, nullable=True)
last_name: str = ormar.String(max_length=255)
category: str = ormar.String(max_length=255, default="User")
```
In above example fields `id` (is an `autoincrement` `Integer`), `first_name` ( has `nullable=True`) and `category` (has `default`) are optional and can be skipped in response and model will still validate.
If the field is nullable you don't have to include it in payload during creation as well as in response, so given example above you can:
!!!Warning
Note that although you do not have to pass the optional field, you still **can** do it.
And if someone will pass a value it will be used later unless you take measures to prevent it.
```python
# note that app is an FastApi app
@app.post("/users/", response_model=User) # here we use ormar.Model in response
async def create_user(user: User): # here we use ormar.Model in request parameter
return await user.save()
```
That means that if you do not pass i.e. `first_name` in request it will validate correctly (as field is optional), `None` will be saved in the database.
### Generate `pydantic` model from `ormar.Model`
Since task of excluding fields is so common `ormar` has a special way to generate `pydantic` models from existing `ormar.Models` without you needing to retype all the fields.
That method is `get_pydantic()` method available on all models classes.
```python
# generate a tree of models without password on User and without priority on nested Category
RequestUser = User.get_pydantic(exclude={"password": ..., "category": {"priority"}})
@app.post("/users3/", response_model=User) # here you can also use both ormar/pydantic
async def create_user3(user: RequestUser): # use the generated model here
# note how now user is pydantic and not ormar Model so you need to convert
return await User(**user.model_dump()).save()
```
!!!Note
To see more examples and read more visit [get_pydantic](../models/methods.md#get_pydantic) part of the documentation.
!!!Warning
The `get_pydantic` method generates all models in a tree of nested models according to an algorithm that allows to avoid loops in models (same algorithm that is used in `model_dump()`, `select_all()` etc.)
That means that nested models won't have reference to parent model (by default ormar relation is bidirectional).
Note also that if given model exists in a tree more than once it will be doubled in pydantic models (each occurrence will have separate own model). That way you can exclude/include different fields on different leafs of the tree.
#### Mypy and type checking
Note that assigning a function as a python type passes at runtime (as it's not checked) the static type checkers like mypy will complain.
Although result of the function call will always be the same for given model using a dynamically created type is not allowed.
Therefore you have two options:
First one is to simply add `# type: ignore` to skip the type checking
```python
RequestUser = User.get_pydantic(exclude={"password": ..., "category": {"priority"}})
@app.post("/users3/", response_model=User)
async def create_user3(user: RequestUser): # type: ignore
# note how now user is not ormar Model so you need to convert
return await User(**user.model_dump()).save()
```
The second one is a little bit more hacky and utilizes a way in which fastapi extract function parameters.
You can overwrite the `__annotations__` entry for given param.
```python
RequestUser = User.get_pydantic(exclude={"password": ..., "category": {"priority"}})
# do not use the app decorator
async def create_user3(user: User): # use ormar model here
return await User(**user.model_dump()).save()
# overwrite the function annotations entry for user param with generated model
create_user3.__annotations__["user"] = RequestUser
# manually call app functions (app.get, app.post etc.) and pass your function reference
app.post("/categories/", response_model=User)(create_user3)
```
Note that this will cause mypy to "think" that user is an ormar model but since in request it doesn't matter that much (you pass jsonized dict anyway and you need to convert before saving).
That still should work fine as generated model will be a subset of fields, so all needed fields will validate, and all not used fields will fail at runtime.
### Separate `pydantic` model
The final solution is to just create separate pydantic model manually.
That works exactly the same as with normal fastapi application, so you can have different models for response and requests etc.
Sample:
```python
import pydantic
class UserCreate(pydantic.BaseModel):
model_config = pydantic.ConfigDict(from_attributes=True)
email: str
first_name: str
last_name: str
password: str
@app.post("/users3/", response_model=User) # use ormar model here (but of course you CAN use pydantic also here)
async def create_user3(user: UserCreate): # use pydantic model here
# note how now request param is a pydantic model and not the ormar one
# so you need to parse/convert it to ormar before you can use database
return await User(**user.model_dump()).save()
```
collerek-ormar-c09209a/docs/fastapi/response.md 0000664 0000000 0000000 00000024026 15130200524 0021514 0 ustar 00root root 0000000 0000000 # Response
You can use ormar Models in `fastapi` response_model instead of pydantic models.
You can of course also mix `ormar.Model`s with `pydantic` ones if you need to.
One of the most common tasks in responses is excluding certain fields that you do not want to include in response data.
This can be achieved in several ways in `ormar` so below you can review your options and select the one most suitable for your situation.
## Excluding fields in response
### Optional fields
Note that each field that is optional is not required, that means that Optional fields can be skipped both in response and in requests.
Field is not required if (any/many/all) of following:
* Field is marked with `nullable=True`
* Field has `default` value or function provided, i.e. `default="Test"`
* Field has a `server_default` value set
* Field is an `autoincrement=True` `primary_key` field (note that `ormar.Integer` `primary_key` is `autoincrement` by default)
Example:
```python
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database
)
class User(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
email: str = ormar.String(max_length=255)
password: str = ormar.String(max_length=255)
first_name: str = ormar.String(max_length=255, nullable=True)
last_name: str = ormar.String(max_length=255)
category: str = ormar.String(max_length=255, default="User")
```
In above example fields `id` (is an `autoincrement` `Integer`), `first_name` ( has `nullable=True`) and `category` (has `default`) are optional and can be skipped in response and model will still validate.
If the field is nullable you don't have to include it in payload during creation as well as in response, so given example above you can:
```python
# note that app is an FastApi app
@app.post("/users/", response_model=User) # here we use ormar.Model in response
async def create_user(user: User): # here we use ormar.Model in request parameter
return await user.save()
```
That means that if you do not pass i.e. `first_name` in request it will validate correctly (as field is optional), save in the database and return the saved record without this field (which will also pass validation).
!!!Note
Note that although you do not pass the **field value**, the **field itself** is still present in the `response_model` that means it **will be present in response data** and set to `None`.
If you want to fully exclude the field from the result read on.
### FastApi `response_model_exclude`
Fastapi has `response_model_exclude` that accepts a set (or a list) of field names.
That has its limitations, as `ormar` and `pydantic` accept also dictionaries in which you can set exclude/include columns also on nested models (more on this below)
!!!Warning
Note that you cannot exclude required fields when using `response_model` as it will fail during validation.
```python
@app.post("/users/", response_model=User, response_model_exclude={"password"})
async def create_user(user: User):
return await user.save()
```
Above endpoint can be queried like this:
```python
from starlette.testclient import TestClient
client = TestClient(app)
with client as client:
# note there is no pk
user = {
"email": "test@domain.com",
"password": "^*^%A*DA*IAAA",
"first_name": "John",
"last_name": "Doe",
}
response = client.post("/users/", json=user)
# note that the excluded field is fully gone from response
assert "password" not in response.json()
# read the response and initialize model out of it
created_user = User(**response.json())
# note pk is populated by autoincrement
assert created_user.pk is not None
# note that password is missing in initialized model too
assert created_user.password is None
```
!!!Note
Note how in above example `password` field is fully gone from the response data.
Note that you can use this method only for non-required fields.
#### Nested models excludes
Despite the fact that `fastapi` allows passing only set of field names, so simple excludes, when using `response_model_exclude`, ormar is smarter.
In `ormar` you can exclude nested models using two types of notations.
One is a dictionary with nested fields that represents the model tree structure, and the second one is double underscore separated path of field names.
Assume for a second that our user's category is a separate model:
```python
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database
)
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=255)
priority: int = ormar.Integer(nullable=True)
class User(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
email: str = ormar.String(max_length=255)
password: str = ormar.String(max_length=255)
first_name: str = ormar.String(max_length=255, nullable=True)
last_name: str = ormar.String(max_length=255)
category: Optional[Category] = ormar.ForeignKey(Category, related_name="categories")
```
If you want to exclude `priority` from category in your response, you can still use fastapi parameter.
```python
@app.post("/users/", response_model=User, response_model_exclude={"category__priority"})
async def create_user(user: User):
return await user.save()
```
Note that you can go in deeper models with double underscore, and if you want to exclude multiple fields from nested model you need to prefix them with full path.
In example `response_model_exclude={"category__priority", "category__other_field", "category__nested_model__nested_model_field"}` etc.
!!!Note
To read more about possible excludes and how to structure your exclude dictionary or set visit [fields](../queries/select-columns.md#fields) section of documentation
!!!Note
Note that apart from `response_model_exclude` parameter `fastapi` supports also other parameters inherited from `pydantic`.
All of them works also with ormar, but can have some nuances so best to read [dict](../models/methods.md#dict) part of the documentation.
### Exclude in `Model.model_dump()`
Alternatively you can just return a dict from `ormar.Model` by using its `model_dump()` method.
Like this you can also set exclude/include as dict and exclude fields on nested models too.
!!!Warning
Not using a `response_model` will cause api documentation having no response example and schema since in theory response can have any format.
```python
@app.post("/users2/", response_model=User)
async def create_user2(user: User):
user = await user.save()
return user.model_dump(exclude={'password'})
# could be also something like return user.model_dump(exclude={'category': {'priority'}}) to exclude category priority
```
!!!Note
Note that above example will nullify the password field even if you pass it in request, but the **field will be still there** as it's part of the response schema, the value will be set to `None`.
If you want to fully exclude the field with this approach simply don't use `response_model` and exclude in Model's model_dump()
Alternatively you can just return a dict from ormar model.
Like this you can also set exclude/include as dict and exclude fields on nested models.
!!!Note
In theory you lose validation of response here but since you operate on `ormar.Models` the response data have already been validated after db query (as ormar model is pydantic model).
So if you skip `response_model` altogether you can do something like this:
```python
@app.post("/users4/") # note no response_model
async def create_user4(user: User):
user = await user.save()
return user.model_dump(exclude={'last_name'})
```
!!!Note
Note that when you skip the response_model you can now **exclude also required fields** as the response is no longer validated after being returned.
The cost of this solution is that you also lose the api documentation, as the response schema is unknown from the fastapi perspective.
### Generate `pydantic` model from `ormar.Model`
Since task of excluding fields is so common `ormar` has a special way to generate `pydantic` models from existing `ormar.Models` without you needing to retype all the fields.
That method is `get_pydantic()` method available on all models classes.
```python
# generate a tree of models without password on User and without priority on nested Category
ResponseUser = User.get_pydantic(exclude={"password": ..., "category": {"priority"}})
@app.post("/users3/", response_model=ResponseUser) # use the generated model here
async def create_user3(user: User):
return await user.save()
```
!!!Note
To see more examples and read more visit [get_pydantic](../models/methods.md#get_pydantic) part of the documentation.
!!!Warning
The `get_pydantic` method generates all models in a tree of nested models according to an algorithm that allows to avoid loops in models (same algorithm that is used in `model_dump()`, `select_all()` etc.)
That means that nested models won't have reference to parent model (by default ormar relation is bidirectional).
Note also that if given model exists in a tree more than once it will be doubled in pydantic models (each occurrence will have separate own model). That way you can exclude/include different fields on different leafs of the tree.
### Separate `pydantic` model
The final solution is to just create separate pydantic model manually.
That works exactly the same as with normal fastapi application so you can have different models for response and requests etc.
Sample:
```python
import pydantic
class UserBase(pydantic.BaseModel):
model_config = pydantic.ConfigDict(from_attributes=True)
email: str
first_name: str
last_name: str
@app.post("/users3/", response_model=UserBase) # use pydantic model here
async def create_user3(user: User): #use ormar model here (but of course you CAN use pydantic also here)
return await user.save()
```
collerek-ormar-c09209a/docs/fields/ 0000775 0000000 0000000 00000000000 15130200524 0017147 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/fields/common-parameters.md 0000664 0000000 0000000 00000015652 15130200524 0023133 0 ustar 00root root 0000000 0000000 # Common Parameters
All `Field` types have a set of common parameters.
## primary_key
`primary_key`: `bool` = `False` -> by default False.
Sets the primary key column on a table, foreign keys always refer to the pk of the `Model`.
Used in sql only.
## autoincrement
`autoincrement`: `bool` = `primary_key and type == int` -> defaults to True if column is a primary key and of type Integer, otherwise False.
Can be only used with int/bigint fields.
If a field has autoincrement it becomes optional.
Used both in sql and pydantic (changes pk field to optional for autoincrement).
## nullable
`nullable`: `bool` = `False` -> defaults to False for all fields except relation fields.
Automatically changed to True if the user provides one of the following:
* `default` value or function is provided
* `server_default` value or function is provided
* `autoincrement` is set on `Integer` `primary_key` field
Specifies if field is optional or required, used both with sql and pydantic.
By default, used for both `pydantic` and `sqlalchemy` as those are the most common settings:
* `nullable=False` - means database column is not null and field is required in pydantic
* `nullable=True` - means database column is null and field is optional in pydantic
If you want to set different setting for pydantic and the database see `sql_nullable` below.
!!!note
By default all `ForeignKeys` are also nullable, meaning the related `Model` is not required.
If you change the `ForeignKey` column to `nullable=False`, it becomes required.
## sql_nullable
`sql_nullable`: `bool` = `nullable` -> defaults to the value of nullable (described above).
Specifies if field is not null or allows nulls in the database only.
Use this setting in combination with `nullable` only if you want to set different options on pydantic model and in the database.
A sample usage might be i.e. making field not null in the database, but allow this field to be nullable in pydantic (i.e. with `server_default` value).
That will prevent the updates of the field to null (as with `server_default` set you cannot insert null values already as the default value would be used)
## default
`default`: `Any` = `None` -> defaults to None.
A default value used if no other value is passed.
In sql invoked on an insert, used during pydantic model definition.
If the field has a default value it becomes optional.
You can pass a static value or a Callable (function etc.)
Used both in sql and pydantic.
Sample usage:
```python
# note the distinction between passing a value and Callable pointer
# value
name: str = ormar.String(max_length=200, default="Name")
# note that when you call a function it's not a pointer to Callable
# a definition like this will call the function at startup and assign
# the result of the function to the default, so it will be constant value for all instances
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now())
# if you want to pass Callable reference (note that it cannot have arguments)
# note lack of the parenthesis -> ormar will call this function for you on each model
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
# Callable can be a function, builtin, class etc.
```
## server default
`server_default`: `Any` = `None` -> defaults to None.
A default value used if no other value is passed.
In sql invoked on the server side so you can pass i.e. sql function (like now() or query/value wrapped in sqlalchemy text() clause).
If the field has a server_default value it becomes optional.
You can pass a static value or a Callable (function etc.)
Used in sql only.
Sample usage:
```Python hl_lines="20-22"
--8<-- "../docs_src/fields/docs004.py"
```
!!!warning
`server_default` accepts `str`, `sqlalchemy.sql.elements.ClauseElement` or `sqlalchemy.sql.elements.TextClause`
so if you want to set i.e. Integer value you need to wrap it in `sqlalchemy.text()` function like above
!!!tip
You can pass also valid sql (dialect specific) wrapped in `sqlalchemy.text()`
For example `func.now()` above could be exchanged for `text('(CURRENT_TIMESTAMP)')` for sqlite backend
!!!info
`server_default` is passed straight to sqlalchemy table definition so you can read more in [server default][server default] sqlalchemy documentation
## name
`name`: `str` = `None` -> defaults to None
Allows you to specify a column name alias to be used.
Useful for existing database structures that use a reserved keyword, or if you would like to use database name that is different from `ormar` field name.
Take for example the snippet below.
`from`, being a reserved word in python, will prevent you from creating a model with that column name.
Changing the model name to `from_` and adding the parameter `name='from'` will cause ormar to use `from` for the database column name.
```python
#... rest of Model cut for brevity
from_: str = ormar.String(max_length=15, name='from')
```
Similarly, you can change the foreign key column names in database, while keeping the desired relation name in ormar:
```python
# ... rest of Model cut for brevity
album: Optional[Album] = ormar.ForeignKey(Album, name="album_id")
```
## index
`index`: `bool` = `False` -> by default False,
Sets the index on a table's column.
Used in sql only.
## unique
`unique`: `bool` = `False`
Sets the unique constraint on a table's column.
Used in sql only.
## overwrite_pydantic_type
By default, ormar uses predefined pydantic field types that it applies on model creation (hence the type hints are optional).
If you want to, you can apply your own type, that will be **completely** replacing the build in one.
So it's on you as a user to provide a type that is valid in the context of given ormar field type.
!!!warning
Note that by default you should use build in arguments that are passed to underlying pydantic field.
You can check what arguments are supported in field types section or in [pydantic](https://pydantic-docs.helpmanual.io/usage/schema/#field-customisation) docs.
!!!danger
Setting a wrong type of pydantic field can break your model, so overwrite it only when you know what you are doing.
As it's easy to break functionality of ormar the `overwrite_pydantic_type` argument is not available on relation fields!
```python
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database
)
# sample overwrites
class OverwriteTest(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="overwrites")
id: int = ormar.Integer(primary_key=True)
my_int: str = ormar.Integer(overwrite_pydantic_type=PositiveInt)
constraint_dict: Json = ormar.JSON(
overwrite_pydantic_type=Optional[Json[Dict[str, int]]])
```
[relations]: ../relations/index.md
[queries]: ../queries/index.md
[pydantic]: https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
[server default]: https://docs.sqlalchemy.org/en/13/core/defaults.html#server-invoked-ddl-explicit-default-expressions
collerek-ormar-c09209a/docs/fields/encryption.md 0000664 0000000 0000000 00000013131 15130200524 0021662 0 ustar 00root root 0000000 0000000 # Encryption
`ormar` provides you with a way to encrypt a field in the database only.
Provided encryption backends allow for both one-way encryption (`HASH` backend) as
well as both-way encryption/decryption (`FERNET` backend).
!!!warning
Note that in order for encryption to work you need to install optional `cryptography` package.
You can do it manually `pip install cryptography` or with ormar by `pip install ormar[crypto]`
!!!warning
Note that adding `encrypt_backend` changes the database column type to `TEXT`,
which needs to be reflected in db either by migration (`alembic`) or manual change
## Defining a field encryption
To encrypt a field you need to pass at minimum `encrypt_secret` and `encrypt_backend` parameters.
```python hl_lines="10-12"
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database
)
class Filter(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100,
encrypt_secret="secret123",
encrypt_backend=ormar.EncryptBackends.FERNET)
```
!!!warning
You can encrypt all `Field` types apart from `primary_key` column and relation
columns (`ForeignKey` and `ManyToMany`). Check backends details for more information.
## Available backends
### HASH
HASH is a one-way hash (like for password), never decrypted on retrieval
To set it up pass appropriate backend value.
```python
... # rest of model definition
password: str = ormar.String(max_length=128,
encrypt_secret="secret123",
encrypt_backend=ormar.EncryptBackends.HASH)
```
Note that since this backend never decrypt the stored value it's only applicable for
`String` fields. Used hash is a `sha512` hash, so the field length has to be >=128.
!!!warning
Note that in `HASH` backend you can filter by full value but filters like `contain` will not work as comparison is make on encrypted values
!!!note
Note that provided `encrypt_secret` is first hashed itself and used as salt, so in order to
compare to stored string you need to recreate this steps. The `order_by` will not work as encrypted strings are compared so you cannot reliably order by.
```python
class Hash(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="hashes")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=128,
encrypt_secret="udxc32",
encrypt_backend=ormar.EncryptBackends.HASH)
await Hash(name='test1').save()
# note the steps to recreate the stored value
# you can use also cryptography package instead of hashlib
secret = hashlib.sha256("udxc32".encode()).digest()
secret = base64.urlsafe_b64encode(secret)
hashed_test1 = hashlib.sha512(secret + 'test1'.encode()).hexdigest()
# full value comparison works
hash1 = await Hash.objects.get(name='test1')
assert hash1.name == hashed_test1
# but partial comparison does not (hashed strings are compared)
with pytest.raises(NoMatch):
await Hash.objects.get(name__icontains='test')
```
### FERNET
FERNET is a two-way encrypt/decrypt backend
To set it up pass appropriate backend value.
```python
... # rest of model definition
year: int = ormar.Integer(encrypt_secret="secret123",
encrypt_backend=ormar.EncryptBackends.FERNET)
```
Value is encrypted on the way to the database and decrypted on the way out. Can be used on all types,
as the returned value is parsed to corresponding python type.
!!!warning
Note that in `FERNET` backend you lose the `filter`ing possibility altogether as part of the encrypted value is a timestamp.
The same goes for `order_by` as encrypted strings are compared so you cannot reliably order by.
```python
class Filter(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100,
encrypt_secret="asd123",
encrypt_backend=ormar.EncryptBackends.FERNET)
await Filter(name='test1').save()
await Filter(name='test1').save()
# values are properly encrypted and later decrypted
filters = await Filter.objects.all()
assert filters[0].name == filters[1].name == 'test1'
# but you cannot filter at all since part of the fernet hash is a timestamp
# which means that even if you encrypt the same string 2 times it will be different
with pytest.raises(NoMatch):
await Filter.objects.get(name='test1')
```
## Custom Backends
If you wish to support other type of encryption (i.e. AES) you can provide your own `EncryptionBackend`.
To setup a backend all you need to do is subclass `ormar.fields.EncryptBackend` class and provide required backend.
Sample dummy backend (that does nothing) can look like following:
```python
class DummyBackend(ormar.fields.EncryptBackend):
def _initialize_backend(self, secret_key: bytes) -> None:
pass
def encrypt(self, value: Any) -> str:
return value
def decrypt(self, value: Any) -> str:
return value
```
To use this backend set `encrypt_backend` to `CUSTOM` and provide your backend as
argument by `encrypt_custom_backend`.
```python
class Filter(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100,
encrypt_secret="secret123",
encrypt_backend=ormar.EncryptBackends.CUSTOM,
encrypt_custom_backend=DummyBackend
)
```
collerek-ormar-c09209a/docs/fields/field-types.md 0000664 0000000 0000000 00000015167 15130200524 0021730 0 ustar 00root root 0000000 0000000 # Fields
There are 12 basic model field types and a special `ForeignKey` and `ManyToMany` fields to establish relationships between models.
!!!tip
For explanation of `ForeignKey` and `ManyToMany` fields check [relations][relations].
Each of the `Fields` has assigned both `sqlalchemy` column class and python type that is used to create `pydantic` model.
## Fields Types
### String
`String(max_length: int,
min_length: int = None,
regex: str = None,)` has a required `max_length` parameter.
* Sqlalchemy column: `sqlalchemy.String`
* Type (used for pydantic): `str`
!!!tip
For explanation of other parameters check [pydantic](https://pydantic-docs.helpmanual.io/usage/schema/#field-customisation) documentation.
### Text
`Text()` has no required parameters.
* Sqlalchemy column: `sqlalchemy.Text`
* Type (used for pydantic): `str`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### Boolean
`Boolean()` has no required parameters.
* Sqlalchemy column: `sqlalchemy.Boolean`
* Type (used for pydantic): `bool`
### Integer
`Integer(minimum: int = None,
maximum: int = None,
multiple_of: int = None)` has no required parameters.
* Sqlalchemy column: `sqlalchemy.Integer`
* Type (used for pydantic): `int`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### BigInteger
`BigInteger(minimum: int = None,
maximum: int = None,
multiple_of: int = None)` has no required parameters.
* Sqlalchemy column: `sqlalchemy.BigInteger`
* Type (used for pydantic): `int`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### SmallInteger
`SmallInteger(minimum: int = None,
maximum: int = None,
multiple_of: int = None)` has no required parameters.
* Sqlalchemy column: `sqlalchemy.SmallInteger`
* Type (used for pydantic): `int`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### Float
`Float(minimum: float = None,
maximum: float = None,
multiple_of: int = None)` has no required parameters.
* Sqlalchemy column: `sqlalchemy.Float`
* Type (used for pydantic): `float`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### Decimal
`Decimal(minimum: float = None,
maximum: float = None,
multiple_of: int = None,
precision: int = None,
scale: int = None,
max_digits: int = None,
decimal_places: int = None)` has no required parameters
You can use either `precision` and `scale` parameters or `max_digits` and `decimal_places`.
* Sqlalchemy column: `sqlalchemy.DECIMAL`
* Type (used for pydantic): `decimal.Decimal`
!!!tip
For explanation of other parameters check [pydantic][pydantic] documentation.
### Date
`Date()` has no required parameters.
* Sqlalchemy column: `sqlalchemy.Date`
* Type (used for pydantic): `datetime.date`
### Time
`Time(timezone: bool = False)` has no required parameters.
You can pass `timezone=True` for timezone aware database column.
* Sqlalchemy column: `sqlalchemy.Time`
* Type (used for pydantic): `datetime.time`
### DateTime
`DateTime(timezone: bool = False)` has no required parameters.
You can pass `timezone=True` for timezone aware database column.
* Sqlalchemy column: `sqlalchemy.DateTime`
* Type (used for pydantic): `datetime.datetime`
### JSON
`JSON()` has no required parameters.
* Sqlalchemy column: `sqlalchemy.JSON`
* Type (used for pydantic): `pydantic.Json`
### LargeBinary
`LargeBinary(max_length)` has a required `max_length` parameter.
* Sqlalchemy column: `sqlalchemy.LargeBinary`
* Type (used for pydantic): `bytes`
LargeBinary length is used in some backend (i.e. mysql) to determine the size of the field,
in other backends it's simply ignored yet in ormar it's always required. It should be max
size of the file/bytes in bytes.
`LargeBinary` has also optional `represent_as_base64_str: bool = False` flag.
When set to `True` `ormar` will auto-convert bytes value to base64 decoded string,
you can also set value by passing a base64 encoded string.
That way you can i.e. set the value by API, even if value is not `utf-8` compatible and would otherwise fail during json conversion.
```python
import base64
... # other imports skipped for brevity
base_ormar_config = ormar.OrmarConfig(
    metadata=metadata,
    database=database,
)
class LargeBinaryStr(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="my_str_blobs")
id: int = ormar.Integer(primary_key=True)
test_binary: str = ormar.LargeBinary(
max_length=100000, represent_as_base64_str=True
)
# set non utf-8 compliant value - note this can be passed by api (i.e. fastapi) in json
item = LargeBinaryStr(test_binary=base64.b64encode(b"\xc3\x28").decode())
assert item.test_binary == base64.b64encode(b"\xc3\x28").decode()
# technical note that underlying value is still bytes and will be saved as so
assert item.__dict__["test_binary"] == b"\xc3\x28"
```
### UUID
`UUID(uuid_format: str = 'hex')` has no required parameters.
* Sqlalchemy column: `ormar.UUID` based on `sqlalchemy.CHAR(36)` or `sqlalchemy.CHAR(32)` field (for string or hex format respectively)
* Type (used for pydantic): `uuid.UUID`
`uuid_format` parameters allow 'hex'(default) or 'string' values.
Depending on the format either 32 or 36 char is used in the database.
Sample:
* 'hex' format value = `c616ab438cce49dbbf4380d109251dce` (CHAR(32))
* 'string' value = `c616ab43-8cce-49db-bf43-80d109251dce` (CHAR(36))
When loaded it's always python UUID so you can compare it and compare two formats values between each other.
### Enum
There are two ways to use enums in ormar -> one is a dedicated `Enum` field that uses `sqlalchemy.Enum` column type, while the other is setting `choices` on any field in ormar.
The Enum field uses the database dialect specific Enum column type if it's available, but fallback to varchar if this field type is not available.
The `choices` option always respect the database field type selected.
So which one to use depends on the backend you use and on the column/ data type you want in your Enum field.
#### Enum - Field
`Enum(enum_class=Type[Enum])` has a required `enum_class` parameter.
* Sqlalchemy column: `sqlalchemy.Enum`
* Type (used for pydantic): `Type[Enum]`
[relations]: ../relations/index.md
[queries]: ../queries.md
[pydantic]: https://pydantic-docs.helpmanual.io/usage/schema/#field-customisation
[server default]: https://docs.sqlalchemy.org/en/13/core/defaults.html#server-invoked-ddl-explicit-default-expressions
collerek-ormar-c09209a/docs/fields/pydantic-fields.md 0000664 0000000 0000000 00000011733 15130200524 0022555 0 ustar 00root root 0000000 0000000 # Pydantic only fields
Ormar allows you to declare normal `pydantic` fields in its model, so you have access to
all basic and custom pydantic fields like `str`, `int`, `HttpUrl`, `PaymentCardNumber` etc.
You can even declare fields leading to nested pydantic only Models, not only single fields.
Since those fields are not stored in database (that's the whole point of those fields),
you have to provide a meaningful value for them, either by setting a default one or
providing one during model initialization.
If `ormar` cannot resolve the value for pydantic field it will fail during loading data from the database,
with missing required value for declared pydantic field.
Options to provide a value are described below.
Of course you can combine few or all of them in one model.
## Optional field
If you set a field as `Optional`, it defaults to `None` if not provided and that's
exactly what's going to happen during loading from database.
```python
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
)
class ModelTest(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
number: Optional[PaymentCardNumber]
test = ModelTest(name="Test")
assert test.name == "Test"
assert test.number is None
test.number = "123456789015"
await test.save()
test_check = await ModelTest.objects.get()
assert test_check.name == "Test"
# after load it's back to None
assert test_check.number is None
```
## Field with default value
By setting a default value, this value will be set on initialization and database load.
Note that setting a default to `None` is the same as setting the field to `Optional`.
```python
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
)
class ModelTest(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
url: HttpUrl = "https://www.example.com"
test = ModelTest(name="Test")
assert test.name == "Test"
assert test.url == "https://www.example.com"
test.url = "https://www.sdta.ada.pt"
assert test.url == "https://www.sdta.ada.pt"
await test.save()
test_check = await ModelTest.objects.get()
assert test_check.name == "Test"
# after load it's back to default
assert test_check.url == "https://www.example.com"
```
## Default factory function
By setting a `default_factory` function, this result of the function call will be set
on initialization and each database load.
```python
from pydantic import Field, PaymentCardNumber
# ...
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
)
CARD_NUMBERS = [
"123456789007",
"123456789015",
"123456789023",
"123456789031",
"123456789049",
]
def get_number():
return random.choice(CARD_NUMBERS)
class ModelTest2(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
# note that you do not call the function, just pass reference
number: PaymentCardNumber = Field(default_factory=get_number)
# note that you still CAN provide a value
test = ModelTest2(name="Test2", number="4000000000000002")
assert test.name == "Test2"
assert test.number == "4000000000000002"
await test.save()
test_check = await ModelTest2.objects.get()
assert test_check.name == "Test2"
# after load value is set to be one of the CARD_NUMBERS
assert test_check.number in CARD_NUMBERS
assert test_check.number != test.number
```
## Custom setup in `__init__`
You can provide a value for the field in your `__init__()` method before calling a `super()` init method.
```python
from pydantic import BaseModel
# ...
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
)
class PydanticTest(BaseModel):
aa: str
bb: int
class ModelTest3(ormar.Model):
ormar_config = base_ormar_config.copy()
# provide your custom init function
def __init__(self, **kwargs):
# add value for required field without default value
kwargs["pydantic_test"] = PydanticTest(aa="random", bb=42)
# remember to call ormar.Model init!
super().__init__(**kwargs)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
pydantic_test: PydanticTest
test = ModelTest3(name="Test3")
assert test.name == "Test3"
assert test.pydantic_test.bb == 42
test.pydantic_test.aa = "new value"
assert test.pydantic_test.aa == "new value"
await test.save()
test_check = await ModelTest3.objects.get()
assert test_check.name == "Test3"
# after load it's back to value provided in init
assert test_check.pydantic_test.aa == "random"
```
!!!warning
If you do not provide a value in one of the above ways `ValidationError` will be raised on load from database.
collerek-ormar-c09209a/docs/gen_ref_pages.py 0000664 0000000 0000000 00000001643 15130200524 0021043 0 ustar 00root root 0000000 0000000 """Generate the code reference pages and navigation."""
from pathlib import Path

import mkdocs_gen_files

# Literate-nav builder collecting module-parts -> doc-page entries.
nav = mkdocs_gen_files.Nav()

for source_file in sorted(Path("ormar").rglob("*.py")):
    module_path = source_file.relative_to(".").with_suffix("")
    doc_path = source_file.relative_to("ormar").with_suffix(".md")
    full_doc_path = Path("api", doc_path)

    parts = tuple(module_path.parts)

    if parts[-1] == "__main__":
        # Executable entry points get no API reference page.
        continue
    if parts[-1] == "__init__":
        # Package docs land on the package's index page instead.
        parts = parts[:-1]
        doc_path = doc_path.with_name("index.md")
        full_doc_path = full_doc_path.with_name("index.md")

    nav[parts] = str(doc_path)

    # Emit a stub page holding a mkdocstrings directive for the module.
    with mkdocs_gen_files.open(full_doc_path, "w") as page:
        page.write(f"::: {'.'.join(parts)}")

    # Point the page's "edit" link back at the real source file.
    mkdocs_gen_files.set_edit_path(full_doc_path, source_file)

with mkdocs_gen_files.open("api/SUMMARY.md", "w") as nav_file:
    nav_file.writelines(nav.build_literate_nav())
collerek-ormar-c09209a/docs/index.md 0000664 0000000 0000000 00000062616 15130200524 0017345 0 ustar 00root root 0000000 0000000 # ormar
### Overview
The `ormar` package is an async ORM for Python, with support for **Postgres,
MySQL**, and **SQLite**.
The main benefits of using `ormar` are:
* getting an **async ORM that can be used with async frameworks** (fastapi, starlette etc.)
* getting just **one model to maintain** - you don't have to maintain pydantic and other orm models (sqlalchemy, peewee, gino etc.)
The goal was to create a simple ORM that can be **used directly (as request and response models) with [`fastapi`][fastapi]** that bases its data validation on pydantic.
Ormar - apart from the obvious "ORM" in name - gets its name from _ormar_ in Swedish which means _snakes_, and _ormar(e)_ in Croatian which means _cabinet_.
And what's a better name for python ORM than snakes cabinet :)
**If you like ormar remember to star the repository in [github](https://github.com/collerek/ormar)!**
The bigger community we build, the easier it will be to catch bugs and attract contributors ;)
### Documentation
Check out the [documentation][documentation] for details.
**Note that for brevity most of the documentation snippets omit the creation of the database
and scheduling the execution of functions for asynchronous run.**
If you want more real life examples than in the documentation you can see the [tests][tests] folder,
since they actually have to create and connect to a database in most of the tests.
Yet remember that those are - well - tests and not all solutions are suitable to be used in real life applications.
### Part of the `fastapi` ecosystem
As part of the fastapi ecosystem `ormar` is supported in selected libraries that somehow work with databases.
Ormar remains sql dialect agnostic - so only columns working in all supported backends are implemented.
It's relatively easy to implement columns for specific dialects as extensions of ormar.
Postgres specific columns implementation: [`ormar-postgres-extensions`](https://github.com/tophat/ormar-postgres-extensions)
If you maintain or use a different library and would like it to support `ormar` let us know how we can help.
### Dependencies
Ormar is built with:
* [`sqlalchemy core`][sqlalchemy-core] for query building.
* [`databases`][databases] for cross-database async support.
* [`pydantic`][pydantic] for data validation.
### License
`ormar` is built as open-source software and will remain completely free (MIT license).
As I write open-source code to solve everyday problems in my work or to promote and build strong python
community you can say thank you and buy me a coffee or sponsor me with a monthly amount to help ensure my work remains free and maintained.
### Migrating from `sqlalchemy` and existing databases
If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation
tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually.
**Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar)
or simply `pip install sqlalchemy-to-ormar`
`sqlalchemy-to-ormar` can be used in pair with `sqlacodegen` to auto-map/ generate `ormar` models from existing database, even if you don't use `sqlalchemy` for your project.
### Migrations & Database creation
Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide
database migrations (and you really should for production code).
For tests and basic applications the `sqlalchemy` is more than enough:
```python
# note this is just a partial snippet full working example below
# 1. Imports
import sqlalchemy
import databases
import ormar
# 2. Initialization
DATABASE_URL = "sqlite:///db.sqlite"
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
engine=sqlalchemy.create_engine(DATABASE_URL),
)
# Define models here
# 3. Database creation and tables creation
base_ormar_config.metadata.create_all(base_ormar_config.engine)
```
For a sample configuration of alembic and more information regarding migrations and
database creation visit [migrations][migrations] documentation section.
### Package versions
**ormar is still under development:**
We recommend pinning any dependencies (with i.e. `ormar~=0.9.1`)
`ormar` also follows the release numeration that breaking changes bump the major number,
while other changes and fixes bump minor number, so with the latter you should be safe to
update, yet always read the [releases][releases] docs before.
`example: (0.5.2 -> 0.6.0 - breaking, 0.5.2 -> 0.5.3 - non breaking)`.
### Asynchronous Python
Note that `ormar` is an asynchronous ORM, which means that you have to `await` the calls to
the methods, that are scheduled for execution in an event loop. Python has a builtin module
[`asyncio`][asyncio] that allows you to do just that.
Note that most "normal" python interpreters do not allow execution of `await`
outside of a function (because you actually schedule this function for delayed execution
and don't get the result immediately).
In a modern web framework (like `fastapi`), the framework will handle this for you, but if
you plan to do this on your own you need to perform this manually like described in the
quick start below.
### Quick Start
Note that you can find the same script in examples folder on github.
```python
from typing import Optional
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///db.sqlite"
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL),
engine=sqlalchemy.create_engine(DATABASE_URL),
)
# note that this step is optional -> all ormar cares is a field with name
# ormar_config # and proper parameters, but this way you do not have to repeat
# the same parameters if you use only one database
#
# Note that all type hints are optional
# below is a perfectly valid model declaration
# class Author(ormar.Model):
# ormar_config = base_ormar_config.copy(tablename="authors")
#
# id = ormar.Integer(primary_key=True) # <= notice no field types
# name = ormar.String(max_length=100)
class Author(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="authors")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Book(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="books")
id: int = ormar.Integer(primary_key=True)
author: Optional[Author] = ormar.ForeignKey(Author)
title: str = ormar.String(max_length=100)
year: int = ormar.Integer(nullable=True)
# create the database
# note that in production you should use migrations
# note that this is not required if you connect to existing database
# just to be sure we clear the db before
base_ormar_config.metadata.drop_all(base_ormar_config.engine)
base_ormar_config.metadata.create_all(base_ormar_config.engine)
# all functions below are divided into functionality categories
# note how all functions are defined with async - hence can use await AND needs to
# be awaited on their own
async def create():
# Create some records to work with through QuerySet.create method.
# Note that queryset is exposed on each Model's class as objects
tolkien = await Author.objects.create(name="J.R.R. Tolkien")
await Book.objects.create(author=tolkien, title="The Hobbit", year=1937)
await Book.objects.create(author=tolkien, title="The Lord of the Rings", year=1955)
await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
# alternative creation of object divided into 2 steps
sapkowski = Author(name="Andrzej Sapkowski")
# do some stuff
await sapkowski.save()
# or save() after initialization
await Book(author=sapkowski, title="The Witcher", year=1990).save()
await Book(author=sapkowski, title="The Tower of Fools", year=2002).save()
# to read more about inserting data into the database
# visit: https://collerek.github.io/ormar/queries/create/
async def read():
# Fetch an instance, without loading a foreign key relationship on it.
# Django style
book = await Book.objects.get(title="The Hobbit")
# or python style
book = await Book.objects.get(Book.title == "The Hobbit")
book2 = await Book.objects.first()
# first() fetch the instance with lower primary key value
assert book == book2
# you can access all fields on loaded model
assert book.title == "The Hobbit"
assert book.year == 1937
# when no condition is passed to get()
# it behaves as last() based on primary key column
book3 = await Book.objects.get()
assert book3.title == "The Tower of Fools"
# When you have a relation, ormar always defines a related model for you
# even when all you loaded is a foreign key value like in this example
assert isinstance(book.author, Author)
# primary key is populated from foreign key stored in books table
assert book.author.pk == 1
# since the related model was not loaded all other fields are None
assert book.author.name is None
# Load the relationship from the database when you already have the related model
# alternatively see joins section below
await book.author.load()
assert book.author.name == "J.R.R. Tolkien"
# get all rows for given model
authors = await Author.objects.all()
assert len(authors) == 2
# to read more about reading data from the database
# visit: https://collerek.github.io/ormar/queries/read/
async def update():
# read existing row from db
tolkien = await Author.objects.get(name="J.R.R. Tolkien")
assert tolkien.name == "J.R.R. Tolkien"
tolkien_id = tolkien.id
# change the selected property
tolkien.name = "John Ronald Reuel Tolkien"
# call update on a model instance
await tolkien.update()
# confirm that object was updated
tolkien = await Author.objects.get(name="John Ronald Reuel Tolkien")
assert tolkien.name == "John Ronald Reuel Tolkien"
assert tolkien.id == tolkien_id
# alternatively update data without loading
await Author.objects.filter(name__contains="Tolkien").update(name="J.R.R. Tolkien")
# to read more about updating data in the database
# visit: https://collerek.github.io/ormar/queries/update/
async def delete():
silmarillion = await Book.objects.get(year=1977)
# call delete() on instance
await silmarillion.delete()
# alternatively delete without loading
await Book.objects.delete(title="The Tower of Fools")
# note that when there is no record ormar raises NoMatch exception
try:
await Book.objects.get(year=1977)
except ormar.NoMatch:
print("No book from 1977!")
# to read more about deleting data from the database
# visit: https://collerek.github.io/ormar/queries/delete/
# note that despite the fact that record no longer exists in database
# the object above is still accessible and you can use it (and i.e. save()) again.
tolkien = silmarillion.author
await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
async def joins():
# To join two models use select_related
# Django style
book = await Book.objects.select_related("author").get(title="The Hobbit")
# Python style
book = await Book.objects.select_related(Book.author).get(
Book.title == "The Hobbit"
)
# now the author is already prefetched
assert book.author.name == "J.R.R. Tolkien"
# By default you also get a second side of the relation
# constructed as lowercase source model name +'s' (books in this case)
# you can also provide custom name with parameter related_name
# Django style
author = await Author.objects.select_related("books").all(name="J.R.R. Tolkien")
# Python style
author = await Author.objects.select_related(Author.books).all(
Author.name == "J.R.R. Tolkien"
)
assert len(author[0].books) == 3
# for reverse and many to many relations you can also prefetch_related
# that executes a separate query for each of related models
# Django style
author = await Author.objects.prefetch_related("books").get(name="J.R.R. Tolkien")
# Python style
author = await Author.objects.prefetch_related(Author.books).get(
Author.name == "J.R.R. Tolkien"
)
assert len(author.books) == 3
# to read more about relations
# visit: https://collerek.github.io/ormar/relations/
# to read more about joins and subqueries
# visit: https://collerek.github.io/ormar/queries/joins-and-subqueries/
async def filter_and_sort():
# to filter the query you can use filter() or pass key-value pars to
# get(), all() etc.
# to use special methods or access related model fields use double
# underscore like to filter by the name of the author use author__name
# Django style
books = await Book.objects.all(author__name="J.R.R. Tolkien")
# python style
books = await Book.objects.all(Book.author.name == "J.R.R. Tolkien")
assert len(books) == 3
# filter can accept special methods also separated with double underscore
# to issue sql query ` where authors.name like "%tolkien%"` that is not
# case sensitive (hence small t in Tolkien)
# Django style
books = await Book.objects.filter(author__name__icontains="tolkien").all()
# python style
books = await Book.objects.filter(Book.author.name.icontains("tolkien")).all()
assert len(books) == 3
# to sort use order_by() function of queryset
# to sort decreasing use hyphen before the field name
# same as with filter you can use double underscores to access related fields
# Django style
books = (
await Book.objects.filter(author__name__icontains="tolkien")
.order_by("-year")
.all()
)
# python style
books = (
await Book.objects.filter(Book.author.name.icontains("tolkien"))
.order_by(Book.year.desc())
.all()
)
assert len(books) == 3
assert books[0].title == "The Silmarillion"
assert books[2].title == "The Hobbit"
# to read more about filtering and ordering
# visit: https://collerek.github.io/ormar/queries/filter-and-sort/
async def subset_of_columns():
# to exclude some columns from loading when querying the database
# you can use fields() method
hobbit = await Book.objects.fields(["title"]).get(title="The Hobbit")
# note that fields not included in fields are empty (set to None)
assert hobbit.year is None
assert hobbit.author is None
# selected field is there
assert hobbit.title == "The Hobbit"
# alternatively you can provide columns you want to exclude
hobbit = await Book.objects.exclude_fields(["year"]).get(title="The Hobbit")
# year is still not set
assert hobbit.year is None
# but author is back
assert hobbit.author is not None
# also you cannot exclude primary key column - it's always there
# even if you EXPLICITLY exclude it it will be there
# note that each model have a shortcut for primary_key column which is pk
# and you can filter/access/set the values by this alias like below
assert hobbit.pk is not None
# note that you cannot exclude fields that are not nullable
# (required) in model definition
try:
await Book.objects.exclude_fields(["title"]).get(title="The Hobbit")
except pydantic.ValidationError:
print("Cannot exclude non nullable field title")
# to read more about selecting subset of columns
# visit: https://collerek.github.io/ormar/queries/select-columns/
async def pagination():
# to limit number of returned rows use limit()
books = await Book.objects.limit(1).all()
assert len(books) == 1
assert books[0].title == "The Hobbit"
# to offset number of returned rows use offset()
books = await Book.objects.limit(1).offset(1).all()
assert len(books) == 1
assert books[0].title == "The Lord of the Rings"
# alternatively use paginate that combines both
books = await Book.objects.paginate(page=2, page_size=2).all()
assert len(books) == 2
# note that we removed one book of Sapkowski in delete()
# and recreated The Silmarillion - by default when no order_by is set
# ordering sorts by primary_key column
assert books[0].title == "The Witcher"
assert books[1].title == "The Silmarillion"
# to read more about pagination and number of rows
# visit: https://collerek.github.io/ormar/queries/pagination-and-rows-number/
async def aggregations():
# count:
assert 2 == await Author.objects.count()
# exists
assert await Book.objects.filter(title="The Hobbit").exists()
# maximum
assert 1990 == await Book.objects.max(columns=["year"])
# minimum
assert 1937 == await Book.objects.min(columns=["year"])
# average
assert 1964.75 == await Book.objects.avg(columns=["year"])
# sum
assert 7859 == await Book.objects.sum(columns=["year"])
# to read more about aggregated functions
# visit: https://collerek.github.io/ormar/queries/aggregations/
async def raw_data():
# extract raw data in a form of dicts or tuples
# note that this skips the validation(!) as models are
# not created from parsed data
# get list of objects as dicts
assert await Book.objects.values() == [
{"id": 1, "author": 1, "title": "The Hobbit", "year": 1937},
{"id": 2, "author": 1, "title": "The Lord of the Rings", "year": 1955},
{"id": 4, "author": 2, "title": "The Witcher", "year": 1990},
{"id": 5, "author": 1, "title": "The Silmarillion", "year": 1977},
]
# get list of objects as tuples
assert await Book.objects.values_list() == [
(1, 1, "The Hobbit", 1937),
(2, 1, "The Lord of the Rings", 1955),
(4, 2, "The Witcher", 1990),
(5, 1, "The Silmarillion", 1977),
]
# filter data - note how you always get a list
assert await Book.objects.filter(title="The Hobbit").values() == [
{"id": 1, "author": 1, "title": "The Hobbit", "year": 1937}
]
# select only wanted fields
assert await Book.objects.filter(title="The Hobbit").values(["id", "title"]) == [
{"id": 1, "title": "The Hobbit"}
]
# if you select only one column you could flatten it with values_list
assert await Book.objects.values_list("title", flatten=True) == [
"The Hobbit",
"The Lord of the Rings",
"The Witcher",
"The Silmarillion",
]
# to read more about extracting raw values
# visit: https://collerek.github.io/ormar/queries/aggregations/
async def with_connect(function):
# note that for any other backend than sqlite you actually need to
# connect to the database to perform db operations
async with base_ormar_config.database:
await function()
# note that if you use framework like `fastapi` you shouldn't connect
# in your endpoints but have a global connection pool
# check https://collerek.github.io/ormar/fastapi/ and section with db connection
# gather and execute all functions
# note - normally import should be at the beginning of the file
import asyncio
# note that normally you use gather() function to run several functions
# concurrently but we actually modify the data and we rely on the order of functions
for func in [
create,
read,
update,
delete,
joins,
filter_and_sort,
subset_of_columns,
pagination,
aggregations,
raw_data,
]:
print(f"Executing: {func.__name__}")
asyncio.run(with_connect(func))
# drop the database tables
metadata.drop_all(engine)
```
## Ormar Specification
### QuerySet methods
* `create(**kwargs): -> Model`
* `get(*args, **kwargs): -> Model`
* `get_or_none(*args, **kwargs): -> Optional[Model]`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
* `first(*args, **kwargs): -> Model`
* `update(each: bool = False, **kwargs) -> int`
* `update_or_create(**kwargs) -> Model`
* `bulk_create(objects: List[Model]) -> None`
* `bulk_update(objects: List[Model], columns: List[str] = None) -> None`
* `delete(*args, each: bool = False, **kwargs) -> int`
* `all(*args, **kwargs) -> List[Optional[Model]]`
* `iterate(*args, **kwargs) -> AsyncGenerator[Model]`
* `filter(*args, **kwargs) -> QuerySet`
* `exclude(*args, **kwargs) -> QuerySet`
* `select_related(related: Union[List, str]) -> QuerySet`
* `prefetch_related(related: Union[List, str]) -> QuerySet`
* `limit(limit_count: int) -> QuerySet`
* `offset(offset: int) -> QuerySet`
* `count(distinct: bool = True) -> int`
* `exists() -> bool`
* `max(columns: List[str]) -> Any`
* `min(columns: List[str]) -> Any`
* `avg(columns: List[str]) -> Any`
* `sum(columns: List[str]) -> Any`
* `fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `order_by(columns:Union[List, str]) -> QuerySet`
* `values(fields: Union[List, str, Set, Dict])`
* `values_list(fields: Union[List, str, Set, Dict])`
#### Relation types
* One to many - with `ForeignKey(to: Model)`
* Many to many - with `ManyToMany(to: Model, Optional[through]: Model)`
#### Model fields types
Available Model Fields (with required args - optional ones in docs):
* `String(max_length)`
* `Text()`
* `Boolean()`
* `Integer()`
* `Float()`
* `Date()`
* `Time()`
* `DateTime()`
* `JSON()`
* `BigInteger()`
* `SmallInteger()`
* `Decimal(scale, precision)`
* `UUID()`
* `LargeBinary(max_length)`
* `Enum(enum_class)`
* `Enum` like Field - by passing `choices` to any other Field type
* `EncryptedString` - by passing `encrypt_secret` and `encrypt_backend`
* `ForeignKey(to)`
* `ManyToMany(to)`
### Available fields options
The following keyword arguments are supported on all field types.
* `primary_key: bool`
* `nullable: bool`
* `default: Any`
* `server_default: Any`
* `index: bool`
* `unique: bool`
* `name: str`
All fields are required unless one of the following is set:
* `nullable` - Creates a nullable column. Sets the default to `False`. Read the fields common parameters for details.
* `sql_nullable` - Used to set different setting for pydantic and the database. Sets the default to `nullable` value. Read the fields common parameters for details.
* `default` - Set a default value for the field. **Not available for relation fields**
* `server_default` - Set a default value for the field on server side (like sqlalchemy's `func.now()`). **Not available for relation fields**
* `primary key` with `autoincrement` - When a column is set to primary key and autoincrement is set on this column.
Autoincrement is set by default on int primary keys.
### Available signals
Signals allow you to trigger your function for a given event on a given Model.
* `pre_save`
* `post_save`
* `pre_update`
* `post_update`
* `pre_delete`
* `post_delete`
* `pre_relation_add`
* `post_relation_add`
* `pre_relation_remove`
* `post_relation_remove`
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[databases]: https://github.com/encode/databases
[pydantic]: https://pydantic-docs.helpmanual.io/
[encode/orm]: https://github.com/encode/orm/
[alembic]: https://alembic.sqlalchemy.org/en/latest/
[fastapi]: https://fastapi.tiangolo.com/
[documentation]: https://collerek.github.io/ormar/
[migrations]: https://collerek.github.io/ormar/models/migrations/
[asyncio]: https://docs.python.org/3/library/asyncio.html
[releases]: https://collerek.github.io/ormar/releases/
[tests]: https://github.com/collerek/ormar/tree/master/tests
collerek-ormar-c09209a/docs/install.md 0000664 0000000 0000000 00000002251 15130200524 0017671 0 ustar 00root root 0000000 0000000 ## Installation
Installation is as simple as:
```py
pip install ormar
```
### Dependencies
Ormar uses `databases` for database connectivity, `pydantic` for validation and `sqlalchemy-core` for queries.
All three are installed along with ormar if not already present on your system.
* databases
* pydantic
* sqlalchemy
The required versions are pinned in the pyproject.toml file.
## Optional dependencies
*ormar* has several optional dependencies based on the database backend and features you use:
### Database backend
#### Postgresql
```py
pip install ormar[postgresql]
```
Will install also `asyncpg` and `psycopg2`.
#### Mysql
```py
pip install ormar[mysql]
```
Will install also `aiomysql` and `pymysql`.
#### Sqlite
```py
pip install ormar[sqlite]
```
Will install also `aiosqlite`.
### Orjson
```py
pip install ormar[orjson]
```
Will also install `orjson`, which is much faster than the built-in json parser.
### Crypto
```py
pip install ormar[crypto]
```
Will install also `cryptography` that is required to work with encrypted columns.
### Manual installation of dependencies
Of course, you can also install these requirements manually with `pip install asyncpg` etc.
collerek-ormar-c09209a/docs/migration.md 0000664 0000000 0000000 00000021460 15130200524 0020217 0 ustar 00root root 0000000 0000000 # Migration to 0.20.0 based on pydantic 2.X.X
Version 0.20.0 provides support for pydantic v2.X.X that provides significant speed boost (validation and serialization is written in rust) and cleaner api for developers,
at the same time it drops support for pydantic v.1.X.X. There are changes in `ormar` interface corresponding to changes made in `pydantic`.
## Breaking changes
Migration to version >= 0.20.0 requires several changes in order to work properly.
## `ormar` Model configuration
Instead of defining a `Meta` class now each of the ormar models require an ormar_config parameter that is an instance of the `OrmarConfig` class.
Note that the attribute must be named `ormar_config` and be an instance of the config class.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Album(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "albums"
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
# ormar >= 0.20
class AlbumV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
```
### `OrmarConfig` api/ parameters
The `ormar_config` expose the same set of settings as `Meta` class used to provide.
That means that you can use any of the following parameters initializing the config:
```python
metadata: Optional[sqlalchemy.MetaData]
database: Optional[databases.Database]
engine: Optional[sqlalchemy.engine.Engine]
tablename: Optional[str]
order_by: Optional[List[str]]
abstract: bool
queryset_class: Type[QuerySet]
extra: Extra
constraints: Optional[List[ColumnCollectionConstraint]]
```
### `BaseMeta` equivalent - best practice
Note that to reduce the duplication of code and ease of development it's still recommended to create a base config and provide each of the models with a copy.
OrmarConfig provides a convenient `copy` method for that purpose.
The `copy` method accepts the same parameters as `OrmarConfig` init, so you can overwrite if needed, but by default it will return already existing attributes, except for: `tablename`, `order_by` and `constraints` which by default are cleared.
```python hl_lines="5-8 11 20"
import databases
import ormar
import sqlalchemy
base_ormar_config = ormar.OrmarConfig(
database=databases.Database("sqlite:///db.sqlite"),
metadata=sqlalchemy.MetaData()
)
class AlbumV20(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="albums_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class TrackV20(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="tracks_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
## `choices` Field parameter is no longer supported.
Before version 0.20 you could provide `choices` parameter to any existing ormar Field to limit the accepted values.
This functionality was dropped, and you should use `ormar.Enum` field that was designed for this purpose.
If you want to keep the database field type (e.g. an Integer field) you can always write a custom validator.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Artist(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
country: str = ormar.String(default=False, max_length=50, choices=["UK", "US", "Vietnam", "Colombia"])
# ormar >= 0.20
from enum import Enum
class Country(str, Enum):
UK = "UK"
US = "US"
VIETNAM = "Vietnam"
COLOMBIA = "Colombia"
class ArtistV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="artists_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
country: Country = ormar.Enum(enum_class=Country)
```
## `pydantic_only` Field parameter is no longer supported
`pydantic_only` fields were already deprecated and are removed in v 0.20. Ormar allows defining pydantic fields as in an ordinary pydantic model.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Dish(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "dishes"
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
cook: str = ormar.String(max_length=40, pydantic_only=True, default="sam")
# ormar >= 0.20
class DishV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="dishes_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
cook: str = "sam" # this is normal pydantic field
```
## `property_field` decorator is no longer supported
`property_field` decorator was used to provide a way to pass calculated fields that were included in dictionary/ serialized json representation of the model.
Version 2.X of pydantic introduced such a possibility, so you should now switch to the one native to pydantic.
```python
import databases
import ormar
import sqlalchemy
import pydantic
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Employee(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
first_name: str = ormar.String(max_length=100)
last_name: str = ormar.String(max_length=100)
@ormar.property_field()
def full_name(self) -> str:
return f"{self.first_name} {self.last_name}"
# ormar >= 0.20
class EmployeeV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
)
id: int = ormar.Integer(primary_key=True)
first_name: str = ormar.String(max_length=100)
last_name: str = ormar.String(max_length=100)
@pydantic.computed_field()
def full_name(self) -> str:
return f"{self.first_name} {self.last_name}"
```
## Deprecated methods
All methods listed below are deprecated and will be removed in version 0.30 of `ormar`.
### `dict()` becomes the `model_dump()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
album = Album(name="Dark Side of the Moon")
# ormar < 0.20
album_dict = album.dict()
# ormar >= 0.20
new_album_dict = album.model_dump()
```
Note that parameters remain the same i.e. `include`, `exclude` etc.
### `json()` becomes the `model_dump_json()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
album = Album(name="Dark Side of the Moon")
# ormar < 0.20
album_json= album.json()
# ormar >= 0.20
new_album_dict = album.model_dump_json()
```
Note that parameters remain the same i.e. `include`, `exclude` etc.
### `construct()` becomes the `model_construct()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
params = {
"name": "Dark Side of the Moon",
"favorite": True,
}
# ormar < 0.20
album = Album.construct(**params)
# ormar >= 0.20
album = Album.model_construct(**params)
```
To read more about construct please refer to `pydantic` documentation. collerek-ormar-c09209a/docs/models/ 0000775 0000000 0000000 00000000000 15130200524 0017164 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/models/index.md 0000664 0000000 0000000 00000040464 15130200524 0020625 0 ustar 00root root 0000000 0000000 # Models
## Defining models
By defining an ormar Model you get corresponding **Pydantic model** as well as **Sqlalchemy table** for free.
They are being managed in the background and you do not have to create them on your own.
### Model Class
To build an ormar model you simply need to inherit a `ormar.Model` class.
```Python hl_lines="9"
--8<-- "../docs_src/models/docs001.py"
```
### Defining Fields
Next assign one or more of the [Fields][fields] as a class level variables.
#### Basic Field Types
Each table **has to** have a primary key column, which you specify by setting `primary_key=True` on selected field.
Only one primary key column is allowed.
```Python hl_lines="15-17"
--8<-- "../docs_src/models/docs001.py"
```
!!! warning
Not assigning `primary_key` column or assigning more than one column per `Model` will raise `ModelDefinitionError`
exception.
By default if you assign a primary key to an `Integer` field, the `autoincrement` option is set to true.
You can disable it by passing `autoincrement=False`.
```Python
id: int = ormar.Integer(primary_key=True, autoincrement=False)
```
#### Non Database Fields
Note that if you need a normal pydantic field in your model (used to store value on model or pass around some value) you can define a
field like usual in pydantic.
Fields created like this are added to the `pydantic` model fields -> so are subject to validation according to `Field` type,
also appear in `model_dump()` and `model_dump_json()` result.
The difference is that **those fields are not saved in the database**. So they won't be included in underlying sqlalchemy `columns`,
or `table` variables (check [Internals][Internals] section below to see how you can access those if you need).
Subsequently, pydantic fields won't be included in migrations or any database operation (like `save`, `update` etc.)
Fields like those can be passed around into payload in `fastapi` request and will be returned in `fastapi` response
(of course only if you set their value somewhere in your code as the value is **not** fetched from the db.
If you pass a value in `fastapi` `request` and return the same instance that `fastapi` constructs for you in `request_model`
you should get back exactly same value in `response`.).
!!!warning
pydantic fields always have to be **Optional** and it cannot be changed (otherwise db load validation would fail)
```Python hl_lines="19"
--8<-- "../docs_src/models/docs014.py"
```
If you set pydantic field with `default` parameter and do not pass actual value in request you will always get default value.
Since it can be a function you can set `default=datetime.datetime.now` and get current timestamp each time you call an endpoint etc.
#### Non Database Fields in Fastapi
!!!note
Note, that both pydantic and calculated_fields decorated field can be included/excluded in both `model_dump()` and `fastapi`
response with `include`/`exclude` and `response_model_include`/`response_model_exclude` accordingly.
```python
# <==part of related code removed for clarity==>
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
engine=sqlalchemy.create_engine(DATABASE_URL),
)
class User(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="users2")
id: int = ormar.Integer(primary_key=True)
email: str = ormar.String(max_length=255, nullable=False)
password: str = ormar.String(max_length=255)
first_name: str = ormar.String(max_length=255)
last_name: str = ormar.String(max_length=255)
category: str = ormar.String(max_length=255, nullable=True)
timestamp: datetime.datetime = pydantic.Field(
default=datetime.datetime.now
)
# <==part of related code removed for clarity==>
app = FastAPI()
@app.post("/users/")
async def create_user(user: User):
return await user.save()
# <==part of related code removed for clarity==>
def test_excluding_fields_in_endpoints():
client = TestClient(app)
with client as client:
timestamp = datetime.datetime.now()
user = {
"email": "test@domain.com",
"password": "^*^%A*DA*IAAA",
"first_name": "John",
"last_name": "Doe",
"timestamp": str(timestamp),
}
response = client.post("/users/", json=user)
assert list(response.json().keys()) == [
"id",
"email",
"first_name",
"last_name",
"category",
"timestamp",
]
# returned is the same timestamp
assert response.json().get("timestamp") == str(timestamp).replace(" ", "T")
# <==part of related code removed for clarity==>
```
#### Fields names vs Column names
By default names of the fields will be used for both the underlying `pydantic` model and `sqlalchemy` table.
If for whatever reason you prefer to change the name in the database but keep the name in the model you can do this
with specifying `name` parameter during Field declaration
Here you have a sample model with changed names
```Python hl_lines="18-21"
--8<-- "../docs_src/models/docs008.py"
```
Note that you can also change the ForeignKey column name
```Python hl_lines="34"
--8<-- "../docs_src/models/docs009.py"
```
But for now you cannot change the ManyToMany column names as they go through another Model anyway.
```Python hl_lines="43"
--8<-- "../docs_src/models/docs010.py"
```
### Overwriting the default QuerySet
If you want to customize the queries run by ormar you can define your own queryset class (that extends the ormar `QuerySet`) in your model class, default one is simply the `QuerySet`
You can provide a new class in `ormar_config` of your class as `queryset_class` parameter.
```python
import ormar
from ormar.queryset.queryset import QuerySet
from fastapi import HTTPException
class MyQuerySetClass(QuerySet):
async def first_or_404(self, *args, **kwargs):
    entity = await self.get_or_none(*args, **kwargs)
    if entity is None:
        # in fastapi or starlette
        raise HTTPException(404)
    return entity
class Book(ormar.Model):
ormar_config = base_ormar_config.copy(
queryset_class=MyQuerySetClass,
tablename="book",
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=32)
# when book not found, raise `404` in your view.
book = await Book.objects.first_or_404(name="123")
```
### Type Hints
Note that for better IDE support and mypy checks you can provide type hints.
```Python hl_lines="15-17"
--8<-- "../docs_src/models/docs001.py"
```
Note that type hints are **optional** so perfectly valid `ormar` code can look like this:
```Python hl_lines="15-17"
--8<-- "../docs_src/models/docs012.py"
```
!!!warning
Even if you use type hints **`ormar` does not use them to construct `pydantic` fields!**
Type hints are there only to support static checkers and linting,
`ormar` construct annotations used by `pydantic` from own fields.
### Dependencies
Since ormar depends on [`databases`][databases] and [`sqlalchemy-core`][sqlalchemy-core] for database connection
and table creation you need to assign each `Model` with two special parameters.
#### Databases
One is `Database` instance created with your database url in [sqlalchemy connection string][sqlalchemy connection string] format.
Created instance needs to be passed to every `Model` with `ormar_config` object `database` parameter.
```Python hl_lines="1 5 11"
--8<-- "../docs_src/models/docs001.py"
```
!!! tip
You need to create the `Database` instance **only once** and use it for all models.
You can create several ones if you want to use multiple databases.
#### Sqlalchemy
Second dependency is sqlalchemy `MetaData` instance.
Created instance needs to be passed to every `Model` with `ormar_config` object `metadata` parameter.
```Python hl_lines="3 6 12"
--8<-- "../docs_src/models/docs001.py"
```
!!! tip
You need to create the `MetaData` instance **only once** and use it for all models.
You can create several ones if you want to use multiple databases.
#### Best practice
Note that `ormar` expects the field with name `ormar_config` that is an instance of `OrmarConfig` class.
To ease the config management, the `OrmarConfig` class provide `copy` method.
So instead of providing the same parameters over and over again for all models
you should create a base object and use its copy in all models.
```Python hl_lines="9-12 19 28"
--8<-- "../docs_src/models/docs013.py"
```
### Table Names
By default table name is created from Model class name as lowercase name plus 's'.
You can overwrite this parameter by providing `ormar_config` object's `tablename` argument.
```Python hl_lines="14-16"
--8<-- "../docs_src/models/docs002.py"
```
### Constraints
On a model level you can also set model-wise constraints on sql columns.
Right now only `IndexColumns`, `UniqueColumns` and `CheckColumns` constraints are supported.
!!!note
Note that these constraints should be used only if you want to set a name on a constraint or want to set the index on multiple columns, otherwise the `index` and `unique` properties on ormar fields are preferred.
!!!tip
To read more about columns constraints like `primary_key`, `unique`, `ForeignKey` etc. visit [fields][fields].
#### UniqueColumns
You can set this parameter by providing `ormar_config` object `constraints` argument.
```Python hl_lines="13-16"
--8<-- "../docs_src/models/docs006.py"
```
!!!note
Note that constraints are meant for combination of columns that should be unique.
To set one column as unique use [`unique`](../fields/common-parameters.md#unique) common parameter.
Of course you can set many columns as unique with this param but each of them will be checked separately.
#### IndexColumns
You can set this parameter by providing `ormar_config` object `constraints` argument.
```Python hl_lines="13-16"
--8<-- "../docs_src/models/docs017.py"
```
!!!note
Note that constraints are meant for combination of columns that should be in the index.
To set an index on a single column use the [`index`](../fields/common-parameters.md#index) common parameter.
Of course, you can set many columns as indexes with this param but each of them will be a separate index.
#### CheckColumns
You can set this parameter by providing `ormar_config` object `constraints` argument.
```Python hl_lines="15-20"
--8<-- "../docs_src/models/docs018.py"
```
!!!note
Note that some databases do not actively support check constraints (such as MySQL).
### Pydantic configuration
As each `ormar.Model` is also a `pydantic` model, you might want to tweak the settings of the pydantic configuration.
The way to do this in pydantic is to adjust the settings on the `model_config` dictionary provided to your model, and it works exactly the same for ormar models.
So in order to set your own preferences you need to provide not only the `ormar_config` class but also the `model_config = ConfigDict()` class to your model.
!!!note
To read more about available settings visit the [pydantic](https://pydantic-docs.helpmanual.io/usage/model_config/) config page.
Note that if you do not provide your own configuration, ormar will do it for you.
The default config provided is as follows:
```python
model_config = ConfigDict(validate_assignment=True, ser_json_bytes="base64")
```
So to overwrite setting or provide your own a sample model can look like following:
```Python hl_lines="16"
--8<-- "../docs_src/models/docs016.py"
```
### Extra fields in models
By default `ormar` forbids you to pass extra fields to Model.
If you try to do so the `ModelError` will be raised.
Since the extra fields cannot be saved in the database the default to disallow such fields seems a feasible option.
On the contrary in `pydantic` the default option is to ignore such extra fields, therefore `ormar` provides an `ormar_config.extra` setting to behave in the same way.
To ignore extra fields passed to `ormar` set this setting to `Extra.ignore` instead of default `Extra.forbid`.
Note that `ormar` does not allow accepting extra fields, you can only ignore them or forbid them (raise exception if present)
```python
from ormar import Extra, OrmarConfig
class Child(ormar.Model):
ormar_config = OrmarConfig(
tablename="children",
extra=Extra.ignore # set extra setting to prevent exceptions on extra fields presence
)
id: int = ormar.Integer(name="child_id", primary_key=True)
first_name: str = ormar.String(name="fname", max_length=100)
last_name: str = ormar.String(name="lname", max_length=100)
```
To set the same setting on all models check the [best practices](../models/index.md#best-practice) and the `base_ormar_config` concept.
## Model sort order
When querying the database with a given model, by default the Model is ordered by the `primary_key`
column ascending. If you wish to change the default behaviour you can do it by providing the `order_by`
parameter to the model's `ormar_config` object.
Sample default ordering (not specified - so by primary key):
```python
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
)
# default sort by column id ascending
class Author(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="authors",
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
Modified
```python hl_lines="9"
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
)
# now default sort by name descending
class Author(ormar.Model):
ormar_config = base_ormar_config.copy(
order_by=["-name"],
tablename="authors",
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
## Model Initialization
There are two ways to create and persist the `Model` instance in the database.
If you plan to modify the instance in the later execution of your program you can initiate your `Model` as a normal class and later await a `save()` call.
```Python hl_lines="25-26"
--8<-- "../docs_src/models/docs007.py"
```
If you want to initiate your `Model` and at the same time save it in the database use a QuerySet's method `create()`.
For creating multiple objects at once a `bulk_create()` QuerySet's method is available.
Each model has a `QuerySet` initialised as `objects` parameter
```Python hl_lines="28"
--8<-- "../docs_src/models/docs007.py"
```
!!!info
To read more about `QuerySets` (including bulk operations) and available methods visit [queries][queries]
## `Model` save status
Each model instance is a separate python object and they do not know anything about each other.
```python
track1 = await Track.objects.get(name='The Bird')
track2 = await Track.objects.get(name='The Bird')
assert track1 == track2 # True
track1.name = 'The Bird2'
await track1.save()
assert track1.name == track2.name # False
# track2 does not update and knows nothing about track1
```
The objects itself have a saved status, which is set as following:
* Model is saved after `save/update/load/upsert` method on model
* Model is saved after `create/get/first/all/get_or_create/update_or_create` method
* Model is saved when passed to `bulk_update` and `bulk_create`
* Model is saved after `adding/removing` `ManyToMany` related objects (through model instance auto saved/deleted)
* Model is **not** saved after change of any own field (including `pk` as `Model.pk` alias)
* Model is **not** saved after adding/removing `ForeignKey` related object (fk column not saved)
* Model is **not** saved after instantiation with `__init__` (w/o `QuerySet.create` or before calling `save`)
You can check if model is saved with `ModelInstance.saved` property
[fields]: ../fields/field-types.md
[relations]: ../relations/index.md
[queries]: ../queries/index.md
[pydantic]: https://pydantic-docs.helpmanual.io/
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[sqlalchemy-metadata]: https://docs.sqlalchemy.org/en/13/core/metadata.html
[databases]: https://github.com/encode/databases
[sqlalchemy connection string]: https://docs.sqlalchemy.org/en/13/core/engines.html#database-urls
[sqlalchemy table creation]: https://docs.sqlalchemy.org/en/13/core/metadata.html#creating-and-dropping-database-tables
[alembic]: https://alembic.sqlalchemy.org/en/latest/tutorial.html
[save status]: ../models/index/#model-save-status
[Internals]: ../models/internals.md
collerek-ormar-c09209a/docs/models/inheritance.md 0000664 0000000 0000000 00000040577 15130200524 0022014 0 ustar 00root root 0000000 0000000 # Inheritance
Out of various types of ORM models inheritance `ormar` currently supports two of them:
* **Mixins**
* **Concrete table inheritance** (with parents set to `abstract=True`)
## Types of inheritance
The short summary of different types of inheritance:
* **Mixins [SUPPORTED]** - don't subclass `ormar.Model`, just define fields that are
later used on different models (like `created_date` and `updated_date` on each model),
only actual models create tables, but those fields from mixins are added
* **Concrete table inheritance [SUPPORTED]** - means that parent is marked as abstract
and each child has its own table with columns from a parent and own child columns, kind
of similar to Mixins but parent also is a Model
* **Single table inheritance [NOT SUPPORTED]** - means that only one table is created
with fields that are combination/sum of the parent and all children models but child
models use only subset of column in db (all parent and own ones, skipping the other
children ones)
* **Multi/ Joined table inheritance [NOT SUPPORTED]** - means that part of the columns
is saved on parent model and part is saved on child model that are connected to each
other by kind of one to one relation and under the hood you operate on two models at
once
* **Proxy models [NOT SUPPORTED]** - means that only parent has an actual table,
children just add methods, modify settings etc.
## Mixins
To use Mixins just define a class that is not inheriting from an `ormar.Model` but is
defining `ormar.Fields` as class variables.
```python
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
engine=sqlalchemy.create_engine(DATABASE_URL),
)
# a mixin defines the fields but is a normal python class
class AuditMixin:
created_by: str = ormar.String(max_length=100)
updated_by: str = ormar.String(max_length=100, default="Sam")
class DateFieldsMixins:
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
# a model can inherit from one or more mixins
class Category(ormar.Model, DateFieldsMixins, AuditMixin):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50, unique=True, index=True)
code: int = ormar.Integer()
```
!!!tip
Note that Mixins are **not** models, so you still need to inherit
from `ormar.Model` as well as define `ormar_config` field in the **final** model.
A Category class above will have four additional fields: `created_date`, `updated_date`,
`created_by` and `updated_by`.
There will be only one table created for model `Category` (`categories`), with `Category` class fields
combined with all `Mixins` fields.
Note that `Mixin` in class name is optional but is a good python practice.
## Concrete table inheritance
In concept concrete table inheritance is very similar to Mixins, but uses
actual `ormar.Models` as base classes.
!!!warning
Note that base classes have `abstract=True` set in `ormar_config` object, if you try
to inherit from non abstract marked class `ModelDefinitionError` will be raised.
Since this abstract Model will never be initialized you can skip `metadata`
and `database` in its `ormar_config` definition.
But if you provide it - it will be inherited, that way you do not have to
provide `metadata` and `database` in the final/concrete class
Note that you can always overwrite it in child/concrete class if you need to.
Moreover, at least one of the classes in the inheritance chain has to provide both `database` and `metadata` -
otherwise an error will be raised.
```python
# note that base classes have abstract=True
# since this model will never be initialized you can skip metadata and database
class AuditModel(ormar.Model):
ormar_config = base_ormar_config.copy(abstract=True)
created_by: str = ormar.String(max_length=100)
updated_by: str = ormar.String(max_length=100, default="Sam")
# but if you provide it it will be inherited - DRY (Don't Repeat Yourself) in action
class DateFieldsModel(ormar.Model):
ormar_config = base_ormar_config.copy(
abstract=True,
metadata=metadata,
database=db,
)
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
# that way you do not have to provide metadata and database in the concrete class
class Category(DateFieldsModel, AuditModel):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50, unique=True, index=True)
code: int = ormar.Integer()
```
The list of inherited options/settings is as follows: `metadata`, `database`
and `constraints`.
Of course apart from that all fields from base classes are combined and created in the
concrete table of the final Model.
!!!tip
    Note how you don't have to provide `abstract=False` in the final class - it's the default setting
    that is not inherited.
## Redefining fields in subclasses
Note that you can redefine previously created fields like in normal python class
inheritance.
Whenever you define a field with same name and new definition it will completely replace
the previously defined one.
```python hl_lines="28"
# base class
class DateFieldsModel(ormar.Model):
ormar_config = OrmarConfig(
abstract=True,
metadata=metadata,
database=db,
# note that UniqueColumns need sqlalchemy db columns names not the ormar ones
constraints=[ormar.UniqueColumns("creation_date", "modification_date")]
)
created_date: datetime.datetime = ormar.DateTime(
default=datetime.datetime.now, name="creation_date"
)
updated_date: datetime.datetime = ormar.DateTime(
default=datetime.datetime.now, name="modification_date"
)
class RedefinedField(DateFieldsModel):
ormar_config = OrmarConfig(
tablename="redefines",
metadata=metadata,
database=db,
)
id: int = ormar.Integer(primary_key=True)
# here the created_date is replaced by the String field
created_date: str = ormar.String(max_length=200, name="creation_date")
# you can verify that the final field is correctly declared and created
changed_field = RedefinedField.ormar_config.model_fields["created_date"]
assert changed_field.default is None
assert changed_field.alias == "creation_date"
assert any(x.name == "creation_date" for x in RedefinedField.ormar_config.table.columns)
assert isinstance(
RedefinedField.ormar_config.table.columns["creation_date"].type,
sqlalchemy.sql.sqltypes.String,
)
```
!!!warning
If you declare `UniqueColumns` constraint with column names, the final model **has to have**
a column with the same name declared. Otherwise, the `ModelDefinitionError` will be raised.
So in example above if you do not provide `name` for `created_date` in `RedefinedField` model
ormar will complain.
`created_date: str = ormar.String(max_length=200) # exception`
`created_date: str = ormar.String(max_length=200, name="creation_date2") # exception`
## Relations in inheritance
You can declare relations in every step of inheritance, so both in parent and child
classes.
When you define a relation on a child model level it's either overwriting the relation
defined in parent model (if the same field name is used), or is accessible only to this
child if you define a new relation.
When inheriting relations, you always need to be aware of `related_name` parameter, that
has to be unique across a related model, when you define multiple child classes that inherit the
same relation.
If you do not provide `related_name` parameter ormar calculates it for you. This works
with inheritance as all child models have to have different class names, which are used
to calculate the default `related_name` (class.name.lower()+'s').
But, if you provide a `related_name` this name cannot be reused in all child models as
they would overwrite each other on the related model side.
Therefore, you have two options:
* redefine relation field in child models and manually provide different `related_name`
parameters
* let this for `ormar` to handle -> auto adjusted related_name are: original
related_name + "_" + child model **table** name
That might sound complicated but let's look at the following example:
### ForeignKey relations
```python
# normal model used in relation
class Person(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# parent model - needs to be abstract
class Car(ormar.Model):
ormar_config = base_ormar_config.copy(abstract=True)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50)
owner: Person = ormar.ForeignKey(Person)
# note that we refer to the Person model again so we **have to** provide related_name
co_owner: Person = ormar.ForeignKey(Person, related_name="coowned")
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Truck(Car):
ormar_config = base_ormar_config.copy()
max_capacity: int = ormar.Integer()
class Bus(Car):
ormar_config = base_ormar_config.copy(tablename="buses")
max_persons: int = ormar.Integer()
```
Now when you will inspect the fields on Person model you will get:
```python
Person.ormar_config.model_fields
"""
{'id': ,
'name': ,
'trucks': ,
'coowned_trucks': ,
'buss': ,
'coowned_buses': }
"""
```
Note how you have `trucks` and `buss` fields that lead to the Truck and Bus classes that
this Person owns. There was no `related_name` parameter so default names were used.
At the same time the co-owned cars need to be referenced by `coowned_trucks`
and `coowned_buses`. Ormar appended `_trucks` and `_buses` suffixes taken from child
model table names.
Seems fine, but the default name for owned trucks is ok (`trucks`) but the `buss` is
ugly, so how can we change it?
The solution is pretty simple - just redefine the field in Bus class and provide
different `related_name` parameter.
```python
# rest of the above example remains the same
class Bus(Car):
ormar_config = base_ormar_config.copy(tablename="buses")
# new field that changes the related_name
owner: Person = ormar.ForeignKey(Person, related_name="buses")
max_persons: int = ormar.Integer()
```
Now the columns look much better.
```python
Person.ormar_config.model_fields
"""
{'id': ,
'name': ,
'trucks': ,
'coowned_trucks': ,
'buses': ,
'coowned_buses': }
"""
```
!!!note
You could also provide `related_name` for the `owner` field, that way the proper suffixes
would be added.
`owner: Person = ormar.ForeignKey(Person, related_name="owned")`
and model fields for Person owned cars would become `owned_trucks` and `owned_buses`.
### ManyToMany relations
Similarly, you can inherit from Models that have ManyToMany relations declared, but
there is one substantial difference - the Through model.
Since the Through model will be able to hold additional fields, and now it links only two Tables
(`from` and `to` ones), each child that inherits the m2m relation field has to have separate
Through model.
Of course, you can overwrite the relation in each Child model, but that requires additional
code and undermines the point of the whole inheritance. `Ormar` will handle this for you if
you agree with default naming convention, which you can always manually overwrite in
children if needed.
Again, let's look at the example to easier grasp the concepts.
We will modify the previous example described above to use m2m relation for co_owners.
```python
# person remain the same as above
class Person(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# new through model between Person and Car2
class PersonsCar(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="cars_x_persons")
# note how co_owners is now ManyToMany relation
class Car2(ormar.Model):
ormar_config = base_ormar_config.copy(abstract=True)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50)
# note the related_name - needs to be unique across Person
# model, regardless of how many different models leads to Person
owner: Person = ormar.ForeignKey(Person, related_name="owned")
co_owners: List[Person] = ormar.ManyToMany(
Person, through=PersonsCar, related_name="coowned"
)
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
# child models define only additional Fields
class Truck2(Car2):
ormar_config = base_ormar_config.copy(tablename="trucks2")
max_capacity: int = ormar.Integer()
class Bus2(Car2):
ormar_config = base_ormar_config.copy(tablename="buses2")
max_persons: int = ormar.Integer()
```
`Ormar` automatically modifies related_name of the fields to include the **table** name
of the children models. The default name is original related_name + '_' + child table name.
That way for class Truck2 the relation defined in
`owner: Person = ormar.ForeignKey(Person, related_name="owned")` becomes `owned_trucks2`
You can verify the names by inspecting the list of fields present on `Person` model.
```python
Person.ormar_config.model_fields
{
# note how all relation fields need to be unique on Person
# regardless if autogenerated or manually overwritten
'id': ,
'name': ,
# note that we expanded on previous example so all 'old' fields are here
'trucks': ,
'coowned_trucks': ,
'buses': ,
'coowned_buses': ,
# newly defined related fields
'owned_trucks2': ,
'coowned_trucks2': ,
'owned_buses2': ,
'coowned_buses2':
}
```
But that's not all. It's kind of internal to `ormar` but affects the data structure in the database,
so let's examine the through models for both `Bus2` and `Truck2` models.
```python
Bus2.ormar_config.model_fields['co_owners'].through
Bus2.ormar_config.model_fields['co_owners'].through.ormar_config.tablename
'cars_x_persons_buses2'
Truck2.ormar_config.model_fields['co_owners'].through
Truck2.ormar_config.model_fields['co_owners'].through.ormar_config.tablename
'cars_x_persons_trucks2'
```
As you can see above `ormar` cloned the Through model for each of the Child classes, appending the
Child **class** name at the end of the model name, while for the **table** names of the cloned
models the name of the **table** from the child is used.
Note that original model is not only not used, the table for this model is removed from metadata:
```python
Bus2.ormar_config.metadata.tables.keys()
dict_keys(['test_date_models', 'categories', 'subjects', 'persons', 'trucks', 'buses',
'cars_x_persons_trucks2', 'trucks2', 'cars_x_persons_buses2', 'buses2'])
```
So be aware that if you introduce inheritance along the way and convert a model into
abstract parent model you may lose your data on through table if not careful.
!!!note
Note that original table name and model name of the Through model is never used.
Only the cloned models tables are created and used.
!!!warning
Note that each subclass of the Model that has `ManyToMany` relation defined generates
a new `Through` model, meaning also **new database table**.
That means that each time you define a Child model you need to either manually create
the table in the database, or run a migration (with alembic).
collerek-ormar-c09209a/docs/models/internals.md 0000664 0000000 0000000 00000004332 15130200524 0021507 0 ustar 00root root 0000000 0000000 # Internals
Apart from special parameters defined in the `Model` during definition (tablename, metadata etc.) the `Model` provides you with useful internals.
## Pydantic Model
All `Model` classes inherit from `pydantic.BaseModel` so you can access all normal attributes of pydantic models.
For example to list pydantic model fields you can:
```Python hl_lines="20"
--8<-- "../docs_src/models/docs003.py"
```
!!!tip
Note how the primary key `id` field is optional as `Integer` primary key by default has `autoincrement` set to `True`.
!!!info
For more options visit official [pydantic][pydantic] documentation.
## Sqlalchemy Table
To access auto created sqlalchemy table you can use `Model.ormar_config.table` parameter
For example to list table columns you can:
```Python hl_lines="24"
--8<-- "../docs_src/models/docs004.py"
```
!!!tip
You can access table primary key name by `Course.ormar_config.pkname`
!!!info
For more options visit official [sqlalchemy-metadata][sqlalchemy-metadata] documentation.
## Fields Definition
To access ormar `Fields` you can use `Model.ormar_config.model_fields` parameter
For example to list table model fields you can:
```Python hl_lines="22"
--8<-- "../docs_src/models/docs005.py"
```
!!!info
Note that fields stored on a model are `classes` not `instances`.
So if you print just model fields you will get:
`{'id': , `
`'name': , `
`'completed': }`
[fields]: ./fields.md
[relations]: ./relations/index.md
[queries]: ./queries.md
[pydantic]: https://pydantic-docs.helpmanual.io/
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[sqlalchemy-metadata]: https://docs.sqlalchemy.org/en/13/core/metadata.html
[databases]: https://github.com/encode/databases
[sqlalchemy connection string]: https://docs.sqlalchemy.org/en/13/core/engines.html#database-urls
[sqlalchemy table creation]: https://docs.sqlalchemy.org/en/13/core/metadata.html#creating-and-dropping-database-tables
[alembic]: https://alembic.sqlalchemy.org/en/latest/tutorial.html
[save status]: ../models/#model-save-status
[Internals]: #internals
collerek-ormar-c09209a/docs/models/methods.md 0000664 0000000 0000000 00000057203 15130200524 0021160 0 ustar 00root root 0000000 0000000 # Model methods
!!!tip
Main interaction with the databases is exposed through a `QuerySet` object exposed on
each model as `Model.objects` similar to the django orm.
    To read more about **querying, joining tables, excluding fields etc. visit [queries][queries] section.**
Each model instance has a set of methods to `save`, `update` or `load` itself.
Available methods are described below.
## `pydantic` methods
Note that each `ormar.Model` is also a `pydantic.BaseModel`, so all `pydantic` methods are also available on a model,
especially `model_dump()` and `model_dump_json()` methods that can also accept `exclude`, `include` and other parameters.
To read more check [pydantic][pydantic] documentation
## model_construct()
`model_construct` is a raw equivalent of `__init__` method used for construction of new instances.
The difference is that `model_construct` skips validations, so it should be used when you know that data is correct and can be trusted.
The benefit of using construct is the speed of execution due to skipped validation.
!!!note
Note that in contrast to `pydantic.model_construct` method - the `ormar` equivalent will also process the nested related models.
!!!warning
Bear in mind that due to skipped validation the `construct` method does not perform any conversions, checks etc.
So it's your responsibility to provide that data that is valid and can be consumed by the database.
The only two things that construct still performs are:
* Providing a `default` value for not set fields
* Initialize nested ormar models if you pass a dictionary or a primary key value
## model_dump()
`model_dump` is a method inherited from `pydantic`, yet `ormar` adds its own parameters and has some nuances when working with default values,
therefore it's listed here for clarity.
`model_dump` as the name suggests export data from model tree to dictionary.
Explanation of model_dump parameters:
### include (`ormar` modified)
`include: Union[Set, Dict] = None`
Set or dictionary of field names to include in returned dictionary.
Note that `pydantic` has an uncommon pattern of including/ excluding fields in lists (so also nested models) by an index.
And if you want to exclude the field in all children you need to pass a `__all__` key to dictionary.
You cannot exclude nested models in `Set`s in `pydantic` but you can in `ormar`
(by adding double underscore on relation name i.e. to exclude name of category for a book you can use `exclude={"book__category__name"}`)
`ormar` does not support by index exclusion/ inclusions and accepts a simplified and more user-friendly notation.
To check how you can include/exclude fields, including nested fields check out [fields](../queries/select-columns.md#fields) section that has an explanation and a lot of samples.
!!!note
The fact that in `ormar` you can exclude nested models in sets, you can exclude from a whole model tree in `response_model_exclude` and `response_model_include` in fastapi!
### exclude (`ormar` modified)
`exclude: Union[Set, Dict] = None`
Set or dictionary of field names to exclude in returned dictionary.
Note that `pydantic` has an uncommon pattern of including/ excluding fields in lists (so also nested models) by an index.
And if you want to exclude the field in all children you need to pass a `__all__` key to dictionary.
You cannot exclude nested models in `Set`s in `pydantic` but you can in `ormar`
(by adding double underscore on relation name i.e. to exclude name of category for a book you can use `exclude={"book__category__name"}`)
`ormar` does not support by index exclusion/ inclusions and accepts a simplified and more user-friendly notation.
To check how you can include/exclude fields, including nested fields check out [fields](../queries/select-columns.md#fields) section that has an explanation and a lot of samples.
!!!note
The fact that in `ormar` you can exclude nested models in sets, you can exclude from a whole model tree in `response_model_exclude` and `response_model_include` in fastapi!
### exclude_unset
`exclude_unset: bool = False`
Flag indicates whether fields which were not explicitly set when creating the model should be excluded from the returned dictionary.
!!!warning
Note that after you save data into database each field has its own value -> either provided by you, default, or `None`.
    That means that when you load the data from database, **all** fields are set, and this flag basically stops working!
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, default="Test")
visibility: bool = ormar.Boolean(default=True)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
price: float = ormar.Float(default=9.99)
categories: List[Category] = ormar.ManyToMany(Category)
category = Category(name="Test 2")
assert category.model_dump() == {'id': None, 'items': [], 'name': 'Test 2',
'visibility': True}
assert category.model_dump(exclude_unset=True) == {'items': [], 'name': 'Test 2'}
await category.save()
category2 = await Category.objects.get()
assert category2.model_dump() == {'id': 1, 'items': [], 'name': 'Test 2',
'visibility': True}
# NOTE how after loading from db all fields are set explicitly
# as this is what happens when you populate a model from db
assert category2.model_dump(exclude_unset=True) == {'id': 1, 'items': [],
'name': 'Test 2', 'visibility': True}
```
### exclude_defaults
`exclude_defaults: bool = False`
Flag indicates whether fields which are equal to their default values (whether set or otherwise) should be excluded from the returned dictionary.
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, default="Test")
visibility: bool = ormar.Boolean(default=True)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
price: float = ormar.Float(default=9.99)
categories: List[Category] = ormar.ManyToMany(Category)
category = Category()
# note that Integer pk is by default autoincrement so optional
assert category.model_dump() == {'id': None, 'items': [], 'name': 'Test', 'visibility': True}
assert category.model_dump(exclude_defaults=True) == {'items': []}
# save and reload the data
await category.save()
category2 = await Category.objects.get()
assert category2.model_dump() == {'id': 1, 'items': [], 'name': 'Test', 'visibility': True}
assert category2.model_dump(exclude_defaults=True) == {'id': 1, 'items': []}
```
### exclude_none
`exclude_none: bool = False`
Flag indicates whether fields which are equal to `None` should be excluded from the returned dictionary.
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, default="Test", nullable=True)
visibility: bool = ormar.Boolean(default=True)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
price: float = ormar.Float(default=9.99)
categories: List[Category] = ormar.ManyToMany(Category)
category = Category(name=None)
assert category.model_dump() == {'id': None, 'items': [], 'name': None,
'visibility': True}
# note the id is not set yet so None and excluded
assert category.model_dump(exclude_none=True) == {'items': [], 'visibility': True}
await category.save()
category2 = await Category.objects.get()
assert category2.model_dump() == {'id': 1, 'items': [], 'name': None,
'visibility': True}
assert category2.model_dump(exclude_none=True) == {'id': 1, 'items': [],
'visibility': True}
```
### exclude_primary_keys (`ormar` only)
`exclude_primary_keys: bool = False`
Setting flag to `True` will exclude all primary key columns in a tree, including nested models.
```python
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
item1 = Item(id=1, name="Test Item")
assert item1.model_dump() == {"id": 1, "name": "Test Item"}
assert item1.model_dump(exclude_primary_keys=True) == {"name": "Test Item"}
```
### exclude_through_models (`ormar` only)
`exclude_through_models: bool = False`
`Through` models are auto added for every `ManyToMany` relation, and they hold additional parameters on linking model/table.
Setting the `exclude_through_models=True` will exclude all through models, including Through models of submodels.
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
categories: List[Category] = ormar.ManyToMany(Category)
# tree defining the models
item_dict = {
"name": "test",
"categories": [{"name": "test cat"}, {"name": "test cat2"}],
}
# save whole tree
await Item(**item_dict).save_related(follow=True, save_all=True)
# get the saved values
item = await Item.objects.select_related("categories").get()
# by default you can see the through models (itemcategory)
assert item.model_dump() == {'id': 1, 'name': 'test',
'categories': [
{'id': 1, 'name': 'test cat',
'itemcategory': {'id': 1, 'category': None, 'item': None}},
{'id': 2, 'name': 'test cat2',
'itemcategory': {'id': 2, 'category': None, 'item': None}}
]}
# you can exclude those fields/ models
assert item.model_dump(exclude_through_models=True) == {
'id': 1, 'name': 'test',
'categories': [
{'id': 1, 'name': 'test cat'},
{'id': 2, 'name': 'test cat2'}
]}
```
## model_dump_json()
`model_dump_json()` has exactly the same parameters as `model_dump()` so check above.
Of course the end result is a string with json representation and not a dictionary.
## get_pydantic()
`get_pydantic(include: Union[Set, Dict] = None, exclude: Union[Set, Dict] = None)`
This method allows you to generate `pydantic` models from your ormar models without you needing to retype all the fields.
Note that if you have nested models, it **will generate the whole tree of pydantic models for you!** but in a way that prevents cyclic references issues.
Moreover, you can pass `exclude` and/or `include` parameters to keep only the fields that you want to, including in nested models.
That means that this way you can effortlessly create pydantic models for requests and responses in `fastapi`.
!!!Note
To read more about possible excludes/includes and how to structure your exclude dictionary or set visit [fields](../queries/select-columns.md#fields) section of documentation
Given sample ormar models like follows:
```python
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database=databases.Database(DATABASE_URL, force_rollback=True),
)
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Item(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, default="test")
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
```
You can generate pydantic models out of it with a one simple call.
```python
PydanticCategory = Category.get_pydantic(include={"id", "name"})
```
Which will generate model equivalent of:
```python
class Category(BaseModel):
id: Optional[int]
name: Optional[str] = "test"
```
!!!warning
Note that it's not a good practice to have several classes with same name in one module, as well as it would break `fastapi` docs.
    That's why ormar adds random 3 uppercase letters to the class name. In example above it means that in reality class would be named i.e. `Category_XIP(BaseModel)`.
To exclude or include nested fields you can use dict or double underscores.
```python
# both calls are equivalent
PydanticCategory = Category.get_pydantic(include={"id", "items__id"})
PydanticCategory = Category.get_pydantic(include={"id": ..., "items": {"id"}})
```
and results in a generated structure as follows:
```python
class Item(BaseModel):
id: Optional[int]
class Category(BaseModel):
id: Optional[int]
items: Optional[List[Item]]
```
Of course, you can use also deeply nested structures and ormar will generate its pydantic equivalent for you (in a way that excludes loops).
Note how `Item` model above does not have a reference to `Category` although in ormar the relation is bidirectional (and `ormar.Item` has `categories` field).
!!!warning
Note that the generated pydantic model will inherit all **field** validators from the original `ormar` model, that includes the ormar choices validator as well as validators defined with `pydantic.validator` decorator.
But, at the same time all root validators present on `ormar` models will **NOT** be copied to the generated pydantic model. Since root validator can operate on all fields and a user can exclude some fields during generation of pydantic model it's not safe to copy those validators.
If required, you need to redefine/ manually copy them to generated pydantic model.
## load()
By default, when you query a table without prefetching related models, the ormar will still construct
your related models, but populate them only with the pk value. You can load the related model by calling `load()` method.
`load()` can also be used to refresh the model from the database (if it was changed by some other process).
```python
track = await Track.objects.get(name='The Bird')
track.album.pk # will return malibu album pk (1)
track.album.name # will return None
# you need to actually load the data first
await track.album.load()
track.album.name # will return 'Malibu'
```
## load_all()
`load_all(follow: bool = False, exclude: Union[List, str, Set, Dict] = None) -> Model`
Method works like `load()` but also goes through all relations of the `Model` on which the method is called,
and reloads them from database.
By default, the `load_all` method loads only models that are directly related (one step away) to the model on which the method is called.
But you can specify the `follow=True` parameter to traverse through nested models and load all of them in the relation tree.
!!!warning
To avoid circular updates with `follow=True` set, `load_all` keeps a set of already visited Models,
and won't perform nested `loads` on Models that were already visited.
    So if you have diamond or circular relation types you need to perform the loads in a manual way.
```python
# in example like this the second Street (coming from City) won't be load_all, so ZipCode won't be reloaded
Street -> District -> City -> Street -> ZipCode
```
Method accepts also optional exclude parameter that works exactly the same as exclude_fields method in `QuerySet`.
That way you can remove fields from related models being refreshed or skip whole related models.
Method performs one database query so it's more efficient than nested calls to `load()` and `all()` on related models.
!!!tip
To read more about `exclude` read [exclude_fields][exclude_fields]
!!!warning
All relations are cleared on `load_all()`, so if you exclude some nested models they will be empty after call.
## save()
`save() -> self`
You can create new models by using `QuerySet.create()` method or by initializing your model as a normal pydantic model
and later calling `save()` method.
`save()` can also be used to persist changes that you made to the model, but only if the primary key is not set or the model does not exist in database.
The `save()` method does not check if the model exists in db, so if it does you will get an integrity error from your selected db backend if trying to save model with already existing primary key.
```python
track = Track(name='The Bird')
await track.save() # will persist the model in database
track = await Track.objects.get(name='The Bird')
await track.save() # will raise integrity error as pk is populated
```
## update()
`update(_columns: List[str] = None, **kwargs) -> self`
You can update models by using `QuerySet.update()` method or by updating your model attributes (fields) and calling `update()` method.
If you try to update a model without a primary key set a `ModelPersistenceError` exception will be thrown.
To persist a newly created model use `save()` or `upsert(**kwargs)` methods.
```python
track = await Track.objects.get(name='The Bird')
await track.update(name='The Bird Strikes Again')
```
To update only selected columns from model into the database provide a list of columns that should be updated to `_columns` argument.
In example:
```python hl_lines="16"
class Movie(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, nullable=False, name="title")
year: int = ormar.Integer()
profit: float = ormar.Float()
terminator = await Movie(name='Terminator', year=1984, profit=0.078).save()
terminator.name = "Terminator 2"
terminator.year = 1991
terminator.profit = 0.520
# update only name
await terminator.update(_columns=["name"])
# note that terminator instance was not reloaded so
assert terminator.year == 1991
# but once you load the data from db you see it was not updated
await terminator.load()
assert terminator.year == 1984
```
!!!warning
Note that `update()` does not refresh the instance of the Model, so if you change more columns than you pass in `_columns` list your Model instance will have different values than the database!
## upsert()
`upsert(**kwargs) -> self`
It's a proxy to either `save()` or `update(**kwargs)` methods described above.
If the primary key is set -> the `update` method will be called.
If the pk is not set the `save()` method will be called.
```python
track = Track(name='The Bird')
await track.upsert() # will call save as the pk is empty
track = await Track.objects.get(name='The Bird')
await track.upsert(name='The Bird Strikes Again') # will call update as pk is already populated
```
## delete()
You can delete models by using `QuerySet.delete()` method or by using your model and calling `delete()` method.
```python
track = await Track.objects.get(name='The Bird')
await track.delete() # will delete the model from database
```
!!!tip
Note that the `track` object stays the same, only the record in the database is removed.
## save_related()
`save_related(follow: bool = False, save_all: bool = False, exclude=Optional[Union[Set, Dict]]) -> None`
Method goes through all relations of the `Model` on which the method is called,
and calls `upsert()` method on each model that is **not** saved.
To understand when a model is saved check [save status][save status] section above.
By default the `save_related` method saves only models that are directly related (one step away) to the model on which the method is called.
But you can specify the `follow=True` parameter to traverse through nested models and save all of them in the relation tree.
By default `save_related` saves only models that do not have the `saved` status, meaning that they were modified in the current scope.
If you want to force saving all of the related models use the `save_all=True` flag, which will upsert all related models, regardless of their save status.
If you want to skip saving some of the relations you can pass `exclude` parameter.
`Exclude` can be a set of own model relations,
or it can be a dictionary that can also contain nested items.
!!!note
Note that `exclude` parameter in `save_related` accepts only relation fields names, so
if you pass any other fields they will be saved anyway
!!!note
To read more about the structure of possible values passed to `exclude` check `Queryset.fields` method documentation.
!!!warning
To avoid circular updates with `follow=True` set, `save_related` keeps a set of already visited Models on each branch of relation tree,
and won't perform nested `save_related` on Models that were already visited.
So if you have circular relations types you need to perform the updates in a manual way.
Note that with `save_all=True` and `follow=True` you can use `save_related()` to save whole relation tree at once.
Example:
```python
class Department(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
department_name: str = ormar.String(max_length=100)
class Course(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
course_name: str = ormar.String(max_length=100)
completed: bool = ormar.Boolean()
department: Optional[Department] = ormar.ForeignKey(Department)
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
courses = ormar.ManyToMany(Course)
to_save = {
"department_name": "Ormar",
"courses": [
{"course_name": "basic1",
"completed": True,
"students": [
{"name": "Jack"},
{"name": "Abi"}
]},
{"course_name": "basic2",
"completed": True,
"students": [
{"name": "Kate"},
{"name": "Miranda"}
]
},
],
}
# initialize whole tree
department = Department(**to_save)
# save all at once (one after another)
await department.save_related(follow=True, save_all=True)
department_check = await Department.objects.select_all(follow=True).get()
to_exclude = {
"id": ...,
"courses": {
"id": ...,
"students": {"id", "studentcourse"}
}
}
# after excluding ids and through models you get exact same payload used to
# construct whole tree
assert department_check.model_dump(exclude=to_exclude) == to_save
```
!!!warning
`save_related()` iterates all relations and all models and upserts() them one by one,
so it will save all models but might not be optimal in regard of number of database queries.
[fields]: ../fields.md
[relations]: ../relations/index.md
[queries]: ../queries/index.md
[pydantic]: https://pydantic-docs.helpmanual.io/
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[sqlalchemy-metadata]: https://docs.sqlalchemy.org/en/13/core/metadata.html
[databases]: https://github.com/encode/databases
[sqlalchemy connection string]: https://docs.sqlalchemy.org/en/13/core/engines.html#database-urls
[sqlalchemy table creation]: https://docs.sqlalchemy.org/en/13/core/metadata.html#creating-and-dropping-database-tables
[alembic]: https://alembic.sqlalchemy.org/en/latest/tutorial.html
[save status]: ../models/index/#model-save-status
[Internals]: #internals
[exclude_fields]: ../queries/select-columns.md#exclude_fields
collerek-ormar-c09209a/docs/models/migrations.md 0000664 0000000 0000000 00000014065 15130200524 0021670 0 ustar 00root root 0000000 0000000 # Migrations
## Database Initialization
Note that all examples assume that you already have a database.
If that is not the case and you need to create your tables, that's super easy as `ormar` is using sqlalchemy for underlying table construction.
All you have to do is call `create_all()` like in the example below.
```python
import sqlalchemy
# get your database url in sqlalchemy format - same as used with databases instance used in Model definition
engine = sqlalchemy.create_engine("sqlite:///test.db")
# note that this has to be the same metadata that is used in ormar Models definition
metadata.create_all(engine)
```
You can also create single tables, sqlalchemy tables are exposed in `ormar.ormar_config` object.
```python
import sqlalchemy
# get your database url in sqlalchemy format - same as used with databases instance used in Model definition
engine = sqlalchemy.create_engine("sqlite:///test.db")
# Artist is an ormar model from previous examples
Artist.ormar_config.table.create(engine)
```
!!!warning
You need to create the tables only once, so use a python console for that or remove the script from your production code after first use.
## Alembic usage
Likewise as with tables, since we base tables on sqlalchemy for migrations please use [alembic][alembic].
### Initialization
Use command line to reproduce this minimalistic example.
```bash
alembic init alembic
alembic revision --autogenerate -m "made some changes"
alembic upgrade head
```
### Sample env.py file
A quick example of alembic migrations should be something similar to:
When you have application structure like:
```
-> app
-> alembic (initialized folder - so run alembic init alembic inside app folder)
-> models (here are the models)
-> __init__.py
-> my_models.py
```
Your `env.py` file (in alembic folder) can look something like:
```python
from logging.config import fileConfig
from sqlalchemy import create_engine
from alembic import context
import sys, os
# add app folder to system path (alternative is running it from parent folder with python -m ...)
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../../')
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here (the one used in ormar)
# for 'autogenerate' support
from app.models.my_models import metadata
target_metadata = metadata
# set your url here or import from settings
# note that by default url is in saved sqlachemy.url variable in alembic.ini file
URL = "sqlite:///test.db"
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
context.configure(
url=URL,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
# if you use UUID field set also this param
# the prefix has to match sqlalchemy import name in alembic
# that can be set by sqlalchemy_module_prefix option (default 'sa.')
user_module_prefix='sa.'
)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = create_engine(URL)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata,
# if you use UUID field set also this param
# the prefix has to match sqlalchemy import name in alembic
# that can be set by sqlalchemy_module_prefix option (default 'sa.')
user_module_prefix='sa.'
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
```
### Excluding tables
You can also include/exclude specific tables with `include_object` parameter passed to `context.configure`. That should be a function returning `True/False` for given objects.
A sample function excluding tables starting with `data_` in name unless it's 'data_jobs':
```python
def include_object(object, name, type_, reflected, compare_to):
if name and name.startswith('data_') and name not in ['data_jobs']:
return False
return True
```
!!!note
Function parameters for `include_object` (you can change the name) are required and defined in alembic
to check what they do check the [alembic][alembic] documentation
And you pass it into context like (both in online and offline):
```python
context.configure(
url=URL,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
user_module_prefix='sa.',
include_object=include_object
)
```
!!!info
You can read more about table creation, altering and migrations in [sqlalchemy table creation][sqlalchemy table creation] documentation.
[fields]: ./fields.md
[relations]: ./relations/index.md
[queries]: ./queries.md
[pydantic]: https://pydantic-docs.helpmanual.io/
[sqlalchemy-core]: https://docs.sqlalchemy.org/en/latest/core/
[sqlalchemy-metadata]: https://docs.sqlalchemy.org/en/13/core/metadata.html
[databases]: https://github.com/encode/databases
[sqlalchemy connection string]: https://docs.sqlalchemy.org/en/13/core/engines.html#database-urls
[sqlalchemy table creation]: https://docs.sqlalchemy.org/en/13/core/metadata.html#creating-and-dropping-database-tables
[alembic]: https://alembic.sqlalchemy.org/en/latest/tutorial.html
[save status]: ../models/index/#model-save-status
[Internals]: #internals
collerek-ormar-c09209a/docs/mypy.md 0000664 0000000 0000000 00000000771 15130200524 0017226 0 ustar 00root root 0000000 0000000 To provide better errors check you should use mypy with pydantic [plugin][plugin]
Please use notation introduced in version 0.4.0.
```Python hl_lines="15-17"
--8<-- "../docs_src/models/docs012.py"
```
Note that above example is not using the type hints, so further operations with mypy might fail, depending on the context.
Preferred notation should look like this:
```Python hl_lines="15-17"
--8<-- "../docs_src/models/docs001.py"
```
[plugin]: https://pydantic-docs.helpmanual.io/mypy_plugin/ collerek-ormar-c09209a/docs/plugin.md 0000664 0000000 0000000 00000001471 15130200524 0017524 0 ustar 00root root 0000000 0000000 While `ormar` will work with any IDE there is a PyCharm `pydantic` plugin that enhances the user experience for this IDE.
Plugin is available on the JetBrains Plugins Repository for PyCharm: [plugin page][plugin page].
You can install the plugin for free from the plugin marketplace
(PyCharm's Preferences -> Plugin -> Marketplace -> search "pydantic").
!!!note
For plugin to work properly you need to provide valid type hints for model fields.
!!!info
Plugin supports type hints, argument inspection and more but mainly only for __init__ methods
More information can be found on the
[official plugin page](https://plugins.jetbrains.com/plugin/12861-pydantic)
and [github repository](https://github.com/koxudaxi/pydantic-pycharm-plugin).
[plugin page]: https://plugins.jetbrains.com/plugin/12861-pydantic collerek-ormar-c09209a/docs/queries/ 0000775 0000000 0000000 00000000000 15130200524 0017356 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/queries/aggregations.md 0000664 0000000 0000000 00000024061 15130200524 0022355 0 ustar 00root root 0000000 0000000 # Aggregation functions
Currently 6 aggregation functions are supported.
* `count(distinct: bool = True) -> int`
* `exists() -> bool`
* `sum(columns) -> Any`
* `avg(columns) -> Any`
* `min(columns) -> Any`
* `max(columns) -> Any`
* `QuerysetProxy`
* `QuerysetProxy.count(distinct=True)` method
* `QuerysetProxy.exists()` method
* `QuerysetProxy.sum(columns)` method
* `QuerysetProxy.avg(columns)` method
* `QuerysetProxy.min(column)` method
* `QuerysetProxy.max(columns)` method
## count
`count(distinct: bool = True) -> int`
Returns number of rows matching the given criteria (i.e. applied with `filter` and `exclude`).
If `distinct` is `True` (the default), this will return the number of primary rows selected. If `False`,
the count will be the total number of rows returned
(including extra rows for `one-to-many` or `many-to-many` left `select_related` table joins).
`False` is the legacy (buggy) behavior for workflows that depend on it.
```python
class Book(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
tablename="book"
)
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
author: str = ormar.String(max_length=100)
genre: str = ormar.String(
max_length=100,
default="Fiction",
choices=["Fiction", "Adventure", "Historic", "Fantasy"],
)
```
```python
# returns count of rows in db for Books model
no_of_books = await Book.objects.count()
```
## exists
`exists() -> bool`
Returns a bool value to confirm if there are rows matching the given criteria (applied with `filter` and `exclude`)
```python
class Book(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
tablename="book"
)
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
author: str = ormar.String(max_length=100)
genre: str = ormar.String(
max_length=100,
default="Fiction",
choices=["Fiction", "Adventure", "Historic", "Fantasy"],
)
```
```python
# returns a boolean value if given row exists
has_sample = await Book.objects.filter(title='Sample').exists()
```
## sum
`sum(columns) -> Any`
Returns sum value of columns for rows matching the given criteria (applied with `filter` and `exclude` if set before).
You can pass one or many column names including related columns.
As of now each column passed is aggregated separately (so `sum(col1+col2)` is not possible,
you can have `sum(col1, col2)` and later add 2 returned sums in python)
You cannot `sum` non numeric columns.
If you aggregate on one column, the single value is directly returned as a result
If you aggregate on multiple columns a dictionary with column: result pairs is returned
Given models like follows
```Python
--8<-- "../docs_src/aggregations/docs001.py"
```
A sample usage might look like following
```python
author = await Author(name="Author 1").save()
await Book(title="Book 1", year=1920, ranking=3, author=author).save()
await Book(title="Book 2", year=1930, ranking=1, author=author).save()
await Book(title="Book 3", year=1923, ranking=5, author=author).save()
assert await Book.objects.sum("year") == 5773
result = await Book.objects.sum(["year", "ranking"])
assert result == dict(year=5773, ranking=9)
try:
# cannot sum string column
await Book.objects.sum("title")
except ormar.QueryDefinitionError:
pass
assert await Author.objects.select_related("books").sum("books__year") == 5773
result = await Author.objects.select_related("books").sum(
["books__year", "books__ranking"]
)
assert result == dict(books__year=5773, books__ranking=9)
assert (
await Author.objects.select_related("books")
.filter(books__year__lt=1925)
.sum("books__year")
== 3843
)
```
## avg
`avg(columns) -> Any`
Returns avg value of columns for rows matching the given criteria (applied with `filter` and `exclude` if set before).
You can pass one or many column names including related columns.
As of now each column passed is aggregated separately (so `avg(col1+col2)` is not possible,
you can have `avg(col1, col2)` and work with the 2 separately returned averages in python)
You cannot `avg` non numeric columns.
If you aggregate on one column, the single value is directly returned as a result
If you aggregate on multiple columns a dictionary with column: result pairs is returned
```Python
--8<-- "../docs_src/aggregations/docs001.py"
```
A sample usage might look like following
```python
author = await Author(name="Author 1").save()
await Book(title="Book 1", year=1920, ranking=3, author=author).save()
await Book(title="Book 2", year=1930, ranking=1, author=author).save()
await Book(title="Book 3", year=1923, ranking=5, author=author).save()
assert round(float(await Book.objects.avg("year")), 2) == 1924.33
result = await Book.objects.avg(["year", "ranking"])
assert round(float(result.get("year")), 2) == 1924.33
assert result.get("ranking") == 3.0
try:
# cannot avg string column
await Book.objects.avg("title")
except ormar.QueryDefinitionError:
pass
result = await Author.objects.select_related("books").avg("books__year")
assert round(float(result), 2) == 1924.33
result = await Author.objects.select_related("books").avg(
["books__year", "books__ranking"]
)
assert round(float(result.get("books__year")), 2) == 1924.33
assert result.get("books__ranking") == 3.0
assert (
await Author.objects.select_related("books")
.filter(books__year__lt=1925)
.avg("books__year")
== 1921.5
)
```
## min
`min(columns) -> Any`
Returns min value of columns for rows matching the given criteria (applied with `filter` and `exclude` if set before).
You can pass one or many column names including related columns.
As of now each column passed is aggregated separately (so `min(col1+col2)` is not possible,
you can have `min(col1, col2)` and work with the 2 separately returned minimums in python)
If you aggregate on one column, the single value is directly returned as a result
If you aggregate on multiple columns a dictionary with column: result pairs is returned
```Python
--8<-- "../docs_src/aggregations/docs001.py"
```
A sample usage might look like following
```python
author = await Author(name="Author 1").save()
await Book(title="Book 1", year=1920, ranking=3, author=author).save()
await Book(title="Book 2", year=1930, ranking=1, author=author).save()
await Book(title="Book 3", year=1923, ranking=5, author=author).save()
assert await Book.objects.min("year") == 1920
result = await Book.objects.min(["year", "ranking"])
assert result == dict(year=1920, ranking=1)
assert await Book.objects.min("title") == "Book 1"
assert await Author.objects.select_related("books").min("books__year") == 1920
result = await Author.objects.select_related("books").min(
["books__year", "books__ranking"]
)
assert result == dict(books__year=1920, books__ranking=1)
assert (
await Author.objects.select_related("books")
.filter(books__year__gt=1925)
.min("books__year")
== 1930
)
```
## max
`max(columns) -> Any`
Returns max value of columns for rows matching the given criteria (applied with `filter` and `exclude` if set before).
You can pass one or many column names including related columns.
As of now each column passed is aggregated separately (so `max(col1+col2)` is not possible,
you can have `max(col1, col2)` and work with the 2 separately returned maximums in python)
If you aggregate on one column, the single value is directly returned as a result
If you aggregate on multiple columns a dictionary with column: result pairs is returned
```Python
--8<-- "../docs_src/aggregations/docs001.py"
```
A sample usage might look like following
```python
author = await Author(name="Author 1").save()
await Book(title="Book 1", year=1920, ranking=3, author=author).save()
await Book(title="Book 2", year=1930, ranking=1, author=author).save()
await Book(title="Book 3", year=1923, ranking=5, author=author).save()
assert await Book.objects.max("year") == 1930
result = await Book.objects.max(["year", "ranking"])
assert result == dict(year=1930, ranking=5)
assert await Book.objects.max("title") == "Book 3"
assert await Author.objects.select_related("books").max("books__year") == 1930
result = await Author.objects.select_related("books").max(
["books__year", "books__ranking"]
)
assert result == dict(books__year=1930, books__ranking=5)
assert (
await Author.objects.select_related("books")
.filter(books__year__lt=1925)
.max("books__year")
== 1923
)
```
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey`
returns the list of related models.
But at the same time it exposes a subset of QuerySet API, so you can filter, create,
select related etc related models directly from parent model.
### count
Works exactly the same as [count](./#count) function above but allows you to count related
objects from the other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### exists
Works exactly the same as [exists](./#exists) function above but allows you to check the existence of related
objects from the other side of the relation.
### sum
Works exactly the same as [sum](./#sum) function above but allows you to sum columns from related
objects from other side of the relation.
### avg
Works exactly the same as [avg](./#avg) function above but allows you to average columns from related
objects from other side of the relation.
### min
Works exactly the same as [min](./#min) function above but allows you to select minimum of columns from related
objects from other side of the relation.
### max
Works exactly the same as [max](./#max) function above but allows you to select maximum of columns from related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/create.md 0000664 0000000 0000000 00000013361 15130200524 0021147 0 ustar 00root root 0000000 0000000 # Insert data into database
Following methods allow you to insert data into the database.
* `create(**kwargs) -> Model`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
* `update_or_create(**kwargs) -> Model`
* `bulk_create(objects: List[Model]) -> None`
* `Model`
* `Model.save()` method
* `Model.upsert()` method
* `Model.save_related()` method
* `QuerysetProxy`
* `QuerysetProxy.create(**kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs)` method
* `QuerysetProxy.update_or_create(**kwargs)` method
## create
`create(**kwargs): -> Model`
Creates the model instance, saves it in a database and returns the updated model
(with pk populated if not passed and autoincrement is set).
The allowed kwargs are `Model` fields names and proper value types.
```python
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="album"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
```python
malibu = await Album.objects.create(name="Malibu")
await Track.objects.create(album=malibu, title="The Bird", position=1)
```
The alternative is a split creation and persistence of the `Model`.
```python
malibu = Album(name="Malibu")
await malibu.save()
```
!!!tip
Check other `Model` methods in [models][models]
## get_or_create
`get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
Combination of create and get methods.
Tries to get a row meeting the criteria and if `NoMatch` exception is raised it creates
a new one with given kwargs and _defaults.
When `_defaults` dictionary is provided the values set in `_defaults` will **always** be set, including overwriting explicitly provided values.
i.e. `get_or_create(_defaults: {"title": "I win"}, title="never used")` will always use "I win" as title whether you provide your own value in kwargs or not.
```python
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="album"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
year: int = ormar.Integer()
```
```python
album, created = await Album.objects.get_or_create(name='The Cat', _defaults={"year": 1999})
assert created is True
assert album.name == "The Cat"
assert album.year == 1999
# object is created as it does not exist
album2, created = await Album.objects.get_or_create(name='The Cat')
assert created is False
assert album == album2
# return True as the same db row is returned
```
!!!warning
Despite being an equivalent row in the database, the `album` and `album2` in
example above are 2 different python objects!
Updating one of them will not refresh the second one until you explicitly load() the
fresh data from db.
!!!note
Note that if you want to create a new object you either have to pass pk column
value or pk column has to be set as autoincrement
## update_or_create
`update_or_create(**kwargs) -> Model`
Updates the model, or in case there is no match in database creates a new one.
```Python hl_lines="40-48"
--8<-- "../docs_src/queries/docs003.py"
```
!!!note
Note that if you want to create a new object you either have to pass pk column
value or pk column has to be set as autoincrement
## bulk_create
`bulk_create(objects: List["Model"]) -> None`
Allows you to create multiple objects at once.
A valid list of `Model` objects needs to be passed.
```python hl_lines="26-32"
--8<-- "../docs_src/queries/docs004.py"
```
## Model methods
Each model instance have a set of methods to `save`, `update` or `load` itself.
### save
You can create new models by using `QuerySet.create()` method or by initializing your model as a normal pydantic model
and later calling `save()` method.
!!!tip
Read more about `save()` method in [models-save][models-save]
### upsert
It's a proxy to either `save()` or `update(**kwargs)` methods of a Model.
If the pk is not set the `save()` method will be called.
!!!tip
Read more about `upsert()` method in [models-upsert][models-upsert]
### save_related
Method goes through all relations of the `Model` on which the method is called,
and calls `upsert()` method on each model that is **not** saved.
!!!tip
Read more about `save_related()` method in [models-save-related][models-save-related]
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey` returns the list of related models.
But at the same time it exposes subset of QuerySet API, so you can filter, create, select related etc related models directly from parent model.
### create
Works exactly the same as [create](./#create) function above but allows you to create related objects
from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### get_or_create
Works exactly the same as [get_or_create](./#get_or_create) function above but allows you to query or create related objects
from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### update_or_create
Works exactly the same as [update_or_create](./#update_or_create) function above but allows you to update or create related objects
from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[models]: ../models/methods.md
[models-save]: ../models/methods.md#save
[models-upsert]: ../models/methods.md#upsert
[models-save-related]: ../models/methods.md#save_related
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/delete.md 0000664 0000000 0000000 00000007012 15130200524 0021142 0 ustar 00root root 0000000 0000000 # Delete data from database
Following methods allow you to delete data from the database.
* `delete(each: bool = False, **kwargs) -> int`
* `Model`
* `Model.delete()` method
* `QuerysetProxy`
* `QuerysetProxy.remove()` method
* `QuerysetProxy.clear()` method
## delete
`delete(each: bool = False, **kwargs) -> int`
QuerySet level delete is used to delete multiple records at once.
You either have to filter the QuerySet first or provide an `each=True` flag to delete
whole table.
If you do not provide this flag or a filter a `QueryDefinitionError` will be raised.
Return number of rows deleted.
```python hl_lines="40-44"
--8<-- "../docs_src/queries/docs005.py"
```
## Model methods
Each model instance have a set of methods to `save`, `update` or `load` itself.
### delete
You can delete model instance by calling `delete()` method on it.
!!!tip
Read more about `delete()` method in [models methods](../models/methods.md#delete)
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey`
returns the list of related models.
But at the same time it exposes subset of QuerySet API, so you can filter, create,
select related etc related models directly from parent model.
### remove
Removal of the related model one by one.
Removes the relation in the database.
If you specify the keep_reversed flag to `False` `ormar` will also delete the related model from the database.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
title: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
album = await Album(name="Malibu").save()
track1 = await Track(
album=album, title="The Bird", position=1, play_count=30,
).save()
# remove through proxy from reverse side of relation
await album.tracks.remove(track1, keep_reversed=False)
# the track was also deleted
tracks = await Track.objects.all()
assert len(tracks) == 0
```
### clear
Removal of all related models in one call.
Removes also the relation in the database.
If you specify the keep_reversed flag to `False` `ormar` will also delete the related model from the database.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
title: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
album = await Album(name="Malibu").save()
track1 = await Track(
album=album,
title="The Bird",
position=1,
play_count=30,
).save()
track2 = await Track(
album=album,
title="Heart don't stand a chance",
position=2,
play_count=20,
).save()
# removes the relation only -> clears foreign keys on tracks
await album.tracks.clear()
# removes also the tracks
await album.tracks.clear(keep_reversed=False)
```
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/filter-and-sort.md 0000664 0000000 0000000 00000065131 15130200524 0022720 0 ustar 00root root 0000000 0000000 # Filtering and sorting data
You can use the following methods to filter the data (sql where clause).
* `filter(*args, **kwargs) -> QuerySet`
* `exclude(*args, **kwargs) -> QuerySet`
* `get(*args, **kwargs) -> Model`
* `get_or_none(*args, **kwargs) -> Optional[Model]`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
* `all(*args, **kwargs) -> List[Optional[Model]]`
* `QuerysetProxy`
* `QuerysetProxy.filter(*args, **kwargs)` method
* `QuerysetProxy.exclude(*args, **kwargs)` method
* `QuerysetProxy.get(*args, **kwargs)` method
* `QuerysetProxy.get_or_none(*args, **kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs)` method
* `QuerysetProxy.all(*args, **kwargs)` method
And the following methods to sort the data (sql order by clause).
* `order_by(columns:Union[List, str, OrderAction]) -> QuerySet`
* `QuerysetProxy`
* `QuerysetProxy.order_by(columns:Union[List, str, OrderAction])` method
## Filtering
### filter
`filter(*args, **kwargs) -> QuerySet`
Allows you to filter by any `Model` attribute/field as well as to fetch instances, with
a filter across an FK relationship.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
track = Track.objects.filter(name="The Bird").get()
# will return a track with name equal to 'The Bird'
tracks = Track.objects.filter(album__name="Fantasies").all()
# will return all tracks where the columns album name = 'Fantasies'
```
### Django style filters
You can use special filter suffix to change the filter operands:
* **exact** - exact match to value, sql `column = `
* can be written as`album__name__exact='Malibu'`
* **iexact** - exact match sql `column = ` (case insensitive)
* can be written as`album__name__iexact='malibu'`
* **contains** - sql `column LIKE '%%'`
* can be written as`album__name__contains='Mal'`
* **icontains** - sql `column LIKE '%%'` (case insensitive)
* can be written as`album__name__icontains='mal'`
* **in** - sql ` column IN (, , ...)`
* can be written as`album__name__in=['Malibu', 'Barclay']`
* **isnull** - sql `column IS NULL` (and sql `column IS NOT NULL`)
* can be written as`album__name__isnull=True` (isnotnull `album__name__isnull=False`)
* **gt** - sql `column > ` (greater than)
* can be written as`position__gt=3`
* **gte** - sql `column >= ` (greater or equal than)
* can be written as`position__gte=3`
* **lt** - sql `column < ` (lower than)
* can be written as`position__lt=3`
* **lte** - sql `column <= ` (lower equal than)
* can be written as`position__lte=3`
* **startswith** - sql `column LIKE '%'` (exact start match)
* can be written as`album__name__startswith='Mal'`
* **istartswith** - sql `column LIKE '%'` (case insensitive)
* can be written as`album__name__istartswith='mal'`
* **endswith** - sql `column LIKE '%'` (exact end match)
* can be written as`album__name__endswith='ibu'`
* **iendswith** - sql `column LIKE '%'` (case insensitive)
* can be written as`album__name__iendswith='IBU'`
Some samples:
```python
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
Product.objects.filter(name='Test', rating__gte=3.0).get()
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
# OR (categories.name IN ('Toys', 'Books'))
Product.objects.filter(
ormar.or_(
ormar.and_(name='Test', rating__gte=3.0),
categories__name__in=['Toys', 'Books'])
).get()
# note: to read more about and_ and or_ read complex filters section below
```
### Python style filters
* **exact** - exact match to value, sql `column = `
* can be written as `Track.album.name == 'Malibu'`
* **iexact** - exact match sql `column = ` (case insensitive)
* can be written as `Track.album.name.iexact('malibu')`
* **contains** - sql `column LIKE '%%'`
* can be written as `Track.album.name % 'Mal'`
* can be written as `Track.album.name.contains('Mal')`
* **icontains** - sql `column LIKE '%%'` (case insensitive)
* can be written as `Track.album.name.icontains('mal')`
* **in** - sql ` column IN (, , ...)`
* can be written as `Track.album.name << ['Malibu', 'Barclay']`
* can be written as `Track.album.name.in_(['Malibu', 'Barclay'])`
* **isnull** - sql `column IS NULL` (and sql `column IS NOT NULL`)
* can be written as `Track.album.name >> None`
* can be written as `Track.album.name.isnull(True)`
* not null can be written as `Track.album.name.isnull(False)`
* not null can be written as `~(Track.album.name >> None)`
* not null can be written as `~(Track.album.name.isnull(True))`
* **gt** - sql `column > ` (greater than)
* can be written as `Track.position > 3`
* **gte** - sql `column >= ` (greater or equal than)
* can be written as `Track.position >= 3`
* **lt** - sql `column < ` (lower than)
* can be written as `Track.position < 3`
* **lte** - sql `column <= ` (lower equal than)
* can be written as `Track.position <= 3`
* **startswith** - sql `column LIKE '%'` (exact start match)
* can be written as `Track.album.name.startswith('Mal')`
* **istartswith** - sql `column LIKE '%'` (case insensitive)
* can be written as `Track.album.name.istartswith('mal')`
* **endswith** - sql `column LIKE '%'` (exact end match)
* can be written as `Track.album.name.endswith('ibu')`
* **iendswith** - sql `column LIKE '%'` (case insensitive)
* can be written as `Track.album.name.iendswith('IBU')`
Some samples:
```python
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
Product.objects.filter(
(Product.name == 'Test') & (Product.rating >= 3.0)
).get()
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
# OR (categories.name IN ('Toys', 'Books'))
Product.objects.filter(
((Product.name == 'Test') & (Product.rating >= 3.0)) |
(Product.categories.name << ['Toys', 'Books'])
).get()
```
!!!note
All methods that do not return the rows explicitly return a QuerySet instance, so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
!!!warning
Note that you do not have to specify the `%` wildcard in contains and other
filters, it's added for you. If you include `%` in your search value it will be escaped
and treated as literal percentage sign inside the text.
### exclude
`exclude(*args, **kwargs) -> QuerySet`
Works exactly the same as filter and all modifiers (suffixes) are the same, but returns
a not condition.
So if you use `filter(name='John')` which equals to `where name = 'John'` in SQL,
the `exclude(name='John')` equals to `where name <> 'John'`
Note that all conditions are joined so if you pass multiple values it becomes a union of
conditions.
`exclude(name='John', age__gte=35)` will become `where not (name='John' and age >= 35)`
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
tracks = await Track.objects.exclude(position__gt=3).all()
# returns all tracks with position <= 3
```
## Complex filters (including OR)
By default both `filter()` and `exclude()` methods combine provided filter options with
`AND` condition so `filter(name="John", age__gt=30)` translates into `WHERE name = 'John' AND age > 30`.
Sometimes it's useful to query the database with conditions that should not be applied
jointly like `WHERE name = 'John' OR age > 30`, or build a complex where query that you would
like to have bigger control over. After all `WHERE (name = 'John' OR age > 30) and city='New York'` is
completely different than `WHERE name = 'John' OR (age > 30 and city='New York')`.
In order to build `OR` and nested conditions ormar provides two functions that can be used in
`filter()` and `exclude()` in `QuerySet` and `QuerysetProxy`.
!!!note
Note that you can provide those methods in any other method like `get()` or `all()` that accepts `*args`.
Call to `or_` and `and_` can be nested in each other, as well as combined with keyword arguments.
Since it sounds more complicated than it is, let's look at some examples.
Given a sample models like this:
```python
base_ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
)
class Author(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Book(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
author: Optional[Author] = ormar.ForeignKey(Author)
title: str = ormar.String(max_length=100)
year: int = ormar.Integer(nullable=True)
```
Let's create some sample data:
```python
tolkien = await Author(name="J.R.R. Tolkien").save()
await Book(author=tolkien, title="The Hobbit", year=1933).save()
await Book(author=tolkien, title="The Lord of the Rings", year=1955).save()
await Book(author=tolkien, title="The Silmarillion", year=1977).save()
sapkowski = await Author(name="Andrzej Sapkowski").save()
await Book(author=sapkowski, title="The Witcher", year=1990).save()
await Book(author=sapkowski, title="The Tower of Fools", year=2002).save()
```
We can construct some sample complex queries:
Let's select books of Tolkien **OR** books written after 1970
sql:
`WHERE ( authors.name = 'J.R.R. Tolkien' OR books.year > 1970 )`
#### Django style
```python
books = (
await Book.objects.select_related("author")
.filter(ormar.or_(author__name="J.R.R. Tolkien", year__gt=1970))
.all()
)
assert len(books) == 5
```
#### Python style
```python
books = (
await Book.objects.select_related("author")
.filter((Book.author.name=="J.R.R. Tolkien") | (Book.year > 1970))
.all()
)
assert len(books) == 5
```
Now let's select books written after 1960 or before 1940 which were written by Tolkien.
sql:
`WHERE ( books.year > 1960 OR books.year < 1940 ) AND authors.name = 'J.R.R. Tolkien'`
#### Django style
```python
# OPTION 1 - split and into separate call
books = (
await Book.objects.select_related("author")
.filter(ormar.or_(year__gt=1960, year__lt=1940))
.filter(author__name="J.R.R. Tolkien")
.all()
)
assert len(books) == 2
# OPTION 2 - all in one
books = (
await Book.objects.select_related("author")
.filter(
ormar.and_(
ormar.or_(year__gt=1960, year__lt=1940),
author__name="J.R.R. Tolkien",
)
)
.all()
)
assert len(books) == 2
assert books[0].title == "The Hobbit"
assert books[1].title == "The Silmarillion"
```
#### Python style
```python
books = (
await Book.objects.select_related("author")
.filter((Book.year > 1960) | (Book.year < 1940))
.filter(Book.author.name == "J.R.R. Tolkien")
.all()
)
assert len(books) == 2
# OPTION 2 - all in one
books = (
await Book.objects.select_related("author")
.filter(
(
(Book.year > 1960) | (Book.year < 1940)
) & (Book.author.name == "J.R.R. Tolkien")
)
.all()
)
assert len(books) == 2
assert books[0].title == "The Hobbit"
assert books[1].title == "The Silmarillion"
```
Books of Sapkowski from before 2000 or books of Tolkien written after 1960
sql:
`WHERE ( ( books.year > 1960 AND authors.name = 'J.R.R. Tolkien' ) OR ( books.year < 2000 AND authors.name = 'Andrzej Sapkowski' ) ) `
#### Django style
```python
books = (
await Book.objects.select_related("author")
.filter(
ormar.or_(
ormar.and_(year__gt=1960, author__name="J.R.R. Tolkien"),
ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
)
)
.all()
)
assert len(books) == 2
```
#### Python style
```python
books = (
await Book.objects.select_related("author")
.filter(
((Book.year > 1960) & (Book.author.name == "J.R.R. Tolkien")) |
((Book.year < 2000) & (Book.author.name == "Andrzej Sapkowski"))
)
.all()
)
assert len(books) == 2
```
Of course those functions can have more than 2 conditions, so if we for example want
books that contains 'hobbit':
sql:
`WHERE ( ( books.year > 1960 AND authors.name = 'J.R.R. Tolkien' ) OR
( books.year < 2000 AND authors.name = 'Andrzej Sapkowski' ) OR
books.title LIKE '%hobbit%' )`
#### Django style
```python
books = (
await Book.objects.select_related("author")
.filter(
ormar.or_(
ormar.and_(year__gt=1960, author__name="J.R.R. Tolkien"),
ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
title__icontains="hobbit",
)
)
.all()
)
```
#### Python style
```python
books = (
await Book.objects.select_related("author")
.filter(
((Book.year > 1960) & (Book.author.name == "J.R.R. Tolkien")) |
((Book.year < 2000) & (Book.author.name == "Andrzej Sapkowski")) |
(Book.title.icontains("hobbit"))
)
.all()
)
```
If you want or need to you can nest deeper conditions as deep as you want, in example to
achieve a query like this:
sql:
```
WHERE ( ( ( books.year > 1960 OR books.year < 1940 )
AND authors.name = 'J.R.R. Tolkien' ) OR
( books.year < 2000 AND authors.name = 'Andrzej Sapkowski' ) )
```
You can construct a query as follows:
#### Django style
```python
books = (
await Book.objects.select_related("author")
.filter(
ormar.or_(
ormar.and_(
ormar.or_(year__gt=1960, year__lt=1940),
author__name="J.R.R. Tolkien",
),
ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
)
)
.all()
)
assert len(books) == 3
assert books[0].title == "The Hobbit"
assert books[1].title == "The Silmarillion"
assert books[2].title == "The Witcher"
```
#### Python style
```python
books = (
await Book.objects.select_related("author")
.filter(
(
(
(Book.year > 1960) |
(Book.year < 1940)
) &
(Book.author.name == "J.R.R. Tolkien")
) |
(
(Book.year < 2000) &
(Book.author.name == "Andrzej Sapkowski")
)
)
.all()
)
assert len(books) == 3
assert books[0].title == "The Hobbit"
assert books[1].title == "The Silmarillion"
assert books[2].title == "The Witcher"
```
By now you should already have an idea how `ormar.or_` and `ormar.and_` works.
Of course, you could chain them in any other methods of queryset, so in example a perfectly
valid query can look like follows:
```python
books = (
await Book.objects.select_related("author")
.filter(ormar.or_(year__gt=1980, author__name="Andrzej Sapkowski"))
.filter(title__startswith="The")
.limit(1)
.offset(1)
.order_by("-id")
.all()
)
assert len(books) == 1
assert books[0].title == "The Witcher"
```
Same applies to python style chaining and nesting.
#### Django style
Note that with django style you cannot provide the same keyword argument several times so queries like `filter(ormar.or_(name='Jack', name='John'))` are not allowed. If you want to check the same
column for several values simply use `in` operator: `filter(name__in=['Jack','John'])`.
If you pass only one parameter to `or_` or `and_` functions it's simply wrapped in parenthesis and
has no effect on actual query, so in the end all 3 queries are identical:
```python
await Book.objects.filter(title='The Hobbit').get()
await Book.objects.filter(ormar.or_(title='The Hobbit')).get()
await Book.objects.filter(ormar.and_(title='The Hobbit')).get()
```
!!!note
Note that `or_` and `and_` queries will have `WHERE (title='The Hobbit')` but the parenthesis is redundant and has no real effect.
This feature can be used if you **really** need to use the same field name twice.
Remember that you cannot pass the same keyword arguments twice to the function, so
how you can query in example `WHERE (authors.name LIKE '%tolkien%') OR (authors.name LIKE '%sapkowski%'))`?
You cannot do:
```python
books = (
await Book.objects.select_related("author")
.filter(ormar.or_(
author__name__icontains="tolkien",
author__name__icontains="sapkowski" # you cannot use same keyword twice in or_!
)) # python syntax error
.all()
)
```
But you can do this:
```python
books = (
await Book.objects.select_related("author")
.filter(ormar.or_(
ormar.and_(author__name__icontains="tolkien"), # one argument == just wrapped in ()
ormar.and_(author__name__icontains="sapkowski")
))
.all()
)
assert len(books) == 5
```
#### Python style
Note that with python style you can perfectly use the same fields as many times as you want.
```python
books = (
await Book.objects.select_related("author")
.filter(
(Book.author.name.icontains("tolkien")) |
(Book.author.name.icontains("sapkowski"))
)
.all()
)
```
## get
`get(*args, **kwargs) -> Model`
Gets the first row from the db meeting the criteria set by kwargs.
When any args and/or kwargs are passed it's a shortcut equivalent to calling `filter(*args, **kwargs).get()`
!!!tip
To read more about `filter` go to [filter](./#filter).
To read more about `get` go to [read/get](../read/#get)
## get_or_none
Exact equivalent of get described above but instead of raising the exception returns `None` if no db record matching the criteria is found.
## get_or_create
`get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
Combination of create and get methods.
When any args and/or kwargs are passed it's a shortcut equivalent to calling `filter(*args, **kwargs).get_or_create()`
!!!tip
To read more about `filter` go to [filter](./#filter).
To read more about `get_or_create` go to [read/get_or_create](../read/#get_or_create)
!!!warning
When given item does not exist you need to pass kwargs for all required fields of the
model, including but not limited to primary_key column (unless it's autoincrement).
## all
`all(*args, **kwargs) -> List[Optional["Model"]]`
Returns all rows from a database for given model for set filter options.
When any kwargs are passed it's a shortcut equivalent to calling `filter(*args, **kwargs).all()`
!!!tip
To read more about `filter` go to [filter](./#filter).
To read more about `all` go to [read/all](../read/#all)
### QuerysetProxy methods
When you access the related `ManyToMany` field, as well as a `ReverseForeignKey`, directly,
it returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create,
select related etc. on related models directly from the parent model.
#### filter
Works exactly the same as [filter](./#filter) function above but allows you to filter related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
#### exclude
Works exactly the same as [exclude](./#exclude) function above but allows you to filter related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
#### get
Works exactly the same as [get](./#get) function above but allows you to filter related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
#### get_or_none
Exact equivalent of get described above but instead of raising the exception returns `None` if no db record matching the criteria is found.
#### get_or_create
Works exactly the same as [get_or_create](./#get_or_create) function above but allows
you to filter related objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
#### all
Works exactly the same as [all](./#all) function above but allows you to filter related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
## Sorting
### order_by
`order_by(columns: Union[List, str, OrderAction]) -> QuerySet`
With `order_by()` you can order the results from database based on your choice of
fields.
You can provide a string with field name or list of strings with different fields.
Ordering in sql will be applied in order of names you provide in order_by.
!!!tip
By default if you do not provide ordering `ormar` explicitly orders by all
primary keys
!!!warning
If you are sorting by nested models that causes that the result rows are
unsorted by the main model
`ormar` will combine those children rows into one main model.
Sample raw database rows result (sort by child model desc):
```
MODEL: 1 - Child Model - 3
MODEL: 2 - Child Model - 2
MODEL: 1 - Child Model - 1
```
will result in 2 rows of result:
```
MODEL: 1 - Child Models: [3, 1] # encountered first in result, all children rows combined
MODEL: 2 - Child Models: [2]
```
The main model will never duplicate in the result
Given sample Models like following:
```python
--8<-- "../docs_src/queries/docs007.py"
```
To order by main model field just provide a field name
#### Django style
```python
toys = await Toy.objects.select_related("owner").order_by("name").all()
assert [x.name.replace("Toy ", "") for x in toys] == [
str(x + 1) for x in range(6)
]
assert toys[0].owner == zeus
assert toys[1].owner == aphrodite
```
#### Python style
```python
toys = await Toy.objects.select_related("owner").order_by(Toy.name.asc()).all()
assert [x.name.replace("Toy ", "") for x in toys] == [
str(x + 1) for x in range(6)
]
assert toys[0].owner == zeus
assert toys[1].owner == aphrodite
```
To sort on nested models separate field names with dunder '__'.
You can sort this way across all relation types -> `ForeignKey`, reverse virtual FK
and `ManyToMany` fields.
#### Django style
```python
toys = await Toy.objects.select_related("owner").order_by("owner__name").all()
assert toys[0].owner.name == toys[1].owner.name == "Aphrodite"
assert toys[2].owner.name == toys[3].owner.name == "Hermes"
assert toys[4].owner.name == toys[5].owner.name == "Zeus"
```
#### Python style
```python
toys = await Toy.objects.select_related("owner").order_by(Toy.owner.name.asc()).all()
assert toys[0].owner.name == toys[1].owner.name == "Aphrodite"
assert toys[2].owner.name == toys[3].owner.name == "Hermes"
assert toys[4].owner.name == toys[5].owner.name == "Zeus"
```
To sort in descending order provide a hyphen in front of the field name
#### Django style
```python
owner = (
await Owner.objects.select_related("toys")
.order_by("-toys__name")
.filter(name="Zeus")
.get()
)
assert owner.toys[0].name == "Toy 4"
assert owner.toys[1].name == "Toy 1"
```
#### Python style
```python
owner = (
await Owner.objects.select_related("toys")
.order_by(Owner.toys.name.desc())
.filter(Owner.name == "Zeus")
.get()
)
assert owner.toys[0].name == "Toy 4"
assert owner.toys[1].name == "Toy 1"
```
!!!note
All methods that do not return the rows explicitly return a QuerySet instance, so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
### Default sorting in ormar
Since order of rows in a database is not guaranteed, `ormar` **always** issues an `order by` sql clause to each (part of) query even if you do not provide order yourself.
When querying the database with given model by default the `Model` is ordered by the `primary_key`
column ascending. If you wish to change the default behaviour you can do it by providing `orders_by`
parameter to `ormar_config`.
!!!tip
To read more about models sort order visit [models](../models/index.md#model-sort-order) section of documentation
By default the relations follow the same ordering, but you can modify the order in which related models are loaded during query by providing `orders_by` and `related_orders_by`
parameters to relations.
!!!tip
To read more about models sort order visit [relations](../relations/index.md#relationship-default-sort-order) section of documentation
Order in which order_by clauses are applied is as follows:
* Explicitly passed `order_by()` calls in query
* Relation passed `orders_by` and `related_orders_by` if exists
* Model's `ormar_config` object `orders_by`
* Model's `primary_key` column ascending (fallback, used if none of above provided)
**Order from only one source is applied to each `Model` (so that you can always overwrite it in a single query).**
That means that if you provide explicit `order_by` for a model in a query, the `Relation` and `Model` sort orders are skipped.
If you provide a `Relation` one, the `Model` sort is skipped.
Finally, if you provide one for `Model` the default one by `primary_key` is skipped.
### QuerysetProxy methods
When you access the related `ManyToMany` field, as well as a `ReverseForeignKey`, directly,
it returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create,
select related etc. on related models directly from the parent model.
#### order_by
Works exactly the same as [order_by](./#order_by) function above but allows you to sort related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/index.md 0000664 0000000 0000000 00000014227 15130200524 0021015 0 ustar 00root root 0000000 0000000 # Querying database with ormar
## QuerySet
Each Model is auto registered with a `QuerySet` that represents the underlying query,
and it's options.
Most of the methods are also available through many to many relations and on reverse
foreign key relations through `QuerysetProxy` interface.
!!!info
To see which relations are supported and how to construct relations
visit [relations][relations].
For simplicity available methods to fetch and save the data into the database are
divided into categories according to the function they fulfill.
Note that some functions/methods are in multiple categories.
For completeness, Model and relation methods are listed.
To read more about any specific section or function please refer to the details subpage.
###[Insert data into database](./create.md)
* `create(**kwargs) -> Model`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
* `update_or_create(**kwargs) -> Model`
* `bulk_create(objects: List[Model]) -> None`
* `Model`
* `Model.save()` method
* `Model.upsert()` method
* `Model.save_related()` method
* `QuerysetProxy`
* `QuerysetProxy.create(**kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs)` method
* `QuerysetProxy.update_or_create(**kwargs)` method
!!!tip
To read more about any or all of those functions visit [create](./create.md) section.
### [Read data from database](./read.md)
* `get(**kwargs) -> Model`
* `get_or_none(**kwargs) -> Optional[Model]`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
* `first() -> Model`
* `all(**kwargs) -> List[Optional[Model]]`
* `Model`
* `Model.load()` method
* `QuerysetProxy`
* `QuerysetProxy.get(**kwargs)` method
* `QuerysetProxy.get_or_none(**kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs)` method
* `QuerysetProxy.first()` method
* `QuerysetProxy.all(**kwargs)` method
!!!tip
To read more about any or all of those functions visit [read](./read.md) section.
### [Read raw data from database](./raw-data.md)
Instead of ormar models return raw data in form list of dictionaries or tuples.
* `values(fields = None, exclude_through = False) -> List[Dict]`
* `values_list(fields = None, exclude_through = False, flatten = False) -> List`
* `QuerysetProxy`
* `QuerysetProxy.values(fields = None, exclude_through = False)` method
* `QuerysetProxy.values_list(fields = None, exclude_through= False, flatten = False)` method
!!!tip
To read more about any or all of those functions visit [raw data](./raw-data.md) section.
### [Update data in database](./update.md)
* `update(each: bool = False, **kwargs) -> int`
* `update_or_create(**kwargs) -> Model`
* `bulk_update(objects: List[Model], columns: List[str] = None) -> None`
* `Model`
* `Model.update()` method
* `Model.upsert()` method
* `Model.save_related()` method
* `QuerysetProxy`
* `QuerysetProxy.update_or_create(**kwargs)` method
!!!tip
To read more about any or all of those functions visit [update](./update.md) section.
### [Delete data from database](./delete.md)
* `delete(each: bool = False, **kwargs) -> int`
* `Model`
* `Model.delete()` method
* `QuerysetProxy`
* `QuerysetProxy.remove()` method
* `QuerysetProxy.clear()` method
!!!tip
To read more about any or all of those functions visit [delete](./delete.md) section.
### [Joins and subqueries](./joins-and-subqueries.md)
* `select_related(related: Union[List, str]) -> QuerySet`
* `prefetch_related(related: Union[List, str]) -> QuerySet`
* `Model`
* `Model.load()` method
* `QuerysetProxy`
* `QuerysetProxy.select_related(related: Union[List, str])` method
* `QuerysetProxy.prefetch_related(related: Union[List, str])` method
!!!tip
To read more about any or all of those functions visit [joins and subqueries](./joins-and-subqueries.md) section.
### [Filtering and sorting](./filter-and-sort.md)
* `filter(**kwargs) -> QuerySet`
* `exclude(**kwargs) -> QuerySet`
* `order_by(columns:Union[List, str]) -> QuerySet`
* `get(**kwargs) -> Model`
* `get_or_none(**kwargs) -> Optional[Model]`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
* `all(**kwargs) -> List[Optional[Model]]`
* `QuerysetProxy`
* `QuerysetProxy.filter(**kwargs)` method
* `QuerysetProxy.exclude(**kwargs)` method
* `QuerysetProxy.order_by(columns:Union[List, str])` method
* `QuerysetProxy.get(**kwargs)` method
* `QuerysetProxy.get_or_none(**kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs)` method
* `QuerysetProxy.all(**kwargs)` method
!!!tip
To read more about any or all of those functions visit [filtering and sorting](./filter-and-sort.md) section.
### [Selecting columns](./select-columns.md)
* `fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `QuerysetProxy`
* `QuerysetProxy.fields(columns: Union[List, str, set, dict])` method
* `QuerysetProxy.exclude_fields(columns: Union[List, str, set, dict])` method
!!!tip
To read more about any or all of those functions visit [selecting columns](./select-columns.md) section.
### [Pagination and rows number](./pagination-and-rows-number.md)
* `paginate(page: int) -> QuerySet`
* `limit(limit_count: int) -> QuerySet`
* `offset(offset: int) -> QuerySet`
* `get() -> Model`
* `first() -> Model`
* `QuerysetProxy`
* `QuerysetProxy.paginate(page: int)` method
* `QuerysetProxy.limit(limit_count: int)` method
* `QuerysetProxy.offset(offset: int)` method
!!!tip
To read more about any or all of those functions visit [pagination](./pagination-and-rows-number.md) section.
### [Aggregated functions](./aggregations.md)
* `count(distinct: bool = True) -> int`
* `exists() -> bool`
* `QuerysetProxy`
* `QuerysetProxy.count(distinct=True)` method
* `QuerysetProxy.exists()` method
!!!tip
To read more about any or all of those functions visit [aggregations](./aggregations.md) section.
[relations]: ../relations/index.md
collerek-ormar-c09209a/docs/queries/joins-and-subqueries.md 0000664 0000000 0000000 00000041075 15130200524 0023756 0 ustar 00root root 0000000 0000000 # Joins and subqueries
To join one table to another, and thus also load related models in the same query, you can use the following methods.
* `select_related(related: Union[List, str]) -> QuerySet`
* `select_all(follow: bool = True) -> QuerySet`
* `prefetch_related(related: Union[List, str]) -> QuerySet`
* `Model`
* `Model.load()` method
* `QuerysetProxy`
* `QuerysetProxy.select_related(related: Union[List, str])` method
* `QuerysetProxy.select_all(follow: bool=True)` method
* `QuerysetProxy.prefetch_related(related: Union[List, str])` method
## select_related
`select_related(related: Union[List, str]) -> QuerySet`
Allows to prefetch related models during the same query.
**With `select_related` always only one query is run against the database**, meaning
that one (sometimes complicated) join is generated and later nested models are processed in
python.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
!!!note
If you are coming from `django` note that `ormar` `select_related` differs ->
in `django` you can `select_related`
only single relation types, while in `ormar` you can select related across `ForeignKey`
relation, reverse side of `ForeignKey` (so virtual auto generated keys) and `ManyToMany`
fields (so all relations as of current version).
!!!tip
To control which model fields to select use `fields()`
and `exclude_fields()` `QuerySet` methods.
!!!tip
To control order of models (both main or nested) use `order_by()` method.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
title: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
# Django style
album = await Album.objects.select_related("tracks").all()
# Python style
album = await Album.objects.select_related(Album.tracks).all()
# will return albums with all related tracks
```
You can provide a string or a list of strings (or a field/ list of fields)
```python
class SchoolClass(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="schoolclasses")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
department: Optional[Department] = ormar.ForeignKey(Department, nullable=False)
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
class Teacher(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
```
```python
# Django style
classes = await SchoolClass.objects.select_related(
["teachers__category", "students"]).all()
# Python style
classes = await SchoolClass.objects.select_related(
[SchoolClass.teachers.category, SchoolClass.students]).all()
# will return classes with teachers and teachers categories
# as well as classes students
```
Exactly the same behavior is for Many2Many fields, where you put the names of Many2Many
fields and the final `Models` are fetched for you.
!!!warning
If you set `ForeignKey` field as not nullable (so required) during all
queries the not nullable `Models` will be auto prefetched, even if you do not include
them in select_related.
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## select_all
`select_all(follow: bool = False) -> QuerySet`
By default when you select `all()` none of the relations are loaded, likewise,
when `select_related()` is used you need to explicitly specify all relations that should
be loaded. If you want to include also nested relations this can be cumbersome.
That's why `select_all()` was introduced, so by default load all relations of a model
(so kind of opposite as with `all()` approach).
By default adds only directly related models of a parent model (from which the query is run).
If `follow=True` is set it adds also related models of related models.
!!!info
To not get stuck in an infinite loop as related models also keep a relation
to parent model visited models set is kept.
That way already visited models that are nested are loaded, but the load does not
follow them inside. So Model A -> Model B -> Model C -> Model A -> Model X
will load second Model A but will never follow into Model X.
Nested relations of those kind need to be loaded manually.
With sample data like the following:
```python
base_ormar_config = OrmarConfig(
database=databases.Database(DATABASE_URL, force_rollback=True),
metadata=sqlalchemy.MetaData(),
)
class Address(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="addresses")
id: int = ormar.Integer(primary_key=True)
street: str = ormar.String(max_length=100, nullable=False)
number: int = ormar.Integer(nullable=False)
post_code: str = ormar.String(max_length=20, nullable=False)
class Branch(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="branches")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, nullable=False)
address = ormar.ForeignKey(Address)
class Company(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="companies")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, nullable=False, name="company_name")
founded: int = ormar.Integer(nullable=True)
branches = ormar.ManyToMany(Branch)
```
To select all `Companies` with all `Branches` and `Addresses` you can simply query:
```python
companies = await Company.objects.select_all(follow=True).all()
# which is equivalent to:
companies = await Company.objects.select_related('branches__address').all()
```
Of course in this case it's quite easy to issue explicit relation names in `select_related`,
but the benefit of `select_all()` shows when you have multiple relations.
If for example `Company` would have 3 relations and all of those 3 relations have its own
3 relations you would have to issue 9 relation strings to `select_related`, `select_all()`
is also resistant to change in names of relations.
!!!note
Note that you can chain `select_all()` with other `QuerySet` methods like `filter`, `exclude_fields` etc.
To exclude relations use `exclude_fields()` call with names of relations (also nested) to exclude.
## prefetch_related
`prefetch_related(related: Union[List, str]) -> QuerySet`
Allows to prefetch related models during query - but opposite to `select_related` each
subsequent model is fetched in a separate database query.
**With `prefetch_related` always one query per Model is run against the database**,
meaning that you will have multiple queries executed one after another.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
!!!tip
To control which model fields to select use `fields()`
and `exclude_fields()` `QuerySet` methods.
!!!tip
To control order of models (both main or nested) use `order_by()` method.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
title: str = ormar.String(max_length=100)
position: int = ormar.Integer()
play_count: int = ormar.Integer(nullable=True)
```
```python
# Django style
album = await Album.objects.prefetch_related("tracks").all()
# Python style
album = await Album.objects.prefetch_related(Album.tracks).all()
# will return albums with all related tracks
```
You can provide a string, or a list of strings
```python
class SchoolClass(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="schoolclasses")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
department: Optional[Department] = ormar.ForeignKey(Department, nullable=False)
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
class Teacher(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
```
```python
# Django style
classes = await SchoolClass.objects.prefetch_related(
["teachers__category", "students"]).all()
# Python style
classes = await SchoolClass.objects.prefetch_related(
[SchoolClass.teachers.category, SchoolClass.students]).all()
# will return classes with teachers and teachers categories
# as well as classes students
```
Exactly the same behavior is for Many2Many fields, where you put the names of Many2Many
fields and the final `Models` are fetched for you.
!!!warning
If you set `ForeignKey` field as not nullable (so required) during all
queries the not nullable `Models` will be auto prefetched, even if you do not include
them in select_related.
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## select_related vs prefetch_related
Which should you use -> `select_related` or `prefetch_related`?
Well, it really depends on your data. The best answer is try yourself and see which one
performs faster/better in your system constraints.
What to keep in mind:
### Performance
**Number of queries**:
`select_related` always executes one query against the database,
while `prefetch_related` executes multiple queries. Usually the query (I/O) operation is
the slowest one but it does not have to be.
**Number of rows**:
Imagine that you have 10 000 object in one table A and each of those objects have 3
children in table B, and subsequently each object in table B has 2 children in table C.
Something like this:
```
Model C
/
Model B - Model C
/
Model A - Model B - Model C
\ \
\ Model C
\
Model B - Model C
\
Model C
```
That means that `select_related` will always return 60 000 rows (10 000 * 3 * 2) later
compacted to 10 000 models.
How many rows will return `prefetch_related`?
Well, that depends, if each of models B and C is unique it will return 10 000 rows in
first query, 30 000 rows
(each of 3 children of A in table B are unique) in second query and 60 000 rows (each of
2 children of model B in table C are unique) in 3rd query.
In this case `select_related` seems like a better choice, not only it will run one query
comparing to 3 of
`prefetch_related` but will also return 60 000 rows comparing to 100 000
of `prefetch_related` (10+30+60k).
But what if each Model A has exactly the same 3 models B and each model B has exactly the
same 2 models C? `select_related`
will still return 60 000 rows, while `prefetch_related` will return 10 000 for model A,
3 rows for model B and 2 rows for Model C. So in total 10 006 rows. Now depending on the
structure of models (i.e. if it has long Text() fields etc.) `prefetch_related`
might be faster despite it needs to perform three separate queries instead of one.
#### Memory
`ormar` does not keep a registry of already loaded models.
That means that in `select_related` example above you will always have 10 000 Models A,
30 000 Models B
(even if the unique number of rows in db is 3 - processing of `select_related` spawns
**new** child models for each parent model). And 60 000 Models C.
If the same Model B is shared by rows 1, 10, 100 etc. and you update one of those, the
rest of rows that share the same child will **not** be updated on the spot. If you
persist your changes into the database the change **will be available only after reload
(either each child separately or the whole query again)**. That means
that `select_related` will use more memory as each child is instantiated as a new object
- obviously using its own space.
!!!note
This might change in future versions if we decide to introduce caching.
!!!warning
By default all children (or even the same models loaded 2+ times) are
completely independent, distinct python objects, despite that they represent the same
row in db.
They will evaluate to True when compared, so in example above:
```python
# will return True if child1 of both rows is the same child db row
row1.child1 == row100.child1
# same here:
model1 = await Model.get(pk=1)
model2 = await Model.get(pk=1) # same pk = same row in db
# will return `True`
model1 == model2
```
but
```python
# will return False (note that id is a python `builtin` function not ormar one).
id(row1.child1) == id(row100.child1)
# from above - will also return False
id(model1) == id(model2)
```
On the contrary - with `prefetch_related` each unique distinct child model is
instantiated only once and the same child models is shared across all parent models.
That means that in `prefetch_related` example above if there are 3 distinct models in
table B and 2 in table C, there will be only 5 children nested models shared between all
model A instances. That also means that if you update any attribute it will be updated
on all parents as they share the same child object.
## Model methods
Each model instance have a set of methods to `save`, `update` or `load` itself.
### load
You can load the `ForeignKey` related model by calling `load()` method.
`load()` can be used to refresh the model from the database (if it was changed by some other process).
!!!tip
Read more about `load()` method in [models methods](../models/methods.md#load)
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey`
returns the list of related models.
But at the same time it exposes subset of QuerySet API, so you can filter, create,
select related etc related models directly from parent model.
### select_related
Works exactly the same as [select_related](./#select_related) function above but allows you to fetch related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### select_all
Works exactly the same as [select_all](./#select_all) function above but allows you to fetch related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### prefetch_related
Works exactly the same as [prefetch_related](./#prefetch_related) function above but allows you to fetch related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/pagination-and-rows-number.md 0000664 0000000 0000000 00000011500 15130200524 0025044 0 ustar 00root root 0000000 0000000 #Pagination and rows number
Following methods allow you to paginate and limit number of rows in queries.
* `paginate(page: int) -> QuerySet`
* `limit(limit_count: int) -> QuerySet`
* `offset(offset: int) -> QuerySet`
* `get() -> Model`
* `first() -> Model`
* `QuerysetProxy`
* `QuerysetProxy.paginate(page: int)` method
* `QuerysetProxy.limit(limit_count: int)` method
* `QuerysetProxy.offset(offset: int)` method
## paginate
`paginate(page: int, page_size: int = 20) -> QuerySet`
Combines the `offset` and `limit` methods based on page number and size
```python
class Track(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
tablename="track"
)
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
```
```python
tracks = await Track.objects.paginate(3).all()
# will return 20 tracks starting at row 41
# (with default page size of 20)
```
Note that `paginate(2)` is equivalent to `offset(20).limit(20)`
## limit
`limit(limit_count: int, limit_raw_sql: bool = None) -> QuerySet`
You can limit the results to desired number of parent models.
To limit the actual number of database query rows instead of number of main models
use the `limit_raw_sql` parameter flag, and set it to `True`.
```python
class Track(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
tablename="track"
)
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
```
```python
tracks = await Track.objects.limit(1).all()
# will return just one Track
```
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## offset
`offset(offset: int, limit_raw_sql: bool = None) -> QuerySet`
You can also offset the results by desired number of main models.
To offset the actual number of database query rows instead of number of main models
use the `limit_raw_sql` parameter flag, and set it to `True`.
```python
class Track(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
tablename="track"
)
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
```
```python
tracks = await Track.objects.offset(1).limit(1).all()
# will return just one Track, but this time the second one
```
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## get
`get(**kwargs) -> Model`
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria is set it will return the last row in db sorted by pk.
(The criteria can also be set with filter/exclude.)
!!!tip
To read more about `get` visit [read/get](./read/#get)
## first
`first() -> Model`
Gets the first row from the db ordered by primary key column ascending.
!!!tip
To read more about `first` visit [read/first](./read/#first)
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey`
returns the list of related models.
But at the same time it exposes subset of QuerySet API, so you can filter, create,
select related etc related models directly from parent model.
### paginate
Works exactly the same as [paginate](./#paginate) function above but allows you to paginate related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### limit
Works exactly the same as [limit](./#limit) function above but allows you to paginate related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### offset
Works exactly the same as [offset](./#offset) function above but allows you to paginate related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/raw-data.md 0000664 0000000 0000000 00000027762 15130200524 0021416 0 ustar 00root root 0000000 0000000 # Return raw data
Following methods allow you to execute a query but instead of returning ormar models those will return list of dicts or tuples.
* `values(fields = None, exclude_through = False) -> List[Dict]`
* `values_list(fields = None, exclude_through = False, flatten = False) -> List`
* `QuerysetProxy`
* `QuerysetProxy.values(fields = None, exclude_through = False)` method
* `QuerysetProxy.values_list(fields = None, exclude_through= False, flatten = False)` method
!!!danger
Note that `values` and `values_list` skips parsing the result to ormar models so skips also the validation of the result!
!!!warning
Note that each entry in a result list is one to one reflection of a query result row.
Since rows are not parsed if you have one-to-many or many-to-many relation expect
duplicated columns values in result entries if one parent row have multiple related rows.
## values
`values(fields: Union[List, str, Set, Dict] = None, exclude_through: bool = False) -> List[Dict]`
Return a list of dictionaries representing the values of the columns coming from the database.
You can select a subset of fields with fields parameter, that accepts the same set of parameters as `fields()` method.
Note that passing fields to `values(fields)` is actually a shortcut for calling `fields(fields).values()`.
!!!tip
To read more about what you can pass to fields and how to select nested models fields read [selecting columns](./select-columns.md#fields) docs
You can limit the number of rows by providing conditions in `filter()` and `exclude()`, but note that even if only one row (or no rows!) match your criteria you will return a list in response.
Example:
```python
# declared models
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
sort_order: int = ormar.Integer(nullable=True)
class Post(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
category: Optional[Category] = ormar.ForeignKey(Category)
# sample data
news = await Category(name="News", sort_order=0).save()
await Post(name="Ormar strikes again!", category=news).save()
await Post(name="Why don't you use ormar yet?", category=news).save()
await Post(name="Check this out, ormar now for free", category=news).save()
```
Access Post models:
```python
posts = await Post.objects.values()
assert posts == [
{"id": 1, "name": "Ormar strikes again!", "category": 1},
{"id": 2, "name": "Why don't you use ormar yet?", "category": 1},
{"id": 3, "name": "Check this out, ormar now for free", "category": 1},
]
```
To select also related models use `select_related` or `prefetch_related`.
Note how nested models columns will be prefixed with full relation path coming from the main model (the one used in a query).
```python
# declare models
class User(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Role(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
users: List[User] = ormar.ManyToMany(User)
# sample data
creator = await User(name="Anonymous").save()
admin = await Role(name="admin").save()
editor = await Role(name="editor").save()
await creator.roles.add(admin)
await creator.roles.add(editor)
```
Select user with roles
```python
user = await User.objects.select_related("roles").values()
# note nested prefixes: roleuser and roles
assert user == [
{
"id": 1,
"name": "Anonymous",
"roleuser__id": 1,
"roleuser__role": 1,
"roleuser__user": 1,
"roles__id": 1,
"roles__name": "admin",
},
{
"id": 1,
"name": "Anonymous",
"roleuser__id": 2,
"roleuser__role": 2,
"roleuser__user": 1,
"roles__id": 2,
"roles__name": "editor",
},
]
```
!!!note
Note how role to users relation is a `ManyToMany` relation so by default you also get through model columns.
Combine select related and fields to select only 3 fields.
Note that we also exclude through model as by definition every model included in a join but without any reference in fields is assumed to be selected in full (all fields included).
!!!note
Note that in contrary to other queryset methods here you can exclude the
in-between models but keep the end columns, which does not make sense
when parsing the raw data into models.
So in relation category -> category_x_post -> post -> user you can exclude
category_x_post and post models but can keep the user one. (in ormar model
context that is not possible as if you would exclude through and post model
there would be no way to reach user model from category model).
```python
user = (
await Role.objects.select_related("users__categories")
.filter(name="admin")
.fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
.exclude_fields("roleuser")
.values()
)
assert user == [
{
"name": "admin",
"users__name": "Anonymous",
"users__categories__name": "News",
}
]
```
If you have multiple ManyToMany models in your query you would have to exclude each through model manually.
To avoid this burden `ormar` provides you with `exclude_through=False` parameter.
If you set this flag to True **all through models will be fully excluded**.
```python
# equivalent to query above, note lack of exclude_fields call
user = (
await Role.objects.select_related("users__categories")
.filter(name="admin")
.fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
.values(exclude_through=True)
)
assert user == [
{
"name": "admin",
"users__name": "Anonymous",
"users__categories__name": "News",
}
]
```
## values_list
`values_list(fields: Union[List, str, Set, Dict] = None, flatten: bool = False, exclude_through: bool = False) -> List`
Return a list of tuples representing the values of the columns coming from the database.
You can select a subset of fields with fields parameter, that accepts the same set of parameters as `fields()` method.
Note that passing fields to `values_list(fields)` is actually a shortcut for calling `fields(fields).values_list()`.
!!!tip
To read more about what you can pass to fields and how to select nested models fields read [selecting columns](./select-columns.md#fields) docs
If you select only one column/field you can pass `flatten=True` which will return you a list of values instead of list of one element tuples.
!!!warning
Setting `flatten=True` if more than one (or none which means all) fields are selected will raise `QueryDefinitionError` exception.
You can limit the number of rows by providing conditions in `filter()` and `exclude()`, but note that even if only one row (or no rows!) match your criteria you will return a list in response.
Example:
```python
# declared models
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
sort_order: int = ormar.Integer(nullable=True)
class Post(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
category: Optional[Category] = ormar.ForeignKey(Category)
# sample data
news = await Category(name="News", sort_order=0).save()
await Post(name="Ormar strikes again!", category=news).save()
await Post(name="Why don't you use ormar yet?", category=news).save()
await Post(name="Check this out, ormar now for free", category=news).save()
```
Access Post models:
```python
posts = await Post.objects.values_list()
# note how columns refer to id, name and category (fk)
assert posts == [
(1, "Ormar strikes again!", 1),
(2, "Why don't you use ormar yet?", 1),
(3, "Check this out, ormar now for free", 1),
]
```
To select also related models use `select_related` or `prefetch_related`.
Let's complicate the relation and modify the previously mentioned Category model to refer to User model.
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
sort_order: int = ormar.Integer(nullable=True)
# new column below
created_by: Optional[User] = ormar.ForeignKey(User, related_name="categories")
```
Now create the sample data with link to user.
```python
creator = await User(name="Anonymous").save()
admin = await Role(name="admin").save()
editor = await Role(name="editor").save()
await creator.roles.add(admin)
await creator.roles.add(editor)
news = await Category(name="News", sort_order=0, created_by=creator).save()
```
Combine select related and fields to select only 3 fields.
Note that we also exclude through model as by definition every model included in a join but without any reference in fields is assumed to be selected in full (all fields included).
!!!note
Note that in contrary to other queryset methods here you can exclude the
in-between models but keep the end columns, which does not make sense
when parsing the raw data into models.
So in relation category -> category_x_post -> post -> user you can exclude
category_x_post and post models but can keep the user one. (in ormar model
context that is not possible as if you would exclude through and post model
there would be no way to reach user model from category model).
```python
user = (
await Role.objects.select_related("users__categories")
.filter(name="admin")
.fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
.exclude_fields("roleuser")
.values_list()
)
assert user == [("admin", "Anonymous", "News")]
```
If you have multiple ManyToMany models in your query you would have to exclude each through model manually.
To avoid this burden `ormar` provides you with `exclude_through=False` parameter.
If you set this flag to True **all through models will be fully excluded**.
```python
# equivalent to query above, note lack of exclude_fields call
user = (
await Role.objects.select_related("users__categories")
.filter(name="admin")
.fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
.values_list(exclude_through=True)
)
assert user == [("admin", "Anonymous", "News")]
```
Use flatten to get list of values.
```python
# using flatten with more than one field will raise exception!
await Role.objects.fields({"name", "id"}).values_list(flatten=True)
# proper usage
roles = await Role.objects.fields("name").values_list(flatten=True)
assert roles == ["admin", "editor"]
```
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey`
returns the list of related models.
But at the same time it exposes subset of QuerySet API, so you can filter, create,
select related etc related models directly from parent model.
!!!warning
Because using `values` and `values_list` skips parsing of the models and validation, in contrast to all other read methods in querysetproxy those 2 **does not clear currently loaded related models** and **does not overwrite the currently loaded models** with result of own call!
### values
Works exactly the same as [values](./#values) function above but allows you to fetch related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### values_list
Works exactly the same as [values_list](./#values_list) function above but allows
you to query or create related objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/read.md 0000664 0000000 0000000 00000017302 15130200524 0020616 0 ustar 00root root 0000000 0000000 # Read data from database
Following methods allow you to load data from the database.
* `get(*args, **kwargs) -> Model`
* `get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
* `first(*args, **kwargs) -> Model`
* `all(*args, **kwargs) -> List[Optional[Model]]`
* `iterate(*args, **kwargs) -> AsyncGenerator[Model]`
* `Model`
* `Model.load()` method
* `QuerysetProxy`
* `QuerysetProxy.get(*args, **kwargs)` method
* `QuerysetProxy.get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs)` method
* `QuerysetProxy.first(*args, **kwargs)` method
* `QuerysetProxy.all(*args, **kwargs)` method
## get
`get(*args, **kwargs) -> Model`
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria set it will return the last row in db sorted by pk column.
Passing a criteria is actually calling filter(*args, **kwargs) method described below.
```python
class Track(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="track"
)
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
name: str = ormar.String(max_length=100)
position: int = ormar.Integer()
```
```python
track = await Track.objects.get(name='The Bird')
# note that above is equivalent to await Track.objects.filter(name='The Bird').get()
track2 = await Track.objects.get()
track == track2
# True since it's the only row in db in our example
# and get without arguments returns the first row by pk column desc
```
!!!warning
If no row meets the criteria `NoMatch` exception is raised.
If there are multiple rows meeting the criteria the `MultipleMatches` exception is raised.
## get_or_none
`get_or_none(*args, **kwargs) -> Model`
Exact equivalent of get described above but instead of raising the exception returns `None` if no db record matching the criteria is found.
## get_or_create
`get_or_create(_defaults: Optional[Dict[str, Any]] = None, *args, **kwargs) -> Tuple[Model, bool]`
Combination of create and get methods.
Tries to get a row meeting the criteria and if `NoMatch` exception is raised it creates
a new one with given kwargs and _defaults.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="album")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
year: int = ormar.Integer()
```
```python
album, created = await Album.objects.get_or_create(name='The Cat', _defaults={"year": 1999})
assert created is True
assert album.name == "The Cat"
assert album.year == 1999
# object is created as it does not exist
album2, created = await Album.objects.get_or_create(name='The Cat')
assert created is False
assert album == album2
# returns True as the same db row is returned
```
!!!warning
Despite being an equivalent row from database the `album` and `album2` in
example above are 2 different python objects!
Updating one of them will not refresh the second one until you explicitly load() the
fresh data from db.
!!!note
Note that if you want to create a new object you either have to pass pk column
value or pk column has to be set as autoincrement
## first
`first(*args, **kwargs) -> Model`
Gets the first row from the db ordered by primary key column ascending.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="album")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
```python
await Album.objects.create(name='The Cat')
await Album.objects.create(name='The Dog')
album = await Album.objects.first()
# first row by primary_key column asc
assert album.name == 'The Cat'
```
## all
`all(*args, **kwargs) -> List[Optional["Model"]]`
Returns all rows from a database for given model for set filter options.
Passing kwargs is a shortcut and equals to calling `filter(*args, **kwargs).all()`.
If there are no rows meeting the criteria an empty list is returned.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="album")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Track(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="track")
id: int = ormar.Integer(primary_key=True)
album: Optional[Album] = ormar.ForeignKey(Album)
title: str = ormar.String(max_length=100)
position: int = ormar.Integer()
```
```python
tracks = await Track.objects.select_related("album").all(album__name='Sample')
# will return a list of all Tracks for album Sample
# for more on joins visit joining and subqueries section
tracks = await Track.objects.all()
# will return a list of all Tracks in database
```
## iterate
`iterate(*args, **kwargs) -> AsyncGenerator["Model"]`
Return async iterable generator for all rows from a database for given model.
Passing args and/or kwargs is a shortcut and equals to calling `filter(*args, **kwargs).iterate()`.
If there are no rows meeting the criteria an empty async generator is returned.
```python
class Album(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="album")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
```python
await Album.objects.create(name='The Cat')
await Album.objects.create(name='The Dog')
# will asynchronously iterate all Album models yielding one main model at a time from the generator
async for album in Album.objects.iterate():
print(album.name)
# The Cat
# The Dog
```
!!!warning
Use of `iterate()` causes previous `prefetch_related()` calls to be ignored;
since these two optimizations do not make sense together.
If `iterate()` & `prefetch_related()` are used together the `QueryDefinitionError` exception is raised.
## Model methods
Each model instance has a set of methods to `save`, `update` or `load` itself.
### load
You can load the `ForeignKey` related model by calling `load()` method.
`load()` can be used to refresh the model from the database (if it was changed by some other process).
!!!tip
Read more about `load()` method in [models methods](../models/methods.md#load)
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field, as well as `ReverseForeignKey`,
returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create,
select related etc. on related models directly from the parent model.
### get
Works exactly the same as [get](./#get) function above but allows you to fetch related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### get_or_none
Exact equivalent of get described above but instead of raising the exception returns `None` if no db record matching the criteria is found.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### get_or_create
Works exactly the same as [get_or_create](./#get_or_create) function above but allows
you to query or create related objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### first
Works exactly the same as [first](./#first) function above but allows you to query
related objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### all
Works exactly the same as [all](./#all) function above but allows you to query related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/select-columns.md 0000664 0000000 0000000 00000021647 15130200524 0022647 0 ustar 00root root 0000000 0000000 # Selecting subset of columns
To select only chosen columns of your model you can use following functions.
* `fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
* `QuerysetProxy`
* `QuerysetProxy.fields(columns: Union[List, str, set, dict])` method
* `QuerysetProxy.exclude_fields(columns: Union[List, str, set, dict])` method
## fields
`fields(columns: Union[List, str, set, dict]) -> QuerySet`
With `fields()` you can select subset of model columns to limit the data load.
!!!note
Note that `fields()` and `exclude_fields()` works both for main models (on
normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related` models (with nested notation).
Given a sample data like following:
```python
--8<-- "../docs_src/select_columns/docs001.py"
```
You can select specified fields by passing a `str, List[str], Set[str] or dict` with
nested definition.
To include related models use
notation `{related_name}__{column}[__{optional_next} etc.]`.
```python hl_lines="1-6"
all_cars = await (
Car.objects
.select_related('manufacturer')
.fields(['id', 'name', 'manufacturer__name'])
.all()
)
for car in all_cars:
# excluded columns will yield None
assert all(getattr(car, x) is None for x in ['year', 'gearbox_type', 'gears', 'aircon_type'])
# included column on related models will be available, pk column is always included
# even if you do not include it in fields list
assert car.manufacturer.name == 'Toyota'
# also in the nested related models - you cannot exclude pk - it's always auto added
assert car.manufacturer.founded is None
```
`fields()` can be called several times, building up the columns to select.
If you include related models in a `select_related()` call but do not specify columns
for those models in `fields`,
all fields of those nested models are implied and included.
```python hl_lines="1-7"
all_cars = await (
Car.objects
.select_related('manufacturer')
.fields('id')
.fields(['name'])
.all()
)
# all fields from company model are selected
assert all_cars[0].manufacturer.name == 'Toyota'
assert all_cars[0].manufacturer.founded == 1937
```
!!!warning
Mandatory fields cannot be excluded as it will raise `ValidationError`, to
exclude a field it has to be nullable.
The `values()` method can be used to exclude mandatory fields, though data will
be returned as a `dict`.
You cannot exclude mandatory model columns - `manufacturer__name` in this example.
```python
await (
Car.objects
.select_related('manufacturer')
.fields(['id', 'name', 'manufacturer__founded'])
.all()
)
# will raise pydantic ValidationError as company.name is required
```
!!!tip
Pk column cannot be excluded - it's always auto added even if not explicitly
included.
You can also pass fields to include as dictionary or set.
To mark a field as included in a dictionary use its name as the key and ellipsis as the value.
To traverse nested models use nested dictionaries.
To include fields at last level instead of nested dictionary a set can be used.
To include whole nested model specify model related field name and ellipsis.
Below you can see examples that are equivalent:
```python
# 1. like in example above
await (
Car.objects
.select_related('manufacturer')
.fields(['id', 'name', 'manufacturer__name'])
.all()
)
# 2. to mark a field as included use ellipsis
await (
Car.objects
.select_related('manufacturer')
.fields({'id': ...,
'name': ...,
'manufacturer': {
'name': ...
}
})
.all()
)
# 3. to include whole nested model use ellipsis
await (
Car.objects
.select_related('manufacturer')
.fields({'id': ...,
'name': ...,
'manufacturer': ...
})
.all()
)
# 4. to specify fields at last nesting level
# you can also use set - equivalent to 2. above
await (
Car.objects
.select_related('manufacturer')
.fields({'id': ...,
'name': ...,
'manufacturer': {'name'}
})
.all()
)
# 5. of course set can have multiple fields
await (
Car.objects
.select_related('manufacturer')
.fields({'id': ...,
'name': ...,
'manufacturer': {'name', 'founded'}
})
.all()
)
# 6. you can include all nested fields,
# but it will be equivalent of 3. above which is shorter
await (
Car.objects
.select_related('manufacturer')
.fields({'id': ...,
'name': ...,
'manufacturer': {'id', 'name', 'founded'}
})
.all()
)
```
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.objects.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## exclude_fields
`exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
With `exclude_fields()` you can select subset of model columns that will be excluded to
limit the data load.
It's the opposite of `fields()` method so check documentation above to see what options
are available.
Especially check above how you can pass also nested dictionaries and sets as a mask to
exclude fields from whole hierarchy.
!!!note
Note that `fields()` and `exclude_fields()` works both for main models (on
normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related` models (with nested notation).
Below you can find few simple examples:
```python
--8<-- "../docs_src/select_columns/docs001.py"
```
```python
# select manufacturer but only name,
# to include related models use notation {model_name}__{column}
all_cars = await (
Car.objects
.select_related('manufacturer')
.exclude_fields([
'year',
'gearbox_type',
'gears',
'aircon_type',
'manufacturer__founded'
])
.all()
)
for car in all_cars:
# excluded columns will yield None
assert all(getattr(car, x) is None
for x in [
'year',
'gearbox_type',
'gears',
'aircon_type'
])
# included column on related models will be available,
# pk column is always included
# even if you do not include it in fields list
assert car.manufacturer.name == 'Toyota'
# also in the nested related models,
# you cannot exclude pk - it's always auto added
assert car.manufacturer.founded is None
# fields() can be called several times,
# building up the columns to select
# models included in select_related
# but with no columns in fields list implies all fields
all_cars = await (
Car.objects
.select_related('manufacturer')
.exclude_fields('year')
.exclude_fields(['gear', 'gearbox_type'])
.all()
)
# all fields from company model are selected
assert all_cars[0].manufacturer.name == 'Toyota'
assert all_cars[0].manufacturer.founded == 1937
# cannot exclude mandatory model columns,
# manufacturer__name in this example - note usage of dict/set this time
await (
Car.objects
.select_related('manufacturer')
.exclude_fields([{'manufacturer': {'name'}}])
.all()
)
# will raise pydantic ValidationError as company.name is required
```
!!!warning
Mandatory fields cannot be excluded as it will raise `ValidationError`, to
exclude a field it has to be nullable.
The `values()` method can be used to exclude mandatory fields, though data will
be returned as a `dict`.
!!!tip
Pk column cannot be excluded - it's always auto added even if explicitly
excluded.
!!!note
All methods that do not return the rows explicitly returns a QuerySet instance so
you can chain them together
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.objects.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field, as well as `ReverseForeignKey`,
returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create,
select related etc. on related models directly from the parent model.
### fields
Works exactly the same as [fields](./#fields) function above but allows you to select columns from related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
### exclude_fields
Works exactly the same as [exclude_fields](./#exclude_fields) function above but allows you to select columns from related
objects from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
collerek-ormar-c09209a/docs/queries/update.md 0000664 0000000 0000000 00000007200 15130200524 0021161 0 ustar 00root root 0000000 0000000 # Update data in database
Following methods and functions allow updating existing data in the database.
* `update(each: bool = False, **kwargs) -> int`
* `update_or_create(**kwargs) -> Model`
* `bulk_update(objects: List[Model], columns: List[str] = None) -> None`
* `Model`
* `Model.update()` method
* `Model.upsert()` method
* `Model.save_related()` method
* `QuerysetProxy`
* `QuerysetProxy.update_or_create(**kwargs)` method
## update
`update(each: bool = False, **kwargs) -> int`
QuerySet level update is used to update multiple records with the same value at once.
You either have to filter the QuerySet first or provide an `each=True` flag to update
whole table.
If you do not provide this flag or a filter a `QueryDefinitionError` will be raised.
Return number of rows updated.
```Python hl_lines="42-44"
--8<-- "../docs_src/queries/docs002.py"
```
!!!warning
Queryset needs to be filtered before updating to prevent accidental overwrite.
To update whole database table `each=True` needs to be provided as a safety switch
## update_or_create
`update_or_create(**kwargs) -> Model`
Updates the model, or in case there is no match in database creates a new one.
```Python hl_lines="40-48"
--8<-- "../docs_src/queries/docs003.py"
```
!!!note
Note that if you want to create a new object you either have to pass pk column
value or pk column has to be set as autoincrement
## bulk_update
`bulk_update(objects: List["Model"], columns: List[str] = None) -> None`
Allows you to update multiple instances at once.
All `Models` passed need to have primary key column populated.
You can also select which fields to update by passing `columns` list as a list of string
names.
```python hl_lines="8"
# continuing the example from bulk_create
# update objects
for todo in todoes:
todo.completed = False
# perform update of all objects at once
# objects need to have pk column set, otherwise exception is raised
await ToDo.objects.bulk_update(todoes)
completed = await ToDo.objects.filter(completed=False).all()
assert len(completed) == 3
```
## Model methods
Each model instance has a set of methods to `save`, `update` or `load` itself.
### update
You can update models by updating your model attributes (fields) and calling `update()` method.
If you try to update a model without a primary key set a `ModelPersistenceError` exception will be thrown.
!!!tip
Read more about `update()` method in [models-update](../models/methods.md#update)
### upsert
It's a proxy to either `save()` or `update(**kwargs)` methods of a Model.
If the pk is set the `update()` method will be called.
!!!tip
Read more about `upsert()` method in [models-upsert][models-upsert]
### save_related
Method goes through all relations of the `Model` on which the method is called,
and calls `upsert()` method on each model that is **not** saved.
!!!tip
Read more about `save_related()` method in [models-save-related][models-save-related]
## QuerysetProxy methods
When accessed directly, the related `ManyToMany` field, as well as `ReverseForeignKey`, returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create, select related etc. on related models directly from the parent model.
### update_or_create
Works exactly the same as [update_or_create](./#update_or_create) function above but allows you to update or create related objects
from other side of the relation.
!!!tip
To read more about `QuerysetProxy` visit [querysetproxy][querysetproxy] section
[querysetproxy]: ../relations/queryset-proxy.md
[models-upsert]: ../models/methods.md#upsert
[models-save-related]: ../models/methods.md#save_related
collerek-ormar-c09209a/docs/relations/ 0000775 0000000 0000000 00000000000 15130200524 0017701 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs/relations/foreign-key.md 0000664 0000000 0000000 00000023267 15130200524 0022454 0 ustar 00root root 0000000 0000000 # ForeignKey
`ForeignKey(to: Model, *, name: str = None, unique: bool = False, nullable: bool = True,
related_name: str = None, virtual: bool = False, onupdate: Union[ReferentialAction, str] = None,
ondelete: Union[ReferentialAction, str] = None, **kwargs: Any)`
has required parameters `to` that takes target `Model` class.
Sqlalchemy column and Type are automatically taken from target `Model`.
* Sqlalchemy column: class of a target `Model` primary key column
* Type (used for pydantic): type of a target `Model`
## Defining Models
To define a relation add `ForeignKey` field that points to related `Model`.
```Python hl_lines="30"
--8<-- "../docs_src/fields/docs003.py"
```
## Reverse Relation
`ForeignKey` fields are automatically registering reverse side of the relation.
By default it is the child (source) `Model` name + 's', like `courses` in the snippet below:
```Python hl_lines="29 36"
--8<-- "../docs_src/fields/docs001.py"
```
Reverse relation exposes API to manage related objects also from parent side.
### Skipping reverse relation
If you are sure you don't want the reverse relation you can use `skip_reverse=True`
flag of the `ForeignKey`.
If you set `skip_reverse` flag internally the field is still registered on the other
side of the relationship so you can:
* `filter` by related models fields from reverse model
* `order_by` by related models fields from reverse model
But you cannot:
* Access the related field from reverse model with `related_name`
* Even if you `select_related` from reverse side of the model the returned models won't be populated in reversed instance (the join is not prevented so you still can `filter` and `order_by` over the relation)
* The relation won't be populated in `model_dump()` and `model_dump_json()`
* You cannot pass the nested related objects when populating from dictionary or json (also through `fastapi`). It will be either ignored or error will be raised depending on `extra` setting in pydantic `Config`.
Example:
```python
class Author(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
first_name: str = ormar.String(max_length=80)
last_name: str = ormar.String(max_length=80)
class Post(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
author: Optional[Author] = ormar.ForeignKey(Author, skip_reverse=True)
# create sample data
author = Author(first_name="Test", last_name="Author")
post = Post(title="Test Post", author=author)
assert post.author == author # ok
assert author.posts # Attribute error!
# but still can use in order_by
authors = (
await Author.objects.select_related("posts").order_by("posts__title").all()
)
assert authors[0].first_name == "Test"
# note that posts are not populated for author even if explicitly
# included in select_related - note no posts in model_dump()
assert author.model_dump(exclude={"id"}) == {"first_name": "Test", "last_name": "Author"}
# still can filter through fields of related model
authors = await Author.objects.filter(posts__title="Test Post").all()
assert authors[0].first_name == "Test"
assert len(authors) == 1
```
### add
Adding child model from parent side causes adding related model to currently loaded parent relation,
as well as sets child's model foreign key value and updates the model.
```python
department = await Department(name="Science").save()
course = Course(name="Math", completed=False) # note - not saved
await department.courses.add(course)
assert course.pk is not None # child model was saved
# relation on child model is set and FK column saved in db
assert course.department == department
# relation on parent model is also set
assert department.courses[0] == course
```
!!!warning
If you want to add child model on related model the primary key value for parent model **has to exist in database**.
Otherwise ormar will raise `RelationshipInstanceError` as it cannot set child's ForeignKey column value
if parent model has no primary key value.
That means that in example above the department has to be saved before you can call `department.courses.add()`.
!!!warning
This method will not work on `ManyToMany` relations - there, both sides of the relation have to be saved before adding to relation.
### remove
Removal of the related model one by one.
In a reverse relation calling `remove()` does not remove the child model, but instead nulls its ForeignKey value.
```python
# continuing from above
await department.courses.remove(course)
assert len(department.courses) == 0
# course still exists and was saved in remove
assert course.pk is not None
assert course.department is None
# to remove child from db
await course.delete()
```
But if you want to clear the relation and delete the child at the same time you can issue:
```python
# this will not only clear the relation
# but also delete related course from db
await department.courses.remove(course, keep_reversed=False)
```
### clear
Removal of all related models in one call.
Like with remove, by default, `clear()` nulls the ForeignKey column on all child models (no matter if they are loaded or not).
```python
# nulls department column on all courses related to this department
await department.courses.clear()
```
If you want to remove the children altogether from the database, set `keep_reversed=False`
```python
# deletes from db all courses related to this department
await department.courses.clear(keep_reversed=False)
```
## QuerysetProxy
Reverse relation exposes QuerysetProxy API that allows you to query related model like you would issue a normal Query.
To read which methods of QuerySet are available read below [querysetproxy][querysetproxy]
## related_name
You can overwrite related model field name by providing `related_name` parameter like below:
```Python hl_lines="27-29 35"
--8<-- "../docs_src/fields/docs002.py"
```
!!!tip
The reverse relation on access returns a list of `weakref.proxy` objects to avoid circular references.
!!!warning
When you provide multiple relations to the same model `ormar` can no longer auto generate
the `related_name` for you. Therefore, in that situation you **have to** provide `related_name`
for all but one (one can be default and generated) or all related fields.
## Referential Actions
When an object referenced by a ForeignKey is changed (deleted or updated),
ormar will set the SQL constraint specified by the `ondelete` and `onupdate` argument.
The possible values for `ondelete` and `onupdate` are found in `ormar.ReferentialAction`:
!!!note
Instead of `ormar.ReferentialAction`, you can directly pass string values to these two arguments, but this is not recommended because it will break the integrity.
### CASCADE
Whenever rows in the parent (referenced) table are deleted (or updated), the respective rows of the child (referencing) table with a matching foreign key column will be deleted (or updated) as well. This is called a cascade delete (or update).
### RESTRICT
A value cannot be updated or deleted when a row exists in a referencing or child table that references the value in the referenced table.
Similarly, a row cannot be deleted as long as there is a reference to it from a referencing or child table.
### SET_NULL
Set the ForeignKey to `None`; this is only possible if `nullable` is True.
### SET_DEFAULT
Set the ForeignKey to its default value; a `server_default` for the ForeignKey must be set.
!!!note
Note that the `default` value is not allowed and you must do this through `server_default`, which you can read about in [this section][server_default].
### DO_NOTHING
Take `NO ACTION`; NO ACTION and RESTRICT are very much alike. The main difference between NO ACTION and RESTRICT is that with NO ACTION the referential integrity check is done after trying to alter the table. RESTRICT does the check before trying to execute the UPDATE or DELETE statement. Both referential actions act the same if the referential integrity check fails: the UPDATE or DELETE statement will result in an error.
## Relation Setup
You have several ways to set-up a relationship connection.
### `Model` instance
The most obvious one is to pass a related `Model` instance to the constructor.
```Python hl_lines="35-36"
--8<-- "../docs_src/relations/docs001.py"
```
### Primary key value
You can setup the relation also with just the pk column value of the related model.
```Python hl_lines="38-39"
--8<-- "../docs_src/relations/docs001.py"
```
### Dictionary
Next option is with a dictionary of key-values of the related model.
You can build the dictionary yourself or get it from existing model with `model_dump()` method.
```Python hl_lines="41-42"
--8<-- "../docs_src/relations/docs001.py"
```
### None
Finally you can explicitly set it to None (default behavior if no value passed).
```Python hl_lines="44-45"
--8<-- "../docs_src/relations/docs001.py"
```
!!!warning
In all not None cases the primary key value for related model **has to exist in database**.
Otherwise an IntegrityError will be raised by your database driver library.
[queries]: ./queries.md
[querysetproxy]: ./queryset-proxy.md
[get]: ./queries.md#get
[all]: ./queries.md#all
[create]: ./queries.md#create
[get_or_create]: ./queries.md#get_or_create
[update_or_create]: ./queries.md#update_or_create
[filter]: ./queries.md#filter
[exclude]: ./queries.md#exclude
[select_related]: ./queries.md#select_related
[prefetch_related]: ./queries.md#prefetch_related
[limit]: ./queries.md#limit
[offset]: ./queries.md#offset
[count]: ./queries.md#count
[exists]: ./queries.md#exists
[fields]: ./queries.md#fields
[exclude_fields]: ./queries.md#exclude_fields
[order_by]: ./queries.md#order_by
[server_default]: ../fields/common-parameters.md#server-default
collerek-ormar-c09209a/docs/relations/index.md 0000664 0000000 0000000 00000015216 15130200524 0021337 0 ustar 00root root 0000000 0000000 # Relations
Currently `ormar` supports two types of relations:
* One-to-many (and many-to-one) with `ForeignKey` field
* Many-to-many with `ManyToMany` field
Below you can find a very basic examples of definitions for each of those relations.
To read more about methods, possibilities, definition etc. please read the subsequent section of the documentation.
## ForeignKey
To define many-to-one relation use `ForeignKey` field.
```Python hl_lines="26"
--8<-- "../docs_src/relations/docs003.py"
```
!!!tip
To read more about one-to-many relations visit [foreign-keys][foreign-keys] section
## Reverse ForeignKey
The definition of one-to-many relation also uses `ForeignKey`, and it's registered for you automatically.
So in relation to example above.
```Python hl_lines="7-8"
class Department(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# there is a virtual field here like follows
courses: Optional[List[Course]] = ormar.ForeignKey(Course, virtual=True)
# note that you DO NOT define it yourself, ormar does it for you.
```
!!!tip
To read more about many-to-one relations (i.e changing the name of generated field) visit [foreign-keys][foreign-keys] section
!!!tip
Reverse ForeignKey allows you to query the related models with [queryset-proxy][queryset-proxy].
It allows you to use `await department.courses.all()` to fetch data related only to specific department etc.
## ManyToMany
To define many-to-many relation use `ManyToMany` field.
```python hl_lines="19"
class Category(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="categories",
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
class Post(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
)
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
categories: Optional[List[Category]] = ormar.ManyToMany(Category)
```
!!!tip
To read more about many-to-many relations visit [many-to-many][many-to-many] section
!!!tip
ManyToMany allows you to query the related models with [queryset-proxy][queryset-proxy].
It allows you to use `await post.categories.all()` but also `await category.posts.all()` to fetch data related only to specific post, category etc.
## Through fields
As part of the `ManyToMany` relation you can define a through model, that can contain additional
fields that you can use to filter, order etc. Fields defined like this are exposed on the reverse
side of the current query for m2m models.
So if you query from model `A` to model `B`, only model `B` has through field exposed.
Which kind of make sense, since it's a one through model/field for each of related models.
```python hl_lines="12-21"
class Category(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="categories",
)
id = ormar.Integer(primary_key=True)
name = ormar.String(max_length=40)
# you can specify additional fields on through model
class PostCategory(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="posts_x_categories",
)
id: int = ormar.Integer(primary_key=True)
sort_order: int = ormar.Integer(nullable=True)
param_name: str = ormar.String(default="Name", max_length=200)
class Post(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
)
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
categories = ormar.ManyToMany(Category, through=PostCategory)
```
!!!tip
To read more about many-to-many relations and through fields visit [many-to-many][many-to-many] section
!!!tip
ManyToMany allows you to query the related models with [queryset-proxy][queryset-proxy].
It allows you to use `await post.categories.all()` but also `await category.posts.all()` to fetch data related only to specific post, category etc.
## Relationship default sort order
By default relations follow the model's default sort order, so `primary_key` column ascending, or any sort order set in the `ormar_config` object.
!!!tip
To read more about models sort order visit [models](../models/index.md#model-sort-order) section of documentation
But you can modify the order in which related models are loaded during query by providing `orders_by` and `related_orders_by`
parameters to relations.
In relations you can sort only by directly related model columns or for `ManyToMany`
columns also `Through` model columns `{through_field_name}__{column_name}`
Sample configuration might look like this:
```python hl_lines="23"
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
base_ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
)
class Author(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Book(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
author: Optional[Author] = ormar.ForeignKey(
Author, orders_by=["name"], related_orders_by=["-year"]
)
title: str = ormar.String(max_length=100)
year: int = ormar.Integer(nullable=True)
ranking: int = ormar.Integer(nullable=True)
```
Now calls:
`await Author.objects.select_related("books").get()` - the books will be sorted by the book year descending
`await Book.objects.select_related("author").all()` - the authors will be sorted by author name ascending
## Self-reference and postponed references
In order to create auto-relation or create two models that reference each other in at least two
different relations (remember the reverse side is auto-registered for you), you need to use
`ForwardRef` from `typing` module.
```python hl_lines="1 9 12"
PersonRef = ForwardRef("Person")
class Person(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
supervisor: PersonRef = ormar.ForeignKey(PersonRef, related_name="employees")
Person.update_forward_refs()
```
!!!tip
To read more about self-reference and postponed relations visit [postponed-annotations][postponed-annotations] section
[foreign-keys]: ./foreign-key.md
[many-to-many]: ./many-to-many.md
[queryset-proxy]: ./queryset-proxy.md
[postponed-annotations]: ./postponed-annotations.md
collerek-ormar-c09209a/docs/relations/many-to-many.md 0000664 0000000 0000000 00000033041 15130200524 0022552 0 ustar 00root root 0000000 0000000 # ManyToMany
`ManyToMany(to, through)` has required parameters `to` and optional `through` that takes target and relation `Model` classes.
Sqlalchemy column and Type are automatically taken from target `Model`.
* Sqlalchemy column: class of a target `Model` primary key column
* Type (used for pydantic): type of a target `Model`
## Defining Models
```Python hl_lines="34"
--8<-- "../docs_src/relations/docs002.py"
```
Create sample data:
```Python
guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
post = await Post.objects.create(title="Hello, M2M", author=guido)
news = await Category.objects.create(name="News")
```
## Reverse relation
`ForeignKey` fields are automatically registering reverse side of the relation.
By default it's child (source) `Model` name + s, like `posts` in snippet below:
```python hl_lines="25-26"
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
class Post(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
categories: Optional[List[Category]] = ormar.ManyToMany(Category)
# create some sample data
post = await Post.objects.create(title="Hello, M2M")
news = await Category.objects.create(name="News")
await post.categories.add(news)
# now you can query and access from both sides:
post_check = await Post.objects.select_related("categories").get()
assert post_check.categories[0] == news
# query through auto registered reverse side
category_check = await Category.objects.select_related("posts").get()
assert category_check.posts[0] == post
```
Reverse relation exposes API to manage related objects also from parent side.
### related_name
By default, the related_name is generated in the same way as for the `ForeignKey` relation (class.name.lower()+'s'),
but in the same way you can overwrite this name by providing `related_name` parameter like below:
```Python
categories: Optional[Union[Category, List[Category]]] = ormar.ManyToMany(
Category, through=PostCategory, related_name="new_categories"
)
```
!!!warning
When you provide multiple relations to the same model `ormar` can no longer auto generate
the `related_name` for you. Therefore, in that situation you **have to** provide `related_name`
for all but one (one can be default and generated) or all related fields.
### Skipping reverse relation
If you are sure you don't want the reverse relation you can use `skip_reverse=True`
flag of the `ManyToMany`.
If you set `skip_reverse` flag internally the field is still registered on the other
side of the relationship so you can:
* `filter` by related models fields from reverse model
* `order_by` by related models fields from reverse model
But you cannot:
* access the related field from reverse model with `related_name`
* even if you `select_related` from reverse side of the model the returned models won't be populated in reversed instance (the join is not prevented so you still can `filter` and `order_by` over the relation)
* the relation won't be populated in `model_dump()` and `json()`
* you cannot pass the nested related objects when populating from dictionary or json (also through `fastapi`). It will be either ignored or error will be raised depending on `extra` setting in pydantic `Config`.
Example:
```python
class Category(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="categories")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=40)
class Post(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
title: str = ormar.String(max_length=200)
categories: Optional[List[Category]] = ormar.ManyToMany(Category, skip_reverse=True)
# create some sample data
post = await Post.objects.create(title="Hello, M2M")
news = await Category.objects.create(name="News")
await post.categories.add(news)
assert post.categories[0] == news # ok
assert news.posts # Attribute error!
# but still can use in order_by
categories = (
await Category.objects.select_related("posts").order_by("posts__title").all()
)
assert categories[0].name == "News"
# note that posts are not populated for author even if explicitly
# included in select_related - note no posts in model_dump()
assert news.model_dump(exclude={"id"}) == {"name": "News"}
# still can filter through fields of related model
categories = await Category.objects.filter(posts__title="Hello, M2M").all()
assert categories[0].name == "News"
assert len(categories) == 1
```
## Through Model
Optionally if you want to add additional fields you can explicitly create and pass
the through model class.
```Python hl_lines="19-24 32"
--8<-- "../docs_src/relations/docs004.py"
```
!!!warning
    Note that even if you do not provide the through model it's going to be created for you automatically and
    still has to be included, for example in `alembic` migrations.
!!!tip
Note that you need to provide `through` model if you want to
customize the `Through` model name or the database table name of this model.
If you do not provide the Through field it will be generated for you.
The default naming convention is:
* for class name it's union of both classes name (parent+other) so in example above
it would be `PostCategory`
* for table name it's similar but with an underscore in between and an s at the end of the class
lowercase name; in the example above it would be `posts_categorys`
### Customizing Through relation names
By default `Through` model relation names default to related model name in lowercase.
So in example like this:
```python
... # course declaration omitted
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
courses = ormar.ManyToMany(Course)
# will produce default Through model like follows (example simplified)
class StudentCourse(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="students_courses")
id: int = ormar.Integer(primary_key=True)
student = ormar.ForeignKey(Student) # default name
course = ormar.ForeignKey(Course) # default name
```
To customize the names of fields/relation in Through model now you can use new parameters to `ManyToMany`:
* `through_relation_name` - name of the field leading to the model in which `ManyToMany` is declared
* `through_reverse_relation_name` - name of the field leading to the model to which `ManyToMany` leads to
Example:
```python
... # course declaration omitted
base_ormar_config = ormar.OrmarConfig(
database=databases.Database("sqlite:///db.sqlite"),
metadata=sqlalchemy.MetaData(),
)
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
courses = ormar.ManyToMany(Course,
through_relation_name="student_id",
through_reverse_relation_name="course_id")
# will produce Through model like follows (example simplified)
class StudentCourse(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="student_courses")
id: int = ormar.Integer(primary_key=True)
student_id = ormar.ForeignKey(Student) # set by through_relation_name
course_id = ormar.ForeignKey(Course) # set by through_reverse_relation_name
```
!!!note
Note that explicitly declaring relations in Through model is forbidden, so even if you
provide your own custom Through model you cannot change the names there and you need to use
same `through_relation_name` and `through_reverse_relation_name` parameters.
## Through Fields
The through field is auto added to the reverse side of the relation.
The exposed field is named as lowercase `Through` class name.
The exposed field **explicitly has no relations loaded** as the relation is already populated in `ManyToMany` field,
so it's useful only when additional fields are provided on `Through` model.
In a sample model setup as following:
```Python hl_lines="19-24 32"
--8<-- "../docs_src/relations/docs004.py"
```
the through field can be used as a normal model field in most of the QuerySet operations.
Note that through field is attached only to related side of the query so:
```python
post = await Post.objects.select_related("categories").get()
# source model has no through field
assert post.postcategory is None
# related models have through field
assert post.categories[0].postcategory is not None
# same is applicable for reversed query
category = await Category.objects.select_related("posts").get()
assert category.postcategory is None
assert category.posts[0].postcategory is not None
```
Through field can be used for filtering the data.
```python
post = (
await Post.objects.select_related("categories")
.filter(postcategory__sort_order__gt=1)
.get()
)
```
!!!tip
Note that despite that the actual instance is not populated on source model,
in queries, order by statements etc you can access through model from both sides.
So below query has exactly the same effect (note access through `categories`)
```python
post = (
await Post.objects.select_related("categories")
.filter(categories__postcategory__sort_order__gt=1)
.get()
)
```
Through model can be used in order by queries.
```python
post = (
await Post.objects.select_related("categories")
.order_by("-postcategory__sort_order")
.get()
)
```
You can also select subset of the columns in a normal `QuerySet` way with `fields`
and `exclude_fields`.
```python
post2 = (
await Post.objects.select_related("categories")
.exclude_fields("postcategory__param_name")
.get()
)
```
!!!warning
Note that because through fields explicitly nullifies all relation fields, as relation
is populated in ManyToMany field, you should not use the standard model methods like
`save()` and `update()` before re-loading the field from database.
If you want to modify the through field in place remember to reload it from database.
Otherwise you will set relations to None so effectively make the field useless!
```python
# always reload the field before modification
await post2.categories[0].postcategory.load()
# only then update the field
await post2.categories[0].postcategory.update(sort_order=3)
```
Note that reloading the model effectively reloads the relations as `pk_only` models
(only primary key is set) so they are not fully populated, but it's enough to preserve
the relation on update.
!!!warning
If you use i.e. `fastapi` the partially loaded related models on through field might cause
`pydantic` validation errors (that's the primary reason why they are not populated by default).
So either you need to exclude the related fields in your response, or fully load the related
models. In example above it would mean:
```python
await post2.categories[0].postcategory.post.load()
await post2.categories[0].postcategory.category.load()
```
Alternatively you can use `load_all()`:
```python
await post2.categories[0].postcategory.load_all()
```
**Preferred way of update is through queryset proxy `update()` method**
```python
# filter the desired related model with through field and update only through field params
await post2.categories.filter(name='Test category').update(postcategory={"sort_order": 3})
```
## Relation methods
### add
`add(item: Model, **kwargs)`
Allows you to add model to ManyToMany relation.
```python
# Add a category to a post.
await post.categories.add(news)
# or from the other end:
await news.posts.add(post)
```
!!!warning
In all not `None` cases the primary key value for related model **has to exist in database**.
Otherwise an IntegrityError will be raised by your database driver library.
If you declare your models with a Through model with additional fields, you can populate them
during adding child model to relation.
In order to do so, pass keyword arguments with field names and values to `add()` call.
Note that this works only for `ManyToMany` relations.
```python
post = await Post(title="Test post").save()
category = await Category(name="Test category").save()
# apart from model pass arguments referencing through model fields
await post.categories.add(category, sort_order=1, param_name='test')
```
### remove
Removal of the related model one by one.
Removes also the relation in the database.
```python
await news.posts.remove(post)
```
### clear
Removal of all related models in one call.
Removes also the relation in the database.
```python
await news.posts.clear()
```
### QuerysetProxy
Reverse relation exposes QuerysetProxy API that allows you to query related model like you would issue a normal Query.
To read which methods of QuerySet are available read below [querysetproxy][querysetproxy]
[queries]: ./queries.md
[querysetproxy]: ./queryset-proxy.md
[get]: ./queries.md#get
[all]: ./queries.md#all
[create]: ./queries.md#create
[get_or_create]: ./queries.md#get_or_create
[update_or_create]: ./queries.md#update_or_create
[filter]: ./queries.md#filter
[exclude]: ./queries.md#exclude
[select_related]: ./queries.md#select_related
[prefetch_related]: ./queries.md#prefetch_related
[limit]: ./queries.md#limit
[offset]: ./queries.md#offset
[count]: ./queries.md#count
[exists]: ./queries.md#exists
[fields]: ./queries.md#fields
[exclude_fields]: ./queries.md#exclude_fields
[order_by]: ./queries.md#order_by
collerek-ormar-c09209a/docs/relations/postponed-annotations.md 0000664 0000000 0000000 00000010644 15130200524 0024576 0 ustar 00root root 0000000 0000000 # Postponed annotations
## Self-referencing Models
When you want to reference the same model during declaration to create a
relation you need to declare the referenced model as a `ForwardRef`, as during the declaration
the class is not yet ready and python by default won't let you reference it.
Although you might be tempted to use __future__ annotations or simply quote the name with `""` it won't work
as `ormar` is designed to work with explicitly declared `ForwardRef`.
First, you need to import the required ref from typing.
```python
from typing import ForwardRef
```
Now we need a sample model and a reference to the same model,
which will be used to create a self referencing relation.
```python
# create the forwardref to model Person
PersonRef = ForwardRef("Person")
class Person(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# use the forwardref as to parameter
supervisor: PersonRef = ormar.ForeignKey(PersonRef, related_name="employees")
```
That's so simple. But before you can use the model you need to manually update the references
so that they lead to the actual models.
!!!warning
If you try to use the model without updated references, `ModelError` exception will be raised.
So in our example above any call like following will cause exception
```python
# creation of model - exception
await Person.objects.create(name="Test")
# initialization of model - exception
Person2(name="Test")
# usage of model's QuerySet - exception
await Person2.objects.get()
```
To update the references call the `update_forward_refs` method on **each model**
with forward references, only **after all related models were declared.**
So in order to make our previous example work we need just one extra line.
```python hl_lines="14"
PersonRef = ForwardRef("Person")
class Person(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
supervisor: PersonRef = ormar.ForeignKey(PersonRef, related_name="employees")
Person.update_forward_refs()
```
Of course the same can be done with ManyToMany relations in exactly same way, both for to
and through parameters.
```python
# declare the reference
ChildRef = ForwardRef("Child")
class ChildFriend(ormar.Model):
ormar_config = base_ormar_config.copy()
class Child(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# use it in relation
friends = ormar.ManyToMany(ChildRef, through=ChildFriend,
related_name="also_friends")
Child.update_forward_refs()
```
## Cross model relations
The same mechanism and logic as for self-reference model can be used to link multiple different
models between each other.
Of course `ormar` links both sides of relation for you,
creating a reverse relation with specified (or default) `related_name`.
But if you need two (or more) relations between any two models, that for whatever reason
should be stored on both sides (so one relation is declared on one model,
and other on the second model), you need to use `ForwardRef` to achieve that.
Look at the following simple example.
```python
# teacher is not yet defined
TeacherRef = ForwardRef("Teacher")
class Student(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# so we use reference instead of actual model
primary_teacher: TeacherRef = ormar.ForeignKey(TeacherRef,
related_name="own_students")
class StudentTeacher(ormar.Model):
ormar_config = base_ormar_config.copy(tablename='students_x_teachers')
class Teacher(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
# we need students for other relation hence the order
students = ormar.ManyToMany(Student, through=StudentTeacher,
related_name="teachers")
# now the Teacher model is already defined we can update references
Student.update_forward_refs()
```
!!!warning
Remember that `related_name` needs to be unique across related models regardless
of how many relations are defined.
collerek-ormar-c09209a/docs/relations/queryset-proxy.md 0000664 0000000 0000000 00000023413 15130200524 0023266 0 ustar 00root root 0000000 0000000 # QuerySetProxy
When accessed directly, the related `ManyToMany` field as well as `ReverseForeignKey` returns the list of related models.
But at the same time it exposes a subset of the QuerySet API, so you can filter, create, select related etc. on related models directly from the parent model.
!!!note
By default exposed QuerySet is already filtered to return only `Models` related to parent `Model`.
So if you issue `post.categories.all()` you will get all categories related to that post, not all in table.
!!!note
Note that when accessing QuerySet API methods through QuerysetProxy you don't
need to use `objects` attribute like in normal queries.
So note that it's `post.categories.all()` and **not** `post.categories.objects.all()`.
To learn more about available QuerySet methods visit [queries][queries]
!!!warning
Querying related models from ManyToMany cleans list of related models loaded on parent model:
Example: `post.categories.first()` will set post.categories to list of 1 related model -> the one returned by first()
Example 2: if post has 4 categories so `len(post.categories) == 4` calling `post.categories.limit(2).all()`
-> will load only 2 children and now `assert len(post.categories) == 2`
This happens for all QuerysetProxy methods returning data: `get`, `all` and `first` and in `get_or_create` if model already exists.
Note that value returned by `create` or created in `get_or_create` and `update_or_create`
if model does not exist will be added to relation list (not clearing it).
## Read data from database
### get
`get(**kwargs): -> Model`
To grab just one of related models filtered by name you can use `get(**kwargs)` method.
```python
# grab one category
assert news == await post.categories.get(name="News")
# note that method returns the category so you can grab this value
# but it also modifies list of related models in place
# so regardless of what was previously loaded on parent model
# now it has only one value -> just loaded with get() call
assert len(post.categories) == 1
assert post.categories[0] == news
```
!!!tip
Read more in queries documentation [get][get]
### get_or_create
`get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
Tries to get a row meeting the criteria and if `NoMatch` exception is raised it creates a new one with given kwargs and _defaults.
!!!tip
Read more in queries documentation [get_or_create][get_or_create]
### all
`all(**kwargs) -> List[Optional["Model"]]`
To get a list of related models use `all()` method.
Note that you can filter the queryset, select related, exclude fields etc. like in normal query.
```python
# with all Queryset methods - filtering, selecting columns, counting etc.
await news.posts.filter(title__contains="M2M").all()
await Category.objects.filter(posts__author=guido).get()
# columns models of many to many relation can be prefetched
news_posts = await news.posts.select_related("author").all()
assert news_posts[0].author == guido
```
!!!tip
Read more in queries documentation [all][all]
### iterate
`iterate(**kwargs) -> AsyncGenerator["Model"]`
To iterate on related models use `iterate()` method.
Note that you can filter the queryset, select related, exclude fields etc. like in normal query.
```python
# iterate on categories of this post with an async generator
async for category in post.categories.iterate():
print(category.name)
```
!!!tip
Read more in queries documentation [iterate][iterate]
## Insert/ update data into database
### create
`create(**kwargs): -> Model`
Create related `Model` directly from parent `Model`.
The link table is automatically populated, as well as relation ids in the database.
```python
# Creating columns object from instance:
await post.categories.create(name="Tips")
assert len(await post.categories.all()) == 2
# newly created instance already have relation persisted in the database
```
!!!tip
Read more in queries documentation [create][create]
For `ManyToMany` relations there is an additional functionality of passing parameters
that will be used to create a through model if you declared additional fields on explicitly
provided Through model.
Given sample like this:
```Python hl_lines="19-24 32"
--8<-- "../docs_src/relations/docs004.py"
```
You can populate fields on through model in the `create()` call in a following way:
```python
post = await Post(title="Test post").save()
await post.categories.create(
name="Test category1",
# in arguments pass a dictionary with name of the through field and keys
# corresponding to through model fields
postcategory={"sort_order": 1, "param_name": "volume"},
)
```
### get_or_create
`get_or_create(_defaults: Optional[Dict[str, Any]] = None, **kwargs) -> Tuple[Model, bool]`
Tries to get a row meeting the criteria and if NoMatch exception is raised it creates a new one with given kwargs.
!!!tip
Read more in queries documentation [get_or_create][get_or_create]
### update_or_create
`update_or_create(**kwargs) -> Model`
Updates the model, or in case there is no match in database creates a new one.
!!!tip
Read more in queries documentation [update_or_create][update_or_create]
### update
`update(each: bool = False, **kwargs) -> int`
Updates the related model with provided keyword arguments, return number of updated rows.
!!!tip
Read more in queries documentation [update][update]
Note that for `ManyToMany` relations update can also accept an argument with through field
name and a dictionary of fields.
```Python hl_lines="19-24 32"
--8<-- "../docs_src/relations/docs004.py"
```
In example above you can update attributes of `postcategory` in a following call:
```python
await post.categories.filter(name="Test category3").update(
postcategory={"sort_order": 4}
)
```
## Filtering and sorting
### filter
`filter(*args, **kwargs) -> QuerySet`
Allows you to filter by any Model attribute/field as well as to fetch instances, with a filter across an FK relationship.
!!!tip
Read more in queries documentation [filter][filter]
### exclude
`exclude(*args, **kwargs) -> QuerySet`
Works exactly the same as filter and all modifiers (suffixes) are the same, but returns a not condition.
!!!tip
Read more in queries documentation [exclude][exclude]
### order_by
`order_by(columns:Union[List, str]) -> QuerySet`
With order_by() you can order the results from database based on your choice of fields.
!!!tip
Read more in queries documentation [order_by][order_by]
## Joins and subqueries
### select_related
`select_related(related: Union[List, str]) -> QuerySet`
Allows to prefetch related models during the same query.
With select_related always only one query is run against the database, meaning that one (sometimes complicated) join is generated and later nested models are processed in python.
!!!tip
Read more in queries documentation [select_related][select_related]
### prefetch_related
`prefetch_related(related: Union[List, str]) -> QuerySet`
Allows to prefetch related models during query - but opposite to select_related each subsequent model is fetched in a separate database query.
With prefetch_related always one query per Model is run against the database, meaning that you will have multiple queries executed one after another.
!!!tip
Read more in queries documentation [prefetch_related][prefetch_related]
## Pagination and rows number
### paginate
`paginate(page: int, page_size: int = 20) -> QuerySet`
Combines the offset and limit methods based on page number and size.
!!!tip
Read more in queries documentation [paginate][paginate]
### limit
`limit(limit_count: int) -> QuerySet`
You can limit the results to desired number of parent models.
!!!tip
Read more in queries documentation [limit][limit]
### offset
`offset(offset: int) -> QuerySet`
You can offset the results by desired number of main models.
!!!tip
Read more in queries documentation [offset][offset]
## Selecting subset of columns
### fields
`fields(columns: Union[List, str, set, dict]) -> QuerySet`
With fields() you can select subset of model columns to limit the data load.
!!!tip
Read more in queries documentation [fields][fields]
### exclude_fields
`exclude_fields(columns: Union[List, str, set, dict]) -> QuerySet`
With exclude_fields() you can select subset of model columns that will be excluded to limit the data load.
!!!tip
Read more in queries documentation [exclude_fields][exclude_fields]
## Aggregated functions
### count
`count(distinct: bool = True) -> int`
Returns number of rows matching the given criteria (i.e. applied with filter and exclude)
!!!tip
Read more in queries documentation [count][count]
### exists
`exists() -> bool`
Returns a bool value to confirm if there are rows matching the given criteria (applied with filter and exclude)
!!!tip
Read more in queries documentation [exists][exists]
[queries]: ../queries/index.md
[get]: ../queries/read.md#get
[all]: ../queries/read.md#all
[iterate]: ../queries/read.md#iterate
[create]: ../queries/create.md#create
[get_or_create]: ../queries/read.md#get_or_create
[update_or_create]: ../queries/update.md#update_or_create
[update]: ../queries/update.md#update
[filter]: ../queries/filter-and-sort.md#filter
[exclude]: ../queries/filter-and-sort.md#exclude
[select_related]: ../queries/joins-and-subqueries.md#select_related
[prefetch_related]: ../queries/joins-and-subqueries.md#prefetch_related
[limit]: ../queries/pagination-and-rows-number.md#limit
[offset]: ../queries/pagination-and-rows-number.md#offset
[paginate]: ../queries/pagination-and-rows-number.md#paginate
[count]: ../queries/aggregations.md#count
[exists]: ../queries/aggregations.md#exists
[fields]: ../queries/select-columns.md#fields
[exclude_fields]: ../queries/select-columns.md#exclude_fields
[order_by]: ../queries/filter-and-sort.md#order_by
collerek-ormar-c09209a/docs/releases.md 0000664 0000000 0000000 00000204154 15130200524 0020034 0 ustar 00root root 0000000 0000000 # Release notes
## 0.21.0
### 🐛 Breaking changes
* Drop support for Python 3.8
* Remove the possibility to exclude parents' fields in children models (discouraged as bad practice anyway)
* Add support for Sqlalchemy 2.0 and drop for 1.4
### 💬 Other
* Bump dependencies to newer versions: among others pydantic, databases and fastapi
* Move setuptools to dev dependencies
* Solve vulnerabilities in dependencies
### 🐛 Fixes
* Fix mutable default argument in translate list to dict - thanks @cadlagtrader [#1382](https://github.com/collerek/ormar/pull/1382)
* Fix fastapi docs - thanks @inktrap [#1362](https://github.com/collerek/ormar/pull/1362)
* Fix clashing many to many fields names [#1407](https://github.com/collerek/ormar/pull/1407)
### 💬 Other
* Add official support for python 3.12 - thanks @ChristopherMacGown [#1395](https://github.com/collerek/ormar/pull/1395)
* Unpin pydantic allowing pydantic versions <2.9.0 - thanks @camillol [#1388](https://github.com/collerek/ormar/pull/1388)
## 0.20.2
### 🐛 Fixes
* Fix mutable default argument in translate list to dict - thanks @cadlagtrader [#1382](https://github.com/collerek/ormar/pull/1382)
* Fix fastapi docs - thanks @inktrap [#1362](https://github.com/collerek/ormar/pull/1362)
* Fix clashing many to many fields names [#1407](https://github.com/collerek/ormar/pull/1407)
### 💬 Other
* Add official support for python 3.12 - thanks @ChristopherMacGown [#1395](https://github.com/collerek/ormar/pull/1395)
* Unpin pydantic allowing pydantic versions <2.9.0 - thanks @camillol [#1388](https://github.com/collerek/ormar/pull/1388)
# Release notes
## 0.20.1
### ✨ Breaking changes
* Note that this is the first non-beta release of ormar with support for Pydantic v2. Check release notes for 0.20.0 and https://collerek.github.io/ormar/0.20.0b1/migration/
### 🐛 Fixes
* Fix merging same target models when using `select_related` with `prefetch_related` [#906](https://github.com/collerek/ormar/issues/906)
* Fix saving related with pk only models [#812](https://github.com/collerek/ormar/issues/812)
* Fix adding the same relation multiple times corrupting relation cache [#1335](https://github.com/collerek/ormar/issues/1335)
### ✨ Features
* Allow adding indexed on foreign keys by @cmflynn [#1276](https://github.com/collerek/ormar/pull/1276)
### 💬 Other
* Some docs fixes by @Chaoyingz, thanks!
## 0.20.0
### ✨ Breaking changes
* `ormar` Model configuration
Instead of defining a `Meta` class now each of the ormar models require an ormar_config parameter that is an instance of the `OrmarConfig` class.
Note that the attribute must be named `ormar_config` and be an instance of the config class.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Album(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "albums"
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
# ormar >= 0.20
class AlbumV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
```
* `OrmarConfig` api/ parameters
The `ormar_config` expose the same set of settings as `Meta` class used to provide.
That means that you can use any of the following parameters initializing the config:
```python
metadata: Optional[sqlalchemy.MetaData]
database: Optional[databases.Database]
engine: Optional[sqlalchemy.engine.Engine]
tablename: Optional[str]
order_by: Optional[List[str]]
abstract: bool
queryset_class: Type[QuerySet]
extra: Extra
constraints: Optional[List[ColumnCollectionConstraint]]
```
* `BaseMeta` equivalent - best practice
Note that to reduce the duplication of code and ease of development it's still recommended to create a base config and provide each of the models with a copy.
OrmarConfig provides a convenient `copy` method for that purpose.
The `copy` method accepts the same parameters as `OrmarConfig` init, so you can overwrite if needed, but by default it will return already existing attributes, except for: `tablename`, `order_by` and `constraints` which by default are cleared.
```python hl_lines="5-8 11 20"
import databases
import ormar
import sqlalchemy
base_ormar_config = ormar.OrmarConfig(
database=databases.Database("sqlite:///db.sqlite"),
metadata=sqlalchemy.MetaData()
)
class AlbumV20(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="albums_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class TrackV20(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="tracks_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
```
* `choices` Field parameter is no longer supported.
Before version 0.20 you could provide `choices` parameter to any existing ormar Field to limit the accepted values.
This functionality was dropped, and you should use `ormar.Enum` field that was designed for this purpose.
If you want to keep the database field type (i.e. an Integer field) you can always write a custom validator.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Artist(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
country: str = ormar.String(default=False, max_length=50, choices=["UK", "US", "Vietnam", "Colombia"])
# ormar >= 0.20
from enum import Enum
class Country(str, Enum):
UK = "UK"
US = "US"
VIETNAM = "Vietnam"
COLOMBIA = "Colombia"
class ArtistV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="artists_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
country: Country = ormar.Enum(enum_class=Country)
```
* `pydantic_only` Field parameter is no longer supported
`pydantic_only` fields were already deprecated and are removed in v 0.20. Ormar allows defining pydantic fields as in ordinary pydantic model.
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Dish(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "dishes"
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
cook: str = ormar.String(max_length=40, pydantic_only=True, default="sam")
# ormar >= 0.20
class DishV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="dishes_v20"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
cook: str = "sam" # this is normal pydantic field
```
* `property_field` decorator is no longer supported
`property_field` decorator was used to provide a way to pass calculated fields that were included in dictionary/ serialized json representation of the model.
Version 2.X of pydantic introduced such a possibility, so you should now switch to the one native to the pydantic.
```python
import databases
import ormar
import sqlalchemy
import pydantic
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
# ormar < 0.20
class Employee(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
first_name: str = ormar.String(max_length=100)
last_name: str = ormar.String(max_length=100)
@ormar.property_field()
def full_name(self) -> str:
return f"{self.first_name} {self.last_name}"
# ormar >= 0.20
class EmployeeV20(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
)
id: int = ormar.Integer(primary_key=True)
first_name: str = ormar.String(max_length=100)
last_name: str = ormar.String(max_length=100)
@pydantic.computed_field()
def full_name(self) -> str:
return f"{self.first_name} {self.last_name}"
```
* Deprecated methods
All methods listed below are deprecated and will be removed in version 0.30 of `ormar`.
* `dict()` becomes the `model_dump()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
album = Album(name="Dark Side of the Moon")
# ormar < 0.20
album_dict = album.dict()
# ormar >= 0.20
new_album_dict = album.model_dump()
```
Note that parameters remain the same i.e. `include`, `exclude` etc.
* `json()` becomes the `model_dump_json()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
album = Album(name="Dark Side of the Moon")
# ormar < 0.20
album_json = album.json()
# ormar >= 0.20
new_album_json = album.model_dump_json()
```
Note that parameters remain the same i.e. `include`, `exclude` etc.
* `construct()` becomes the `model_construct()`
```python
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Album(ormar.Model):
ormar_config = ormar.OrmarConfig(
database=database,
metadata=metadata,
tablename="albums"
)
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
favorite: bool = ormar.Boolean(default=False)
params = {
"name": "Dark Side of the Moon",
"favorite": True,
}
# ormar < 0.20
album = Album.construct(**params)
# ormar >= 0.20
album = Album.model_construct(**params)
```
To read more about construct please refer to `pydantic` documentation.
## 0.12.2
### ✨ Features
* Bump support for `FastAPI` up to the newest version (0.97.0) [#1110](https://github.com/collerek/ormar/pull/1110)
* Add support and tests for `Python 3.11` [#1110](https://github.com/collerek/ormar/pull/1110)
## 0.12.1
### ✨ Features
* Massive performance improvements in area of loading the models due to recursive loads and caching of the models and related models. (by @erichaydel - thanks!) [#948](https://github.com/collerek/ormar/pull/948)
### 💬 Internals
* Benchmarks for comparing performance effect of implemented changes in regard of trends (again, by @erichaydel - thanks!) [#948](https://github.com/collerek/ormar/pull/948)
## 0.12.0
### ✨ Breaking Changes
* `Queryset.bulk_create` will now raise `ModelListEmptyError` on empty list of models (by @ponytailer - thanks!) [#853](https://github.com/collerek/ormar/pull/853)
### ✨ Features
* `Model.upsert()` now handles a flag `__force_save__`: `bool` that allows upserting the models regardless of the fact if they have primary key set or not.
Note that setting this flag will cause two queries for each upserted model -> `get` to check if model exists and later `update/insert` accordingly. [#889](https://github.com/collerek/ormar/issues/889)
### 🐛 Fixes
* Fix for empty relations breaking `construct` method (by @Abdeldjalil-H - thanks!) [#870](https://github.com/collerek/ormar/issues/870)
* Fix save related not saving models with already set pks (including uuid) [#885](https://github.com/collerek/ormar/issues/885)
* Fix for wrong relations exclusions depending on the order of exclusions [#779](https://github.com/collerek/ormar/issues/779)
* Fix `property_fields` not being inherited properly [#774](https://github.com/collerek/ormar/issues/774)
## 0.11.3
### ✨ Features
* Document `onupdate` and `ondelete` referential actions in `ForeignKey` and provide `ReferentialAction` enum to specify the behavior of the relationship (by @SepehrBazyar - thanks!) [#724](https://github.com/collerek/ormar/issues/724)
* Add `CheckColumn` to supported constraints in models Meta (by @SepehrBazyar - thanks!) [#729](https://github.com/collerek/ormar/issues/729)
### 🐛 Fixes
* Fix limiting query result to 0 should return empty list (by @SepehrBazyar - thanks!) [#766](https://github.com/collerek/ormar/issues/766)
### 💬 Other
* Add dark mode to docs (by @SepehrBazyar - thanks!) [#717](https://github.com/collerek/ormar/pull/717)
* Update aiomysql dependency [#778](https://github.com/collerek/ormar/issues/778)
## 0.11.2
### 🐛 Fixes
* Fix database drivers being required, while they should be optional [#713](https://github.com/collerek/ormar/issues/713)
* Fix boolean field problem in `limit` queries in postgres without `limit_raw_sql` flag [#704](https://github.com/collerek/ormar/issues/704)
* Fix enum_class spilling to schema causing errors in OpenAPI [#699](https://github.com/collerek/ormar/issues/699)
## 0.11.1
### 🐛 Fixes
* Fix deepcopy issues introduced in pydantic 1.9 [#685](https://github.com/collerek/ormar/issues/685)
## 0.11.0
### ✨ Breaking Changes
* Dropped support for python 3.6
* `Queryset.get_or_create` returns now a tuple with model and bool value indicating if the model was created (by @MojixCoder - thanks!) [#554](https://github.com/collerek/ormar/pull/554)
* `Queryset.count()` now counts the number of distinct parent model rows by default, counting all rows is possible by setting `distinct=False` (by @erichaydel - thanks) [#588](https://github.com/collerek/ormar/pull/588)
### ✨ Features
* Added support for python 3.10
### 🐛 Fixes
* Fix inconsistent `JSON` fields behaviour in `save` and `bulk_create` [#584](https://github.com/collerek/ormar/issues/584)
* Fix maximum recursion error [#580](https://github.com/collerek/ormar/pull/580)
## 0.10.25
### ✨ Features
* Add `queryset_class` option to `Model.Meta` that allows you to easily swap `QuerySet` for your Model (by @ponytailer - thanks!) [#538](https://github.com/collerek/ormar/pull/538)
* Allow passing extra `kwargs` to `IndexColumns` that will be passed to sqlalchemy `Index` (by @zevisert - thanks) [#575](https://github.com/collerek/ormar/pull/575)
### 🐛 Fixes
* Fix nullable setting on `JSON` fields [#529](https://github.com/collerek/ormar/issues/529)
* Fix bytes/str mismatch in bulk operations when using orjson instead of json (by @ponytailer - thanks!) [#538](https://github.com/collerek/ormar/pull/538)
## 0.10.24
### ✨ Features
* Add `post_bulk_update` signal (by @ponytailer - thanks!) [#524](https://github.com/collerek/ormar/pull/524)
### 🐛 Fixes
* Fix support for `pydantic==1.9.0` [#502](https://github.com/collerek/ormar/issues/502)
* Fix timezone issues with datetime [#504](https://github.com/collerek/ormar/issues/504)
* Remove literal binds in query generation to unblock postgres arrays [#/tophat/ormar-postgres-extensions/9](https://github.com/tophat/ormar-postgres-extensions/pull/9)
* Fix bulk update for `JSON` fields [#519](https://github.com/collerek/ormar/issues/519)
### 💬 Other
* Improve performance of `bulk_create` by bypassing `databases` `execute_many` suboptimal implementation. (by @Mng-dev-ai thanks!) [#520](https://github.com/collerek/ormar/pull/520)
* Bump min. required `databases` version to `>=5.4`.
## 0.10.23
### ✨ Features
* Add ability to pass `comment` to sqlalchemy when creating a column [#485](https://github.com/collerek/ormar/issues/485)
### 🐛 Fixes
* Fix `LargeBinary` fields that can be nullable [#409](https://github.com/collerek/ormar/issues/409)
* Make `ormar.Model` pickable [#413](https://github.com/collerek/ormar/issues/413)
* Make `first()` and `get()` without arguments respect ordering of main model set by user, fallback to primary key (asc, and desc respectively) [#453](https://github.com/collerek/ormar/issues/453)
* Fix improper quoting of non-aliased join `on` clauses in postgress [#455](https://github.com/collerek/ormar/issues/455)
## 0.10.22
### 🐛 Fixes
* Hot fix for validators not being inherited when parent `ormar` model was set [#365](https://github.com/collerek/ormar/issues/365)
## 0.10.21
### 🐛 Fixes
* Add `ormar` implementation of `construct` classmethod that allows to build `Model` instances without validating the input to speed up the whole flow, if your data is already validated [#318](https://github.com/collerek/ormar/issues/318)
* Fix for "inheriting" field validators from `ormar` model when newly created pydantic model is generated with `get_pydantic` [#365](https://github.com/collerek/ormar/issues/365)
## 0.10.20
### ✨ Features
* Add `extra` parameter in `Model.Meta` that accepts `Extra.ignore` and `Extra.forbid` (default) and either ignores the extra fields passed to `ormar` model or raises an exception if one is encountered [#358](https://github.com/collerek/ormar/issues/358)
### 🐛 Fixes
* Allow `None` if field is nullable and have choices set [#354](https://github.com/collerek/ormar/issues/354)
* Always set `primary_key` to `not null` regardless of `autoincrement` and explicit `nullable` setting to avoid problems with migrations [#348](https://github.com/collerek/ormar/issues/348)
## 0.10.19
### ✨ Features
* Add support for multi-column non-unique `IndexColumns` in `Meta.constraints` [#307](https://github.com/collerek/ormar/issues/307)
* Add `sql_nullable` field attribute that allows to set different nullable setting for pydantic model and for underlying sql column [#308](https://github.com/collerek/ormar/issues/308)
### 🐛 Fixes
* Enable caching of relation map to increase performance [#337](https://github.com/collerek/ormar/issues/337)
* Clarify and fix documentation in regard of nullable fields [#339](https://github.com/collerek/ormar/issues/339)
### 💬 Other
* Bump supported `databases` version to `<=5.2`.
## 0.10.18
### 🐛 Fixes
* Fix order of fields in pydantic models [#328](https://github.com/collerek/ormar/issues/328)
* Fix databases 0.5.0 support [#142](https://github.com/collerek/ormar/issues/142)
## 0.10.17
### ✨ Features
* Allow overwriting the default pydantic type for model fields [#312](https://github.com/collerek/ormar/issues/312)
* Add support for `sqlalchemy` >=1.4 (requires `databases` >= 0.5.0) [#142](https://github.com/collerek/ormar/issues/142)
## 0.10.16
### ✨ Features
* Allow passing your own pydantic `Config` to `ormar.Model` that will be merged with the default one by @naturalethic (thanks!) [#285](https://github.com/collerek/ormar/issues/285)
* Add `SmallInteger` field type by @ProgrammerPlus1998 (thanks!) [#297](https://github.com/collerek/ormar/pull/297)
### 🐛 Fixes
* Fix generating openapi schema by removing obsolete pydantic field parameters that were directly exposed in schema [#291](https://github.com/collerek/ormar/issues/291)
* Fix unnecessary warning for auto generated through models [#295](https://github.com/collerek/ormar/issues/295)
## 0.10.15
### 🐛 Fixes
* Fix generating pydantic models tree with nested models (by @pawamoy - thanks!) [#278](https://github.com/collerek/ormar/issues/278)
* Fix missing f-string in warning about missing primary key field [#274](https://github.com/collerek/ormar/issues/274)
* Fix passing foreign key value as relation (additional guard, fixed already in the latest release) [#270](https://github.com/collerek/ormar/issues/270)
## 0.10.14
### ✨ Features
* Allow passing `timezone:bool = False` parameter to `DateTime` and `Time` fields for timezone aware database columns [#264](https://github.com/collerek/ormar/issues/264)
* Allow passing datetime, date and time for filter on `DateTime`, `Time` and `Date` fields to allow filtering by datetimes instead of converting the value to string [#79](https://github.com/collerek/ormar/issues/79)
### 🐛 Fixes
* Fix dependencies from `psycopg2` to `psycopg2-binary` [#255](https://github.com/collerek/ormar/issues/255)
## 0.10.13
### ✨ Features
* Allow passing field accessors in `select_related` and `prefetch_related` aka. python style `select_related` [#225](https://github.com/collerek/ormar/issues/225).
* Previously:
```python
await Post.objects.select_related(["author", "categories"]).get()
await Author.objects.prefetch_related("posts__categories").get()
```
* Now also:
```python
await Post.objects.select_related([Post.author, Post.categories]).get()
await Author.objects.prefetch_related(Author.posts.categories).get()
```
### 🐛 Fixes
* Fix overwriting default value for inherited primary key [#253](https://github.com/collerek/ormar/issues/253)
## 0.10.12
### 🐛 Fixes
* Fix `QuerySet.create` method not using init (if custom provided) [#245](https://github.com/collerek/ormar/issues/245)
* Fix `ForwardRef` `ManyToMany` relation setting wrong pydantic type [#250](https://github.com/collerek/ormar/issues/250)
## 0.10.11
### ✨ Features
* Add `values` and `values_list` to `QuerySet` and `QuerysetProxy` that allows to return raw data from query [#223](https://github.com/collerek/ormar/issues/223).
* Allow returning list of tuples or list of dictionaries from a query
* Skips parsing the data into ormar models, so it also skips the validation
* Allow excluding models in between in chain of relations, so you can extract only needed columns
* `values_list` allows you to flatten the result if you extract only one column.
### 🐛 Fixes
* Fix creation of auto through model for m2m relation with ForwardRef [#226](https://github.com/collerek/ormar/issues/226)
## 0.10.10
### ✨ Features
* Add [`get_pydantic`](https://collerek.github.io/ormar/models/methods/#get_pydantic) flag that allows you to auto generate equivalent pydantic models tree from ormar.Model. This newly generated model tree can be used in requests and responses to exclude fields you do not want to include in the data.
* Add [`exclude_parent_fields`](https://collerek.github.io/ormar/models/inheritance/#exclude_parent_fields) parameter to model Meta that allows you to exclude fields from parent models during inheritance. Note that best practice is to combine models and mixins but if you have many similar models and just one that differs it might be useful tool to achieve that.
### 🐛 Fixes
* Fix is null filter with pagination and relations (by @erichaydel) [#214](https://github.com/collerek/ormar/issues/214)
* Fix not saving child object on reverse side of the relation if not saved before [#216](https://github.com/collerek/ormar/issues/216)
### 💬 Other
* Expand [fastapi](https://collerek.github.io/ormar/fastapi) part of the documentation to show samples of using ormar in requests and responses in fastapi.
* Improve the docs in regard of `default`, `ForeignKey.add` etc.
## 0.10.9
### Important security fix
* Update pin for pydantic to fix security vulnerability [CVE-2021-29510](https://github.com/samuelcolvin/pydantic/security/advisories/GHSA-5jqp-qgf6-3pvh)
You are advised to update to version of pydantic that was patched.
In 0.10.9 ormar excludes versions with vulnerability in pinned dependencies.
### 🐛 Fixes
* Fix OpenAPi schema for LargeBinary [#204](https://github.com/collerek/ormar/issues/204)
## 0.10.8
### 🐛 Fixes
* Fix populating default values in pk_only child models [#202](https://github.com/collerek/ormar/issues/202)
* Fix mypy for LargeBinary fields with base64 str representation [#199](https://github.com/collerek/ormar/issues/199)
* Fix OpenAPI schema format for LargeBinary fields with base64 str representation [#199](https://github.com/collerek/ormar/issues/199)
* Fix OpenAPI choices encoding for LargeBinary fields with base64 str representation
## 0.10.7
### ✨ Features
* Add `exclude_primary_keys: bool = False` flag to `dict()` method that allows to exclude all primary key columns in the resulting dictionary. [#164](https://github.com/collerek/ormar/issues/164)
* Add `exclude_through_models: bool = False` flag to `dict()` that allows excluding all through models from `ManyToMany` relations [#164](https://github.com/collerek/ormar/issues/164)
* Add `represent_as_base64_str: bool = False` parameter that allows conversion of bytes `LargeBinary` field to base64 encoded string. String is returned in `dict()`,
on access to attribute and string is converted to bytes on setting. Data in database is stored as bytes. [#187](https://github.com/collerek/ormar/issues/187)
* Add `pk` alias to allow field access by `Model.pk` in filters and order by clauses (python style)
### 🐛 Fixes
* Remove default `None` option for `max_length` for `LargeBinary` field [#186](https://github.com/collerek/ormar/issues/186)
* Remove default `None` option for `max_length` for `String` field
### 💬 Other
* Provide a guide and samples of `dict()` parameters in the [docs](https://collerek.github.io/ormar/models/methods/)
* Major refactor of getting/setting attributes from magic methods into descriptors -> noticeable performance improvement
## 0.10.6
### ✨ Features
* Add `LargeBinary(max_length)` field type [#166](https://github.com/collerek/ormar/issues/166)
* Add support for normal pydantic fields (including Models) instead of `pydantic_only`
attribute which is now deprecated [#160](https://github.com/collerek/ormar/issues/160).
Pydantic fields should be declared normally as in pydantic model next to ormar fields,
note that (obviously) `ormar` does not save and load the value for this field in
database. That means that **ONE** of the following has to be true:
* pydantic field declared on ormar model has to be `Optional` (defaults to None)
* pydantic field has to have a default value set
* pydantic field has `default_factory` function set
* ormar.Model with pydantic field has to overwrite `__init__()` and provide the value there
If none of the above is true, `ormar` (or rather pydantic) will fail when loading data from the database,
with missing required value for declared pydantic field.
* Ormar now provides meaningful examples in openapi schema, including nested models.
The same algorithm is used to iterate related models without loops
as with `dict()` and `select/load_all`. Examples appear also in `fastapi`. [#157](https://github.com/collerek/ormar/issues/157)
### 🐛 Fixes
* By default `pydantic` is not validating fields during assignment,
which is not a desirable setting for an ORM, now all `ormar.Models`
have validation turned-on during assignment (like `model.column = 'value'`)
### 💬 Other
* Add connecting to the database in QuickStart in readme [#180](https://github.com/collerek/ormar/issues/180)
* OpenAPI schema does no longer include `ormar.Model` docstring as description,
instead just model name is provided if you do not provide your own docstring.
* Some performance improvements.
## 0.10.5
### 🐛 Fixes
* Fix bug in `fastapi-pagination` [#73](https://github.com/uriyyo/fastapi-pagination/issues/73)
* Remove unnecessary `Optional` in `List[Optional[T]]` in return value for `QuerySet.all()` and `Querysetproxy.all()` return values [#174](https://github.com/collerek/ormar/issues/174)
* Run tests coverage publish only on internal prs instead of all in github action.
## 0.10.4
### ✨ Features
* Add **Python style** to `filter` and `order_by` with field access instead of dunder separated strings. [#51](https://github.com/collerek/ormar/issues/51)
* Accessing a field with attribute access (chain of dot notation) can be used to construct `FilterGroups` (`ormar.and_` and `ormar.or_`)
* Field access overloads set of python operators and provide a set of functions to allow same functionality as with dunder separated param names in `**kwargs`, that means that querying from sample model `Track` related to model `Album` now you have more options:
* exact - exact match to value, sql `column = <VALUE>`
* OLD: `album__name__exact='Malibu'`
* NEW: can be also written as `Track.album.name == 'Malibu'`
* iexact - exact match sql `column = <VALUE>` (case insensitive)
* OLD: `album__name__iexact='malibu'`
* NEW: can be also written as `Track.album.name.iexact('malibu')`
* contains - sql `column LIKE '%<VALUE>%'`
* OLD: `album__name__contains='Mal'`
* NEW: can be also written as `Track.album.name % 'Mal'`
* NEW: can be also written as `Track.album.name.contains('Mal')`
* icontains - sql `column LIKE '%<VALUE>%'` (case insensitive)
* OLD: `album__name__icontains='mal'`
* NEW: can be also written as `Track.album.name.icontains('mal')`
* in - sql `column IN (<VALUE1>, <VALUE2>, ...)`
* OLD: `album__name__in=['Malibu', 'Barclay']`
* NEW: can be also written as `Track.album.name << ['Malibu', 'Barclay']`
* NEW: can be also written as `Track.album.name.in_(['Malibu', 'Barclay'])`
* isnull - sql `column IS NULL` (and sql `column IS NOT NULL`)
* OLD: `album__name__isnull=True` (isnotnull `album__name__isnull=False`)
* NEW: can be also written as `Track.album.name >> None`
* NEW: can be also written as `Track.album.name.isnull(True)`
* NEW: not null can be also written as `Track.album.name.isnull(False)`
* NEW: not null can be also written as `~(Track.album.name >> None)`
* NEW: not null can be also written as `~(Track.album.name.isnull(True))`
* gt - sql `column > <VALUE>` (greater than)
* OLD: `position__gt=3`
* NEW: can be also written as `Track.position > 3`
* gte - sql `column >= <VALUE>` (greater or equal than)
* OLD: `position__gte=3`
* NEW: can be also written as `Track.position >= 3`
* lt - sql `column < <VALUE>` (lower than)
* OLD: `position__lt=3`
* NEW: can be also written as `Track.position < 3`
* lte - sql `column <= <VALUE>` (lower equal than)
* OLD: `position__lte=3`
* NEW: can be also written as `Track.position <= 3`
* startswith - sql `column LIKE '<VALUE>%'` (exact start match)
* OLD: `album__name__startswith='Mal'`
* NEW: can be also written as `Track.album.name.startswith('Mal')`
* istartswith - sql `column LIKE '<VALUE>%'` (case insensitive)
* OLD: `album__name__istartswith='mal'`
* NEW: can be also written as `Track.album.name.istartswith('mal')`
* endswith - sql `column LIKE '%<VALUE>'` (exact end match)
* OLD: `album__name__endswith='ibu'`
* NEW: can be also written as `Track.album.name.endswith('ibu')`
* iendswith - sql `column LIKE '%<VALUE>'` (case insensitive)
* OLD: `album__name__iendswith='IBU'`
* NEW: can be also written as `Track.album.name.iendswith('IBU')`
* You can provide `FilterGroups` not only in `filter()` and `exclude()` but also in:
* `get()`
* `get_or_none()`
* `get_or_create()`
* `first()`
* `all()`
* `delete()`
* With `FilterGroups` (`ormar.and_` and `ormar.or_`) you can now use:
* `&` - as `and_` instead of next level of nesting
* `|` - as `or_` instead of next level of nesting
* `~` - as negation of the filter group
* To combine groups of filters into one set of conditions use `&` (sql `AND`) and `|` (sql `OR`)
```python
# Following queries are equivalent:
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
# ormar OPTION 1 - OLD one
Product.objects.filter(name='Test', rating__gte=3.0).get()
# ormar OPTION 2 - OLD one
Product.objects.filter(ormar.and_(name='Test', rating__gte=3.0)).get()
# ormar OPTION 3 - NEW one (field access)
Product.objects.filter((Product.name == 'Test') & (Product.rating >= 3.0)).get()
```
* Same applies to nested complicated filters
```python
# Following queries are equivalent:
# sql: ( product.name = 'Test' AND product.rating >= 3.0 )
# OR (categories.name IN ('Toys', 'Books'))
# ormar OPTION 1 - OLD one
Product.objects.filter(ormar.or_(
ormar.and_(name='Test', rating__gte=3.0),
categories__name__in=['Toys', 'Books'])
).get()
# ormar OPTION 2 - NEW one (instead of nested or use `|`)
Product.objects.filter(
ormar.and_(name='Test', rating__gte=3.0) |
ormar.and_(categories__name__in=['Toys', 'Books'])
).get()
# ormar OPTION 3 - NEW one (field access)
Product.objects.filter(
((Product.name == 'Test') & (Product.rating >= 3.0)) |
(Product.categories.name << ['Toys', 'Books'])
).get()
```
* Now you can also use field access to provide OrderActions to `order_by()`
* Order ascending:
* OLD: `Product.objects.order_by("name").all()`
* NEW: `Product.objects.order_by(Product.name.asc()).all()`
* Order descending:
* OLD: `Product.objects.order_by("-name").all()`
* NEW: `Product.objects.order_by(Product.name.desc()).all()`
* You can of course also combine different models and many order_bys:
`Product.objects.order_by([Product.category.name.asc(), Product.name.desc()]).all()`
### 🐛 Fixes
* Not really a bug but rather inconsistency. Providing a filter with nested model i.e. `album__category__name = 'AA'`
is checking if album and category models are included in `select_related()` and if not it's auto-adding them there.
The same functionality was not working for `FilterGroups` (`and_` and `or_`), now it works (also for python style filters which return `FilterGroups`).
## 0.10.3
### ✨ Features
* `ForeignKey` and `ManyToMany` now support `skip_reverse: bool = False` flag [#118](https://github.com/collerek/ormar/issues/118).
If you set `skip_reverse` flag internally the field is still registered on the other
side of the relationship so you can:
* `filter` by related models fields from reverse model
* `order_by` by related models fields from reverse model
But you cannot:
* access the related field from reverse model with `related_name`
* even if you `select_related` from reverse side of the model the returned models won't be populated in reversed instance (the join is not prevented so you still can `filter` and `order_by`)
* the relation won't be populated in `dict()` and `json()`
* you cannot pass the nested related objects when populating from `dict()` or `json()` (also through `fastapi`). It will be either ignored or raise error depending on `extra` setting in pydantic `Config`.
* `Model.save_related()` now can save the whole data tree at once [#148](https://github.com/collerek/ormar/discussions/148)
meaning:
* it knows if it should save main `Model` or related `Model` first to preserve the relation
* it saves main `Model` if
* it's not `saved`,
* has no `pk` value
* or `save_all=True` flag is set
in those cases you don't have to split save into two calls (`save()` and `save_related()`)
* it supports also `ManyToMany` relations
* it supports also optional `Through` model values for m2m relations
* Add possibility to customize `Through` model relation field names.
* By default `Through` model relation names default to related model name in lowercase.
So in example like this:
```python
... ## course declaration omitted
class Student(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
courses = ormar.ManyToMany(Course)
## will produce default Through model like follows (example simplified)
class StudentCourse(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "students_courses"
id: int = ormar.Integer(primary_key=True)
student = ormar.ForeignKey(Student) ## default name
course = ormar.ForeignKey(Course) # default name
```
* To customize the names of fields/relation in Through model now you can use new parameters to `ManyToMany`:
* `through_relation_name` - name of the field leading to the model in which `ManyToMany` is declared
* `through_reverse_relation_name` - name of the field leading to the model to which `ManyToMany` leads to
Example:
```python
... # course declaration omitted
class Student(ormar.Model):
class Meta:
database = database
metadata = metadata
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
courses = ormar.ManyToMany(Course,
through_relation_name="student_id",
through_reverse_relation_name="course_id")
# will produce default Through model like follows (example simplified)
class StudentCourse(ormar.Model):
class Meta:
database = database
metadata = metadata
tablename = "students_courses"
id: int = ormar.Integer(primary_key=True)
student_id = ormar.ForeignKey(Student) # set by through_relation_name
course_id = ormar.ForeignKey(Course) # set by through_reverse_relation_name
```
### 🐛 Fixes
* Fix weakref `ReferenceError` error [#118](https://github.com/collerek/ormar/issues/118)
* Fix error raised by Through fields when pydantic `Config.extra="forbid"` is set
* Fix bug with `pydantic.PrivateAttr` not being initialized at `__init__` [#149](https://github.com/collerek/ormar/issues/149)
* Fix bug with pydantic-type `exclude` in `dict()` with `__all__` key not working
### 💬 Other
* Introduce link to `sqlalchemy-to-ormar` auto-translator for models
* Provide links to fastapi ecosystem libraries that support `ormar`
* Add transactions to docs (supported with `databases`)
## 0.10.2
### ✨ Features
* `Model.save_related(follow=False)` now accept also two additional arguments: `Model.save_related(follow=False, save_all=False, exclude=None)`.
* `save_all:bool` -> By default (so with `save_all=False`) `ormar` only upserts models that are not saved (so new or updated ones),
with `save_all=True` all related models are saved, regardless of `saved` status, which might be useful if the updated
models come from an api call, so they are not changed in the backend.
* `exclude: Union[Set, Dict, None]` -> set/dict of relations to exclude from save, those relations won't be saved even with `follow=True` and `save_all=True`.
To exclude nested relations pass a nested dictionary like: `exclude={"child":{"sub_child": {"exclude_sub_child_relation"}}}`. The allowed values follow
the `fields/exclude_fields` (from `QuerySet`) methods schema so when in doubt you can refer to docs in queries -> selecting subset of fields -> fields.
* `Model.update()` method now accepts `_columns: List[str] = None` parameter, that accepts list of column names to update. If passed only those columns will be updated in database.
Note that `update()` does not refresh the instance of the Model, so if you change more columns than you pass in `_columns` list your Model instance will have different values than the database!
* `Model.model_dump()` method previously included only directly related models or nested models if they were not nullable and not virtual,
now all related models not previously visited without loops are included in `dict()`. This should be not breaking
as just more data will be dumped to dict, but it should not be missing.
* `QuerySet.delete(each=False, **kwargs)` previously required that you either pass a `filter` (by `**kwargs` or as a separate `filter()` call) or set `each=True` now also accepts
`exclude()` calls that generates NOT filter. So either `each=True` needs to be set to delete whole table or at least one of `filter/exclude` clauses.
* Same thing applies to `QuerySet.update(each=False, **kwargs)` which also previously required that you either pass a `filter` (by `**kwargs` or as a separate `filter()` call) or set `each=True` now also accepts
`exclude()` calls that generates NOT filter. So either `each=True` needs to be set to update whole table or at least one of `filter/exclude` clauses.
* Same thing applies to `QuerysetProxy.update(each=False, **kwargs)` which also previously required that you either pass a `filter` (by `**kwargs` or as a separate `filter()` call) or set `each=True` now also accepts
`exclude()` calls that generates NOT filter. So either `each=True` needs to be set to update whole table or at least one of `filter/exclude` clauses.
### 🐛 Fixes
* Fix improper relation field resolution in `QuerysetProxy` if fk column has different database alias.
* Fix hitting recursion error with very complicated models structure with loops when calling `dict()`.
* Fix bug when two non-relation fields were merged (appended) in query result when they were not relation fields (i.e. JSON)
* Fix bug when during translation to dict from list the same relation name is used in chain but leads to different models
* Fix bug when bulk_create would try to save also `property_field` decorated methods and `pydantic` fields
* Fix wrong merging of deeply nested chain of reversed relations
### 💬 Other
* Performance optimizations
* Split tests into packages based on tested area
## 0.10.1
### Features
* add `get_or_none(**kwargs)` method to `QuerySet` and `QuerysetProxy`. It is exact equivalent of `get(**kwargs)` but instead of raising `ormar.NoMatch` exception if there is no db record matching the criteria, `get_or_none` simply returns `None`.
### Fixes
* Fix dialect dependent quoting of column and table names in order_by clauses not working
properly in postgres.
## 0.10.0
### Breaking
* Dropped support for the long deprecated notation of field definition in which you use ormar fields as type hints i.e. `test_field: ormar.Integer() = None`
* Improved type hints -> `mypy` can properly resolve related models fields (`ForeignKey` and `ManyToMany`) as well as return types of `QuerySet` methods.
Those mentioned are now returning proper model (i.e. `Book`) instead of `ormar.Model` type. There is still a problem with reverse sides of relation and `QuerysetProxy` methods,
to ease type hints now those return `Any`. Partially fixes #112.
### Features
* add `select_all(follow: bool = False)` method to `QuerySet` and `QuerysetProxy`.
It is kind of equivalent of the Model's `load_all()` method but can be used directly in a query.
By default `select_all()` adds only directly related models, with `follow=True` also related models
of related models are added without loops in relations. Note that it's not an end `async` method
so you still have to issue `get()`, `all()` etc. as `select_all()` returns a QuerySet (or proxy)
like `fields()` or `order_by()`.
### Internals
* `ormar` fields are no longer stored as classes in `Meta.model_fields` dictionary
but instead they are stored as instances.
## 0.9.9
### Features
* Add possibility to change default ordering of relations and models.
* To change model sorting pass `orders_by = [columns]` where `columns: List[str]` to model `Meta` class
* To change relation order_by pass `orders_by = [columns]` where `columns: List[str]`
* To change reverse relation order_by pass `related_orders_by = [columns]` where `columns: List[str]`
* Arguments can be column names or `-{col_name}` to sort descending
* In relations you can sort only by directly related model columns
or for `ManyToMany` columns also `Through` model columns `"{through_field_name}__{column_name}"`
* Order in which order_by clauses are applied is as follows:
* Explicitly passed `order_by()` calls in query
* Relation passed `orders_by` if exists
* Model `Meta` class `orders_by`
* Model primary key column asc (fallback, used if none of above provided)
* Add 4 new aggregated functions -> `min`, `max`, `sum` and `avg` that are their
corresponding sql equivalents.
* You can pass one or many column names including related columns.
* As of now each column passed is aggregated separately (so `sum(col1+col2)` is not possible,
you can have `sum(col1, col2)` and later add 2 returned sums in python)
* You cannot `sum` and `avg` non numeric columns
* If you aggregate on one column, the single value is directly returned as a result
* If you aggregate on multiple columns a dictionary with column: result pairs is returned
* Add 4 new signals -> `pre_relation_add`, `post_relation_add`, `pre_relation_remove` and `post_relation_remove`
* The newly added signals are emitted for `ManyToMany` relations (both sides)
and reverse side of `ForeignKey` relation (same as `QuerysetProxy` is exposed).
* Signals receive following args: `sender: Type[Model]` - sender class,
`instance: Model` - instance to which related model is added, `child: Model` - model being added,
`relation_name: str` - name of the relation to which child is added,
for add signals also `passed_kwargs: Dict` - dict of kwargs passed to `add()`
### Changes
* `Through` models for ManyToMany relations are now instantiated on creation, deletion and update, so you can provide not only
autoincrement int as a primary key but any column type with default function provided.
* Since `Through` models are now instantiated you can also subscribe to `Through` model
pre/post save/update/delete signals
* `pre_update` signals receivers now get also passed_args argument which is a
dict of values passed to update function if any (else empty dict)
### Fixes
* `pre_update` signal now is sent before the extraction of values so you can modify the passed
instance in place and modified fields values will be reflected in database
* `bulk_update` now works correctly also with `UUID` primary key column type
## 0.9.8
### Features
* Add possibility to encrypt the selected field(s) in the database
* As minimum you need to provide `encrypt_secret` and `encrypt_backend`
* `encrypt_backend` can be one of the `ormar.EncryptBackends` enum (`NONE, FERNET, HASH, CUSTOM`) - default: `NONE`
* When custom backend is selected you need to provide your backend class that subclasses `ormar.fields.EncryptBackend`
* You cannot encrypt `primary_key` column and relation columns (FK and M2M).
* Provided are 2 backends: HASH and FERNET
* HASH is a one-way hash (like for password), never decrypted on retrieval
* FERNET is a two-way encrypt/decrypt backend
* Note that in FERNET backend you lose `filtering` possibility altogether as part of the encrypted value is a timestamp.
* Note that in HASH backend you can filter by full value but filters like `contain` will not work as comparison is made on encrypted values
* Note that adding `encrypt_backend` changes the database column type to `TEXT`, which needs to be reflected in db either by migration or manual change
### Fixes
* (Advanced/ Internal) Restore custom sqlalchemy types (by `types.TypeDecorator` subclass) functionality that ceased working, so `process_result_value` was never called
## 0.9.7
### Features
* Add `isnull` operator to filter and exclude methods.
```python
album__name__isnull=True #(sql: album.name is null)
album__name__isnull=False #(sql: album.name is not null))
```
* Add `ormar.or_` and `ormar.and_` functions that can be used to compose
complex queries with nested conditions.
Sample query:
```python
books = (
await Book.objects.select_related("author")
.filter(
ormar.and_(
ormar.or_(year__gt=1960, year__lt=1940),
author__name="J.R.R. Tolkien",
)
)
.all()
)
```
Check the updated docs in Queries -> Filtering and sorting -> Complex filters
### Other
* Setting default on `ForeignKey` or `ManyToMany` raises a `ModelDefinitionError` exception as it is (and was) not supported
## 0.9.6
### Important
* `Through` model for `ManyToMany` relations now **becomes optional**. It's not a breaking change
since if you provide it everything works just fine as it used to. So if you don't want or need any additional
fields on `Through` model you can skip it. Note that it's going to be created for you automatically and
still has to be included in example in `alembic` migrations.
If you want to delete existing one check the default naming convention to adjust your existing database structure.
Note that you still need to provide it if you want to
customize the `Through` model name or the database table name.
### Features
* Add `update` method to `QuerysetProxy` so now it's possible to update related models directly from parent model
in `ManyToMany` relations and in reverse `ForeignKey` relations. Note that update like in `QuerySet` `update` returns number of
updated models and **does not update related models in place** on parent model. To get the refreshed data on parent model you need to refresh
the related models (i.e. `await model_instance.related.all()`)
* Add `load_all(follow=False, exclude=None)` model method that allows to load current instance of the model
with all related models in one call. By default it loads only directly related models but setting
`follow=True` causes traversing the tree (avoiding loops). You can also pass `exclude` parameter
that works the same as `QuerySet.exclude_fields()` method.
* Added possibility to add more fields on `Through` model for `ManyToMany` relationships:
* name of the through model field is the lowercase name of the Through class
* you can pass additional fields when calling `add(child, **kwargs)` on relation (on `QuerysetProxy`)
* you can pass additional fields when calling `create(**kwargs)` on relation (on `QuerysetProxy`)
when one of the keyword arguments should be the through model name with a dict of values
* you can order by on through model fields
* you can filter on through model fields
* you can include and exclude fields on through models
* through models are attached only to related models (i.e. if you query from A to B -> only on B)
* note that through models are explicitly loaded without relations -> relation is already populated in ManyToMany field.
* note that just like before you cannot declare the relation fields on through model, they will be populated for you by `ormar`,
but now if you try to do so `ModelDefinitionError` will be thrown
* check the updated ManyToMany relation docs for more information
### Other
* Updated docs and api docs
* Refactors and optimisations mainly related to filters, exclusions and order bys
## 0.9.5
### Fixes
* Fix creation of `pydantic` FieldInfo after update of `pydantic` to version >=1.8
* Pin required dependency versions to avoid such situations in the future
## 0.9.4
### Fixes
* Fix `fastapi` OpenAPI schema generation for automatic docs when multiple models refer to the same related one
## 0.9.3
### Fixes
* Fix `JSON` field being double escaped when setting value after initialization
* Fix `JSON` field not respecting `nullable` field setting due to `pydantic` internals
* Fix `choices` verification for `JSON` field
* Fix `choices` not being verified when setting the attribute after initialization
* Fix `choices` not being verified during `update` call from `QuerySet`
## 0.9.2
### Other
* Updated the Quick Start in docs/readme
* Updated docs with links to queries subpage
* Added badges for code climate and pepy downloads
## 0.9.1
### Features
* Add choices values to `OpenAPI` specs, so it looks like native `Enum` field in the result schema.
### Fixes
* Fix `choices` behavior with `fastapi` usage when special fields can be not initialized yet but passed as strings etc.
## 0.9.0
### Important
* **Breaking Fix:** Version 0.8.0 introduced a bug that prevents generation of foreign_keys constraint in the database,
both in alembic and during creation through sqlalchemy.engine, this is fixed now.
* **THEREFORE IF YOU USE VERSION >=0.8.0 YOU ARE STRONGLY ADVISED TO UPDATE** because despite
that most of the `ormar` functions are working your database **CREATED with ormar (or ormar + alembic)**
does not have relations and suffer from perspective of performance and data integrity.
* If you were using `ormar` to connect to existing database your performance and integrity
should be fine nevertheless you should update to reflect all future schema updates in your models.
### Breaking
* **Breaking:** All foreign_keys and unique constraints now have a name so `alembic`
can identify them in db and not depend on db
* **Breaking:** During model construction if `Meta` class of the `Model` does not
include `metadata` or `database` now `ModelDefinitionError` will be raised instead of generic `AttributeError`.
* **Breaking:** `encode/databases` used for running the queries does not have a connection pool
for sqlite backend, meaning that each query is run with a new connection and there is no way to
enable enforcing ForeignKeys constraints as those are by default turned off on every connection.
This is changed in `ormar` since >=0.9.0 and by default each sqlite3 query has `"PRAGMA foreign_keys=1;"`
run so now each sqlite3 connection by default enforces ForeignKey constraints including cascades.
### Other
* Update api docs.
* Add tests for fk creation in db and for cascades in db
## 0.8.1
### Features
* Introduce processing of `ForwardRef` in relations.
Now you can create self-referencing models - both `ForeignKey` and `ManyToMany` relations.
`ForwardRef` can be used both for `to` and `through` `Models`.
* Introduce the possibility to perform two **same relation** joins in one query, so to process complex relations like:
```
B = X = Y
//
A
\
C = X = Y <= before you could link from X to Y only once in one query
unless two different relation were used
(two relation fields with different names)
```
* Introduce the `paginate` method that allows to limit/offset by `page` and `page_size`.
Available for `QuerySet` and `QuerysetProxy`.
### Other
* Refactoring and performance optimization in queries and joins.
* Add python 3.9 to tests and pypi setup.
* Update API docs and docs -> i.e. split of queries documentation.
## 0.8.0
### Breaking
* **Breaking:** `remove()` parent from child side in reverse ForeignKey relation now requires passing a relation `name`,
as the same model can be registered multiple times and `ormar` needs to know from which relation on the parent you want to remove the child.
* **Breaking:** applying `limit` and `offset` with `select_related` is by default applied only on the main table before the join -> meaning that not the total
number of rows is limited but just number of main models (first one in the query, the one used to construct it). You can still limit all rows from db response with `limit_raw_sql=True` flag on either `limit` or `offset` (or both)
* **Breaking:** issuing `first()` now fetches the first row ordered by the primary key asc (so first one inserted (can be different for non number primary keys - i.e. alphabetical order of string))
* **Breaking:** issuing `get()` **without any filters** now fetches the first row ordered by the primary key desc (so should be last one inserted (can be different for non number primary keys - i.e. alphabetical order of string))
* **Breaking (internal):** sqlalchemy columns kept at `Meta.columns` are no longer bind to table, so you cannot get the column straight from there
### Features
* Introduce **inheritance**. For now two types of inheritance are possible:
* **Mixins** - don't subclass `ormar.Model`, just define fields that are later used on different models (like `created_date` and `updated_date` on each child model), only actual models create tables, but those fields from mixins are added
* **Concrete table inheritance** - means that parent is marked as `abstract=True` in Meta class and each child has its own table with columns from the parent and own child columns, kind of similar to Mixins but parent also is a (an abstract) Model
* To read more check the docs on models -> inheritance section.
* QuerySet `first()` can be used with `prefetch_related`
### Fixes
* Fix minor bug in `order_by` for primary model order bys
* Fix in `prefetch_query` for multiple related_names for the same model.
* Fix using same `related_name` on different models leading to the same related `Model` overwriting each other, now `ModelDefinitionError` is raised and you need to change the name.
* Fix `order_by` overwriting conditions when multiple joins to the same table applied.
### Docs
* Split and cleanup in docs:
* Divide models section into subsections
* Divide relations section into subsections
* Divide fields section into subsections
* Add model inheritance section
* Add API (BETA) documentation
## 0.7.5
* Fix for wrong relation column name in many_to_many relation joins (fix [#73][#73])
## 0.7.4
* Allow multiple relations to the same related model/table.
* Fix for wrong relation column used in many_to_many relation joins (fix [#73][#73])
* Fix for wrong relation population for m2m relations when also fk relation present for same model.
* Add check if user provide related_name if there are multiple relations to same table on one model.
* More eager cleaning of the dead weak proxy models.
## 0.7.3
* Fix for fetching related model with UUID pk, which is a string in raw (fix [#71][#71])
## 0.7.2
* Fix for overwriting related models with pk only in `Model.update() with fields passed as parameters` (fix [#70][#70])
## 0.7.1
* Fix for overwriting related models with pk only in `Model.save()` (fix [#68][#68])
## 0.7.0
* **Breaking:** QuerySet `bulk_update` method now raises `ModelPersistenceError` for unsaved models passed instead of `QueryDefinitionError`
* **Breaking:** Model initialization with unknown field name now raises `ModelError` instead of `KeyError`
* Added **Signals**, with pre-defined list signals and decorators: `post_delete`, `post_save`, `post_update`, `pre_delete`,
`pre_save`, `pre_update`
* Add `py.typed` and modify `setup.py` for mypy support
* Performance optimization
* Updated docs
## 0.6.2
* Performance optimization
* Fix for bug with `pydantic_only` fields being required
* Add `property_field` decorator that registers a function as a property that will
be included in `Model.model_dump()` and in `fastapi` response
* Update docs
## 0.6.1
* Explicitly set None to excluded nullable fields to avoid pydantic setting a default value (fix [#60][#60]).
## 0.6.0
* **Breaking:** calling instance.load() when the instance row was deleted from db now raises `NoMatch` instead of `ValueError`
* **Breaking:** calling add and remove on ReverseForeignKey relation now updates the child model in db setting/removing fk column
* **Breaking:** ReverseForeignKey relation now exposes QuerySetProxy API like ManyToMany relation
* **Breaking:** querying related models from ManyToMany cleans list of related models loaded on parent model:
* Example: `post.categories.first()` will set post.categories to list of 1 related model -> the one returned by first()
* Example 2: if post has 4 categories so `len(post.categories) == 4` calling `post.categories.limit(2).all()` -> will load only 2 children and now `assert len(post.categories) == 2`
* Added `get_or_create`, `update_or_create`, `fields`, `exclude_fields`, `exclude`, `prefetch_related` and `order_by` to QuerySetProxy
so now you can use those methods directly from relation
* Update docs
## 0.5.5
* Fix for alembic autogeneration of migration `UUID` columns. It should just produce sqlalchemy `CHAR(32)` or `CHAR(36)`
* In order for this to work you have to set user_module_prefix='sa.' (must be equal to sqlalchemy_module_prefix option (default 'sa.'))
## 0.5.4
* Allow to pass `uuid_format` (allowed 'hex'(default) or 'string') to `UUID` field to change the format in which it's saved.
By default field is saved in hex format (trimmed to 32 chars (without dashes)), but you can pass
format='string' to use 36 (with dashes) instead to adjust to existing db or other libraries.
Sample:
* hex value = c616ab438cce49dbbf4380d109251dce
* string value = c616ab43-8cce-49db-bf43-80d109251dce
## 0.5.3
* Fixed bug in `Model.model_dump()` method that was ignoring exclude parameter and not include dictionary argument.
## 0.5.2
* Added `prefetch_related` method to load subsequent models in separate queries.
* Update docs
## 0.5.1
* Switched to github actions instead of travis
* Update badges in the docs
## 0.5.0
* Added save status -> you can check if model is saved with `ModelInstance.saved` property
* Model is saved after `save/update/load/upsert` method on model
* Model is saved after `create/get/first/all/get_or_create/update_or_create` method
* Model is saved when passed to `bulk_update` and `bulk_create`
* Model is saved after adding/removing `ManyToMany` related objects (through model instance auto saved/deleted)
* Model is **not** saved after change of any own field (including pk as `Model.pk` alias)
* Model is **not** saved after adding/removing `ForeignKey` related object (fk column not saved)
* Model is **not** saved after instantiation with `__init__` (w/o `QuerySet.create` or before calling `save`)
* Added `Model.upsert(**kwargs)` that performs `save()` if pk not set otherwise `update(**kwargs)`
* Added `Model.save_related(follow=False)` that iterates all related objects in all relations and checks if they are saved. If not it calls `upsert()` on each of them.
* **Breaking:** added raising exceptions if `add`-ing/`remove`-ing not saved (pk is None) models to `ManyToMany` relation
* Allow passing dictionaries and sets to fields and exclude_fields
* Auto translate str and lists to dicts for fields and exclude_fields
* **Breaking:** passing nested models to fields and exclude_fields is now by related ForeignKey name and not by target model name
* Performance optimizations - in modelproxy, newbasemodel - > less queries, some properties are cached on models
* Cleanup of unused relations code
* Optional performance dependency orjson added (**strongly recommended**)
* Updated docs
## 0.4.4
* add exclude_fields() method to exclude fields from sql
* refactor column names setting (aliases)
* fix ordering by for column with aliases
* additional tests for fields and exclude_fields
* update docs
## 0.4.3
* include properties in models.model_dump() and model.model_dump_json()
## 0.4.2
* modify creation of pydantic models to allow returning related models with only pk populated
## 0.4.1
* add order_by method to queryset to allow sorting
* update docs
## 0.4.0
* Changed notation in Model definition -> now use name = ormar.Field() not name: ormar.Field()
* Note that old notation is still supported but deprecated and will not play nice with static checkers like mypy and pydantic pycharm plugin
* Type hint docs and test
* Use mypy also for tests, not only the ormar package
* Fix scale and precision translation with max_digits and decimal_places pydantic Decimal field
* Update docs - add best practices for dependencies
* Refactor metaclass and model_fields to play nice with type hints
* Add mypy and pydantic plugin to docs
* Expand the docs on ManyToMany relation
## 0.3.11
* Fix setting server_default as default field value in python
## 0.3.10
* Fix postgresql check to avoid exceptions with drivers not installed if using different backend
## 0.3.9
* Fix json schema generation as of [#19][#19]
* Fix for not initialized ManyToMany relations in fastapi copies of ormar.Models
* Update docs in regard of fastapi use
* Add tests to verify fastapi/docs proper generation
## 0.3.8
* Added possibility to provide alternative database column names with name parameter to all fields.
* Fix bug with selecting related ManyToMany fields with `fields()` if they are empty.
* Updated documentation
## 0.3.7
* Publish documentation and update readme
## 0.3.6
* Add fields() method to limit the selected columns from database - only nullable columns can be excluded.
* Added UniqueColumns and constraints list in model Meta to build unique constraints on list of columns.
* Added UUID field type based on Char(32) column type.
## 0.3.5
* Added bulk_create and bulk_update for operations on multiple objects.
## 0.3.4
Add queryset level methods
* delete
* update
* get_or_create
* update_or_create
## 0.3.3
* Add additional filters - startswith and endswith
## 0.3.2
* Add choices parameter to all fields - limiting the accepted values to ones provided
## 0.3.1
* Added exclude to filter where not conditions.
* Added tests for mysql and postgres with fixes for postgres.
* Refactors and cleanup.
## 0.3.0
* Added ManyToMany field and support for many to many relations
[#19]: https://github.com/collerek/ormar/issues/19
[#60]: https://github.com/collerek/ormar/issues/60
[#68]: https://github.com/collerek/ormar/issues/68
[#70]: https://github.com/collerek/ormar/issues/70
[#71]: https://github.com/collerek/ormar/issues/71
[#73]: https://github.com/collerek/ormar/issues/73
collerek-ormar-c09209a/docs/signals.md 0000664 0000000 0000000 00000023303 15130200524 0017664 0 ustar 00root root 0000000 0000000 # Signals
Signals are a mechanism to fire your piece of code (function / method) whenever given type of event happens in `ormar`.
To achieve this you need to register your receiver for a given type of signal for selected model(s).
## Defining receivers
Given a sample model like following:
```Python
import databases
import sqlalchemy
import ormar
base_ormar_config = ormar.OrmarConfig(
database=databases.Database("sqlite:///db.sqlite"),
metadata=sqlalchemy.MetaData(),
)
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
play_count: int = ormar.Integer(default=0)
```
You can for example define a trigger that will set `album.is_best_seller` status if it will be played more than 50 times.
Import `pre_update` decorator, for list of currently available decorators/ signals check below.
```Python hl_lines="7"
--8<-- "../docs_src/signals/docs002.py"
```
Define your function.
Note that each receiver function:
* has to be **callable**
* has to accept first **`sender`** argument that receives the class of sending object
* has to accept **`**kwargs`** argument as the parameters send in each `ormar.Signal` can change at any time so your function has to serve them.
* has to be **`async`** cause callbacks are gathered and awaited.
`pre_update` currently sends only one argument apart from `sender` and it's `instance` one.
Note how `pre_update` decorator accepts a `senders` argument that can be a single model or a list of models,
for which you want to run the signal receiver.
Currently there is no way to set signal for all models at once without explicitly passing them all into registration of receiver.
```Python hl_lines="28-31"
--8<-- "../docs_src/signals/docs002.py"
```
!!!note
Note that receivers are defined on a class level -> so even if you connect/disconnect function through instance
it will run/ stop running for all operations on that `ormar.Model` class.
Note that our newly created function has instance and class of the instance so you can easily run database
queries inside your receivers if you want to.
```Python hl_lines="41-48"
--8<-- "../docs_src/signals/docs002.py"
```
You can define same receiver for multiple models at once by passing a list of models to signal decorator.
```python
# define a dummy debug function
@pre_update([Album, Track])
async def before_update(sender, instance, **kwargs):
print(f"{sender.get_name()}: {instance.model_dump_json()}: {kwargs}")
```
Of course, you can also create multiple functions for the same signal and model. Each of them will run at each signal.
```python
@pre_update(Album)
async def before_update(sender, instance, **kwargs):
print(f"{sender.get_name()}: {instance.model_dump_json()}: {kwargs}")
@pre_update(Album)
async def before_update2(sender, instance, **kwargs):
print(f'About to update {sender.get_name()} with pk: {instance.pk}')
```
Note that `ormar` decorators are syntactic sugar, you can directly connect your function or method for a given signal for a
given model. Connect accepts only one parameter - your `receiver` function / method.
```python hl_lines="11 13 16"
class AlbumAuditor:
def __init__(self):
self.event_type = "ALBUM_INSTANCE"
async def before_save(self, sender, instance, **kwargs):
await AuditLog(
event_type=f"{self.event_type}_SAVE", event_log=instance.model_dump_json()
).save()
auditor = AlbumAuditor()
pre_save(Album)(auditor.before_save)
# call above has same result like the one below
Album.ormar_config.signals.pre_save.connect(auditor.before_save)
# signals are also exposed on instance
album = Album(name='Miami')
album.signals.pre_save.connect(auditor.before_save)
```
!!!warning
Note that signals keep the reference to your receiver (not a `weakref`) so keep that in mind to avoid circular references.
## Disconnecting the receivers
To stop a receiver from running for a given model you need to disconnect it.
```python hl_lines="7 10"
@pre_update(Album)
async def before_update(sender, instance, **kwargs):
if instance.play_count > 50 and not instance.is_best_seller:
instance.is_best_seller = True
# disconnect given function from signal for given Model
Album.ormar_config.signals.pre_save.disconnect(before_save)
# signals are also exposed on instance
album = Album(name='Miami')
album.signals.pre_save.disconnect(before_save)
```
## Available signals
!!!warning
Note that signals are **not** sent for:
* bulk operations (`QuerySet.bulk_create` and `QuerySet.bulk_update`) as they are designed for speed.
* queryset table level operations (`QuerySet.update` and `QuerySet.delete`) as they run on the underlying tables
(more like raw sql update/delete operations) and do not have specific instance.
### pre_save
`pre_save(sender: Type["Model"], instance: "Model")`
Sent for `Model.save()` and `Model.objects.create()` methods.
`sender` is a `ormar.Model` class and `instance` is the model to be saved.
### post_save
`post_save(sender: Type["Model"], instance: "Model")`
Sent for `Model.save()` and `Model.objects.create()` methods.
`sender` is a `ormar.Model` class and `instance` is the model that was saved.
### pre_update
`pre_update(sender: Type["Model"], instance: "Model")`
Sent for `Model.update()` method.
`sender` is a `ormar.Model` class and `instance` is the model to be updated.
### post_update
`post_update(sender: Type["Model"], instance: "Model")`
Sent for `Model.update()` method.
`sender` is a `ormar.Model` class and `instance` is the model that was updated.
### pre_delete
`pre_delete(sender: Type["Model"], instance: "Model")`
Sent for `Model.delete()` method.
`sender` is a `ormar.Model` class and `instance` is the model to be deleted.
### post_delete
`post_delete(sender: Type["Model"], instance: "Model")`
Sent for `Model.delete()` method.
`sender` is a `ormar.Model` class and `instance` is the model that was deleted.
### pre_relation_add
`pre_relation_add(sender: Type["Model"], instance: "Model", child: "Model",
relation_name: str, passed_kwargs: Dict)`
Sent for `Model.relation_name.add()` method for `ManyToMany` relations and reverse side of `ForeignKey` relation.
`sender` - sender class, `instance` - instance to which related model is added, `child` - model being added,
`relation_name` - name of the relation to which child is added, for add signals also `passed_kwargs` - dict of kwargs passed to `add()`
### post_relation_add
`post_relation_add(sender: Type["Model"], instance: "Model", child: "Model",
relation_name: str, passed_kwargs: Dict)`
Sent for `Model.relation_name.add()` method for `ManyToMany` relations and reverse side of `ForeignKey` relation.
`sender` - sender class, `instance` - instance to which related model is added, `child` - model being added,
`relation_name` - name of the relation to which child is added, for add signals also `passed_kwargs` - dict of kwargs passed to `add()`
### pre_relation_remove
`pre_relation_remove(sender: Type["Model"], instance: "Model", child: "Model",
relation_name: str)`
Sent for `Model.relation_name.remove()` method for `ManyToMany` relations and reverse side of `ForeignKey` relation.
`sender` - sender class, `instance` - instance from which related model is removed, `child` - model being removed,
`relation_name` - name of the relation from which child is removed.
### post_relation_remove
`post_relation_remove(sender: Type["Model"], instance: "Model", child: "Model",
relation_name: str, passed_args: Dict)`
Sent for `Model.relation_name.remove()` method for `ManyToMany` relations and reverse side of `ForeignKey` relation.
`sender` - sender class, `instance` - instance from which related model is removed, `child` - model being removed,
`relation_name` - name of the relation from which child is removed.
### post_bulk_update
`post_bulk_update(sender: Type["Model"], instances: List["Model"], **kwargs)`,
Sent for `Model.objects.bulk_update(List[objects])` method.
## Defining your own signals
Note that you can create your own signals although you will have to send them manually in your code or subclass `ormar.Model`
and trigger your signals there.
Creating new signal is super easy. Following example will set a new signal with name your_custom_signal.
```python hl_lines="21"
import databases
import sqlalchemy
import ormar
base_ormar_config = ormar.OrmarConfig(
database=databases.Database("sqlite:///db.sqlite"),
metadata=sqlalchemy.MetaData(),
)
class Album(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
is_best_seller: bool = ormar.Boolean(default=False)
play_count: int = ormar.Integer(default=0)
Album.ormar_config.signals.your_custom_signal = ormar.Signal()
Album.ormar_config.signals.your_custom_signal.connect(your_receiver_name)
```
Actually under the hood signal is a `SignalEmitter` instance that keeps a dictionary of known signals, and allows you
to access them as attributes. When you try to access a signal that does not exist `SignalEmitter` will create one for you.
So the example above can be simplified as follows - the `Signal` will be created for you.
```
Album.ormar_config.signals.your_custom_signal.connect(your_receiver_name)
```
Now to trigger this signal you need to call send method of the Signal.
```python
await Album.ormar_config.signals.your_custom_signal.send(sender=Album)
```
Note that `sender` is the only required parameter and it should be an ormar Model class.
Additional parameters have to be passed as keyword arguments.
```python
await Album.ormar_config.signals.your_custom_signal.send(sender=Album, my_param=True)
```
collerek-ormar-c09209a/docs/transactions.md 0000664 0000000 0000000 00000004222 15130200524 0020733 0 ustar 00root root 0000000 0000000 # Transactions
Database transactions are supported thanks to `encode/databases` which is used to issue async queries.
## Basic usage
To use transactions use `database.transaction` as async context manager:
```python
async with database.transaction():
# everything called here will be one transaction
await Model1().save()
await Model2().save()
...
```
!!!note
Note that it has to be the same `database` as the one used in Model's `ormar_config` object.
To avoid passing `database` instance around in your code you can extract the instance from each `Model`.
Database provided during declaration of `ormar.Model` is available through `ormar_config.database` and can
be reached from both class and instance.
```python
import databases
import sqlalchemy
import ormar
base_ormar_config = ormar.OrmarConfig(
metadata=sqlalchemy.MetaData(),
database = databases.Database("sqlite:///"),
)
class Author(ormar.Model):
ormar_config = base_ormar_config.copy()
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=255)
# database is accessible from class
database = Author.ormar_config.database
# as well as from instance
author = Author(name="Stephen King")
database = author.ormar_config.database
```
You can also use `.transaction()` as a function decorator on any async function:
```python
@database.transaction()
async def create_users(request):
...
```
Transaction blocks are managed as task-local state. Nested transactions
are fully supported, and are implemented using database savepoints.
## Manual commits/ rollbacks
For a lower-level transaction API you can trigger it manually
```python
transaction = await database.transaction()
try:
await transaction.start()
...
except:
await transaction.rollback()
else:
await transaction.commit()
```
## Testing
Transactions can also be useful during testing when you can apply force rollback
and you do not have to clean the data after each test.
```python
@pytest.mark.asyncio
async def sample_test():
async with database:
async with database.transaction(force_rollback=True):
# your test code here
...
```
collerek-ormar-c09209a/docs_src/ 0000775 0000000 0000000 00000000000 15130200524 0016550 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0020647 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/aggregations/ 0000775 0000000 0000000 00000000000 15130200524 0021222 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/aggregations/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023321 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/aggregations/docs001.py 0000664 0000000 0000000 00000001540 15130200524 0022745 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
from tests.settings import DATABASE_URL
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database,
)
class Author(ormar.Model):
    """Author model; query results are ordered by name descending by default."""

    ormar_config = base_ormar_config.copy(tablename="authors", order_by=["-name"])

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
ormar_config = base_ormar_config.copy(
tablename="books", order_by=["year", "-ranking"]
)
id: int = ormar.Integer(primary_key=True)
author: Optional[Author] = ormar.ForeignKey(Author)
title: str = ormar.String(max_length=100)
year: int = ormar.Integer(nullable=True)
ranking: int = ormar.Integer(nullable=True)
collerek-ormar-c09209a/docs_src/fastapi/ 0000775 0000000 0000000 00000000000 15130200524 0020177 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fastapi/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022276 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fastapi/docs001.py 0000664 0000000 0000000 00000003004 15130200524 0021717 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
from fastapi import FastAPI
from tests.lifespan import lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Category(ormar.Model):
    """Category lookup table."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Item(ormar.Model):
    """Item optionally assigned to a Category."""

    ormar_config = base_ormar_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
@app.get("/items/", response_model=List[Item])
async def get_items():
items = await Item.objects.select_related("category").all()
return items
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
await item.save()
return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
await category.save()
return category
@app.put("/items/{item_id}")
async def get_item(item_id: int, item: Item):
item_db = await Item.objects.get(pk=item_id)
return await item_db.update(**item.model_dump())
@app.delete("/items/{item_id}")
async def delete_item(item_id: int, item: Item = None):
if item:
return {"deleted_rows": await item.delete()}
item_db = await Item.objects.get(pk=item_id)
return {"deleted_rows": await item_db.delete()}
collerek-ormar-c09209a/docs_src/fastapi/mypy/ 0000775 0000000 0000000 00000000000 15130200524 0021175 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fastapi/mypy/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023274 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fastapi/mypy/docs001.py 0000664 0000000 0000000 00000000605 15130200524 0022721 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Example model declared without explicit type annotations (mypy docs)."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id = ormar.Integer(primary_key=True)
    name = ormar.String(max_length=100)
    completed = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/fields/ 0000775 0000000 0000000 00000000000 15130200524 0020016 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fields/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022115 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/fields/docs001.py 0000664 0000000 0000000 00000002273 15130200524 0021545 0 ustar 00root root 0000000 0000000 import asyncio
from typing import Optional
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"
ormar_base_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)
class Department(ormar.Model):
    """Target of the Course.department foreign key."""

    ormar_config = ormar_base_config.copy(tablename="departments")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Course(ormar.Model):
    """Course with an optional relation to Department."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    department: Optional[Department] = ormar.ForeignKey(Department)
@create_drop_database(base_config=ormar_base_config)
async def verify():
    """Show that a related Course is reachable from Department even before saving."""
    department = await Department(name="Science").save()
    course = Course(name="Math", completed=False, department=department)
    # the reverse relation is populated on assignment, before the save below
    print(department.courses[0])
    # Will produce:
    # Course(id=None,
    # name='Math',
    # completed=False,
    # department=Department(id=None, name='Science'))
    await course.save()
asyncio.run(verify())
collerek-ormar-c09209a/docs_src/fields/docs002.py 0000664 0000000 0000000 00000001762 15130200524 0021550 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"
ormar_base_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)
class Department(ormar.Model):
    """Department side of the one-to-many relation."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Course(ormar.Model):
    """Course pointing at Department; the reverse side is exposed as `my_courses`."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    # related_name overrides the default reverse accessor name ("courses")
    department: Optional[Department] = ormar.ForeignKey(
        Department, related_name="my_courses"
    )


department = Department(name="Science")
course = Course(name="Math", completed=False, department=department)
print(department.my_courses[0])
# Will produce:
# Course(id=None,
# name='Math',
# completed=False,
# department=Department(id=None, name='Science'))
collerek-ormar-c09209a/docs_src/fields/docs003.py 0000664 0000000 0000000 00000001325 15130200524 0021544 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Department(ormar.Model):
    """Target of the Course.department foreign key."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Course(ormar.Model):
    """Course with an optional relation to Department."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    department: Optional[Department] = ormar.ForeignKey(Department)
collerek-ormar-c09209a/docs_src/fields/docs004.py 0000664 0000000 0000000 00000001167 15130200524 0021551 0 ustar 00root root 0000000 0000000 from datetime import datetime
import databases
import ormar
import sqlalchemy
from sqlalchemy import func, text
database = databases.Database("sqlite:///test.db")
metadata = sqlalchemy.MetaData()
class Product(ormar.Model):
    """Model demonstrating server-side column defaults."""

    ormar_config = ormar.OrmarConfig(
        database=database, metadata=metadata, tablename="product"
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # literal server default
    company: str = ormar.String(max_length=200, server_default="Acme")
    # raw SQL text default
    sort_order: int = ormar.Integer(server_default=text("10"))
    # database function default
    created: datetime = ormar.DateTime(server_default=func.now())
collerek-ormar-c09209a/docs_src/models/ 0000775 0000000 0000000 00000000000 15130200524 0020033 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/models/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022132 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/models/docs001.py 0000664 0000000 0000000 00000000625 15130200524 0021561 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Minimal ormar model example."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs002.py 0000664 0000000 0000000 00000001101 15130200524 0021550 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model with an explicit, custom table name."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        # if you omit this parameter it will be created automatically
        # as class.__name__.lower()+'s' -> "courses" in this example
        tablename="my_courses",
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs003.py 0000664 0000000 0000000 00000001651 15130200524 0021563 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model used to inspect the generated pydantic fields."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)


# ormar models are pydantic models, so the declared columns
# show up as regular pydantic model fields
print(Course.model_fields)
"""
Will produce:
{'id': Field(name='id',
type=Optional[int],
required=False,
default=None),
'name': Field(name='name',
type=Optional[str],
required=False,
default=None),
'completed': Field(name='completed',
type=bool,
required=False,
default=False)}
"""
collerek-ormar-c09209a/docs_src/models/docs004.py 0000664 0000000 0000000 00000001244 15130200524 0021562 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"
ormar_base_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)
class Course(ormar.Model):
    """Model used to inspect the generated sqlalchemy table."""

    ormar_config = ormar.OrmarConfig(
        tablename="courses",
        database=databases.Database(DATABASE_URL),
        metadata=sqlalchemy.MetaData(),
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)


# the underlying sqlalchemy table is available on the config
print(Course.ormar_config.table.columns)
"""
Will produce:
ImmutableColumnCollection(courses.id, courses.name, courses.completed)
"""
collerek-ormar-c09209a/docs_src/models/docs005.py 0000664 0000000 0000000 00000011006 15130200524 0021560 0 ustar 00root root 0000000 0000000 import pprint
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Minimal model used to dump ormar's internal field definitions below."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
pprint.pp({x: v.__dict__ for x, v in Course.ormar_config.model_fields.items()})
"""
Will produce:
{'id': {'__type__': ,
'__pydantic_type__': ,
'__sample__': 0,
'related_name': None,
'column_type': Integer(),
'constraints': [],
'name': 'id',
'db_alias': None,
'primary_key': True,
'autoincrement': True,
'nullable': True,
'sql_nullable': False,
'index': False,
'unique': False,
'virtual': None,
'is_multi': None,
'is_relation': None,
'is_through': False,
'through_relation_name': None,
'through_reverse_relation_name': None,
'skip_reverse': False,
'skip_field': False,
'owner': ,
'to': None,
'to_pk_only': None,
'through': None,
'self_reference': False,
'self_reference_primary': None,
'orders_by': None,
'related_orders_by': None,
'encrypt_secret': None,
'encrypt_backend': ,
'encrypt_custom_backend': None,
'ormar_default': None,
'server_default': None,
'comment': None,
'represent_as_base64_str': False,
'minimum': None,
'maximum': None,
'multiple_of': None,
'ge': None,
'le': None},
'name': {'__type__': ,
'__pydantic_type__': ,
'__sample__': 'string',
'related_name': None,
'column_type': String(length=100),
'constraints': [],
'name': 'name',
'db_alias': None,
'primary_key': False,
'autoincrement': False,
'nullable': False,
'sql_nullable': False,
'index': False,
'unique': False,
'virtual': None,
'is_multi': None,
'is_relation': None,
'is_through': False,
'through_relation_name': None,
'through_reverse_relation_name': None,
'skip_reverse': False,
'skip_field': False,
'owner': ,
'to': None,
'to_pk_only': None,
'through': None,
'self_reference': False,
'self_reference_primary': None,
'orders_by': None,
'related_orders_by': None,
'encrypt_secret': None,
'encrypt_backend': ,
'encrypt_custom_backend': None,
'ormar_default': None,
'server_default': None,
'comment': None,
'represent_as_base64_str': False,
'max_length': 100,
'min_length': None,
'regex': None},
'completed': {'__type__': ,
'__pydantic_type__': ,
'__sample__': True,
'related_name': None,
'column_type': Boolean(),
'constraints': [],
'name': 'completed',
'db_alias': None,
'primary_key': False,
'autoincrement': False,
'nullable': True,
'sql_nullable': True,
'index': False,
'unique': False,
'virtual': None,
'is_multi': None,
'is_relation': None,
'is_through': False,
'through_relation_name': None,
'through_reverse_relation_name': None,
'skip_reverse': False,
'skip_field': False,
'owner': ,
'to': None,
'to_pk_only': None,
'through': None,
'self_reference': False,
'self_reference_primary': None,
'orders_by': None,
'related_orders_by': None,
'encrypt_secret': None,
'encrypt_backend': ,
'encrypt_custom_backend': None,
'ormar_default': False,
'server_default': None,
'comment': None,
'represent_as_base64_str': False}}
"""
collerek-ormar-c09209a/docs_src/models/docs006.py 0000664 0000000 0000000 00000001234 15130200524 0021563 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model with a composite unique constraint on (name, completed)."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        # define your constraints in OrmarConfig of the model
        # it's a list that can contain multiple constraints
        # here a combination of name and column will have to be unique in db
        constraints=[ormar.UniqueColumns("name", "completed")],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs007.py 0000664 0000000 0000000 00000001403 15130200524 0021562 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"
ormar_base_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)
class Course(ormar.Model):
    """Course model persisted by the example query below."""

    ormar_config = ormar_base_config.copy(tablename="courses")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)


@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Create a row via instance save() and via objects.create()."""
    course = Course(name="Painting for dummies", completed=False)
    await course.save()
    await Course.objects.create(name="Painting for dummies", completed=False)
asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/models/docs008.py 0000664 0000000 0000000 00000001130 15130200524 0021560 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
DATABASE_URl = "sqlite:///test.db"
database = databases.Database(DATABASE_URl, force_rollback=True)
metadata = sqlalchemy.MetaData()
class Child(ormar.Model):
    """Model whose fields are aliased to custom database column names."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="children",
    )

    # `name=` sets the database column name; the attribute name stays pythonic
    id: int = ormar.Integer(name="child_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year_born", nullable=True)
collerek-ormar-c09209a/docs_src/models/docs009.py 0000664 0000000 0000000 00000001660 15130200524 0021571 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///test.db", force_rollback=True)
metadata = sqlalchemy.MetaData()
class Artist(ormar.Model):
    """Artist with all columns aliased to custom database names."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="artists",
    )

    id: int = ormar.Integer(name="artist_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year")


class Album(ormar.Model):
    """Album related to an Artist; the FK column is aliased to `artist_id`."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="music_albums",
    )

    id: int = ormar.Integer(name="album_id", primary_key=True)
    name: str = ormar.String(name="album_name", max_length=100)
    artist: Optional[Artist] = ormar.ForeignKey(Artist, name="artist_id")
collerek-ormar-c09209a/docs_src/models/docs010.py 0000664 0000000 0000000 00000002324 15130200524 0021557 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
DATABASE_URl = "sqlite:///test.db"
database = databases.Database(DATABASE_URl, force_rollback=True)
metadata = sqlalchemy.MetaData()
class Child(ormar.Model):
    """Child model with every field aliased to a custom database column name."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="children",
    )

    id: int = ormar.Integer(name="child_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year_born", nullable=True)


class ArtistChildren(ormar.Model):
    """Explicit through model for the Artist <-> Child many-to-many relation."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="children_x_artists",
    )


class Artist(ormar.Model):
    """Artist with a many-to-many relation to Child via ArtistChildren."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        tablename="artists",
    )

    id: int = ormar.Integer(name="artist_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year")
    children = ormar.ManyToMany(Child, through=ArtistChildren)
collerek-ormar-c09209a/docs_src/models/docs012.py 0000664 0000000 0000000 00000000605 15130200524 0021561 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model declared without explicit type annotations - ormar infers them."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id = ormar.Integer(primary_key=True)
    name = ormar.String(max_length=100)
    completed = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs013.py 0000664 0000000 0000000 00000001537 15130200524 0021567 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"
ormar_base_config = ormar.OrmarConfig(
database=databases.Database(DATABASE_URL),
metadata=sqlalchemy.MetaData(),
)
class Artist(ormar.Model):
    """Artist model; `born_year` is aliased to the database column `year`."""

    # note that tablename is optional
    # if not provided ormar will use class.__name__.lower()+'s'
    # -> artists in this example
    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=100)
    last_name: str = ormar.String(max_length=100)
    born_year: int = ormar.Integer(name="year")


class Album(ormar.Model):
    """Album with a foreign key to Artist."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    artist: Optional[Artist] = ormar.ForeignKey(Artist)
collerek-ormar-c09209a/docs_src/models/docs014.py 0000664 0000000 0000000 00000000734 15130200524 0021566 0 ustar 00root root 0000000 0000000 import databases
import ormar
import pydantic
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model mixing ormar columns with a plain (non-database) pydantic field."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    # pydantic-only field - validated but not stored in the database
    non_db_field: str = pydantic.Field(max_length=100)
collerek-ormar-c09209a/docs_src/models/docs015.py 0000664 0000000 0000000 00000000756 15130200524 0021573 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model exposing a computed, non-database property."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)

    @property
    def prefixed_name(self):
        # derived value - not a column, computed from `name` on access
        return "custom_prefix__" + self.name
collerek-ormar-c09209a/docs_src/models/docs016.py 0000664 0000000 0000000 00000000732 15130200524 0021566 0 ustar 00root root 0000000 0000000 import databases
import ormar
import pydantic
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model customizing pydantic behaviour via model_config (frozen instances)."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    model_config = pydantic.ConfigDict(frozen=True)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs017.py 0000664 0000000 0000000 00000001243 15130200524 0021565 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()
class Course(ormar.Model):
    """Model with a compound (multi-column) index."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        # define your constraints in OrmarConfig of the model
        # it's a list that can contain multiple constraints
        # here a combination of name and column will have a compound index in the db
        constraints=[ormar.IndexColumns("name", "completed")],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
collerek-ormar-c09209a/docs_src/models/docs018.py 0000664 0000000 0000000 00000001404 15130200524 0021565 0 ustar 00root root 0000000 0000000 import datetime
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()


class Course(ormar.Model):
    """Example model declaring a table-level CHECK constraint.

    The constraint makes the database reject rows where a course
    does not start before it ends.
    """

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
        # define your constraints in OrmarConfig of the model
        # it's a list that can contain multiple constraints
        # here the two date columns get a table-level CHECK in the db;
        # the expression must reference the actual column names
        # (previously start_time/end_time, which do not exist on this
        # table and would make table creation fail)
        constraints=[
            ormar.CheckColumns("start_date < end_date", name="date_check"),
        ],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    start_date: datetime.date = ormar.Date()
    end_date: datetime.date = ormar.Date()
collerek-ormar-c09209a/docs_src/queries/ 0000775 0000000 0000000 00000000000 15130200524 0020225 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/queries/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022324 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/queries/docs001.py 0000664 0000000 0000000 00000001262 15130200524 0021751 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Album(ormar.Model):
    """Parent side of the Album <- Track foreign key relation."""

    ormar_config = ormar_base_config.copy(tablename="album")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Track(ormar.Model):
    """Child model; ``album`` is a nullable foreign key to Album."""

    ormar_config = ormar_base_config.copy(tablename="track")

    id: int = ormar.Integer(primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album)
    title: str = ormar.String(max_length=100)
    position: int = ormar.Integer()
collerek-ormar-c09209a/docs_src/queries/docs002.py 0000664 0000000 0000000 00000002231 15130200524 0021747 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Book(ormar.Model):
    """Sample model used to demonstrate queryset-level updates."""

    ormar_config = ormar_base_config.copy(
        tablename="books",
    )

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    author: str = ormar.String(max_length=100)
    genre: str = ormar.String(
        max_length=100,
        default="Fiction",
    )
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Update every row at once with ``update(each=True, ...)``."""
    await Book.objects.create(
        title="Tom Sawyer", author="Twain, Mark", genre="Adventure"
    )
    await Book.objects.create(
        title="War and Peace", author="Tolstoy, Leo", genre="Fiction"
    )
    await Book.objects.create(
        title="Anna Karenina", author="Tolstoy, Leo", genre="Fiction"
    )

    # each=True is mandatory when updating without any filter applied
    await Book.objects.update(each=True, genre="Fiction")
    all_books = await Book.objects.filter(genre="Fiction").all()
    assert len(all_books) == 3


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs003.py 0000664 0000000 0000000 00000002601 15130200524 0021751 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Book(ormar.Model):
    """Sample model used to demonstrate ``update_or_create``."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    author: str = ormar.String(max_length=100)
    genre: str = ormar.String(
        max_length=100,
        default="Fiction",
    )
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Insert-or-update rows through ``update_or_create``."""
    await Book.objects.create(
        title="Tom Sawyer", author="Twain, Mark", genre="Adventure"
    )
    await Book.objects.create(
        title="War and Peace", author="Tolstoy, Leo", genre="Fiction"
    )
    await Book.objects.create(
        title="Anna Karenina", author="Tolstoy, Leo", genre="Fiction"
    )

    # if not exist the instance will be persisted in db
    vol2 = await Book.objects.update_or_create(
        title="Volume II", author="Anonymous", genre="Fiction"
    )
    assert await Book.objects.count() == 4

    # if pk or pkname passed in kwargs (like id here) the object will be updated
    assert await Book.objects.update_or_create(id=vol2.id, genre="Historic")
    # row count is unchanged - the existing row was updated, not inserted
    assert await Book.objects.count() == 4


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs004.py 0000664 0000000 0000000 00000001651 15130200524 0021756 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class ToDo(ormar.Model):
    """Sample model used to demonstrate ``bulk_create``."""

    ormar_config = ormar_base_config.copy(tablename="todos")

    id: int = ormar.Integer(primary_key=True)
    text: str = ormar.String(max_length=500)
    completed: bool = ormar.Boolean(default=False)
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Insert several rows with a single query and confirm they were stored."""
    # bulk_create persists all passed instances at once
    new_items = [
        ToDo(text="Buy the groceries."),
        ToDo(text="Call Mum.", completed=True),
        ToDo(text="Send invoices.", completed=True),
    ]
    await ToDo.objects.bulk_create(new_items)

    stored = await ToDo.objects.all()
    assert len(stored) == 3


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs005.py 0000664 0000000 0000000 00000002403 15130200524 0021753 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Book(ormar.Model):
    """Sample model used to demonstrate queryset-level deletes."""

    ormar_config = ormar_base_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    author: str = ormar.String(max_length=100)
    genre: str = ormar.String(
        max_length=100,
        default="Fiction",
    )
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Delete rows matching filter kwargs passed straight to ``delete()``."""
    await Book.objects.create(
        title="Tom Sawyer", author="Twain, Mark", genre="Adventure"
    )
    await Book.objects.create(
        title="War and Peace in Space", author="Tolstoy, Leo", genre="Fantasy"
    )
    await Book.objects.create(
        title="Anna Karenina", author="Tolstoy, Leo", genre="Fiction"
    )

    # delete accepts kwargs that will be used in filter
    # acting in same way as queryset.filter(**kwargs).delete()
    await Book.objects.delete(genre="Fantasy")  # delete all fantasy books
    all_books = await Book.objects.all()
    assert len(all_books) == 2


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs006.py 0000664 0000000 0000000 00000003252 15130200524 0021757 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Company(ormar.Model):
    """Manufacturer; referenced by Car via a foreign key."""

    ormar_config = ormar_base_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    founded: int = ormar.Integer(nullable=True)
class Car(ormar.Model):
    """Car model with a foreign key to its manufacturer."""

    ormar_config = ormar_base_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    # note: the type annotation is optional on ormar fields
    manufacturer = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Populate one manufacturer with three cars as sample data."""
    # build some sample data
    toyota = await Company.objects.create(name="Toyota", founded=1937)
    await Car.objects.create(
        manufacturer=toyota,
        name="Corolla",
        year=2020,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Yaris",
        year=2019,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Supreme",
        year=2020,
        gearbox_type="Auto",
        gears=6,
        aircon_type="Auto",
    )


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs007.py 0000664 0000000 0000000 00000002444 15130200524 0021762 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Owner(ormar.Model):
    """Owner of toys; referenced by Toy via a foreign key."""

    ormar_config = ormar_base_config.copy(tablename="owners")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Toy(ormar.Model):
    """Toy belonging to an Owner."""

    ormar_config = ormar_base_config.copy(tablename="toys")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    owner: Owner = ormar.ForeignKey(Owner)
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Populate owners and their toys as sample data."""
    aphrodite = await Owner.objects.create(name="Aphrodite")
    hermes = await Owner.objects.create(name="Hermes")
    zeus = await Owner.objects.create(name="Zeus")

    # insertion order kept identical to the original sample data
    for toy_name, toy_owner in [
        ("Toy 4", zeus),
        ("Toy 5", hermes),
        ("Toy 2", aphrodite),
        ("Toy 1", zeus),
        ("Toy 3", aphrodite),
        ("Toy 6", hermes),
    ]:
        await Toy.objects.create(name=toy_name, owner=toy_owner)


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs008.py 0000664 0000000 0000000 00000006702 15130200524 0021764 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
from pydantic import ValidationError
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Company(ormar.Model):
    """Manufacturer; referenced by Car via a foreign key."""

    ormar_config = ormar_base_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    founded: int = ormar.Integer(nullable=True)
class Car(ormar.Model):
    """Car model with a foreign key to its manufacturer."""

    ormar_config = ormar_base_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    # note: the type annotation is optional on ormar fields
    manufacturer = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Demonstrate excluding own and related columns from SELECT queries."""
    # build some sample data
    toyota = await Company.objects.create(name="Toyota", founded=1937)
    await Car.objects.create(
        manufacturer=toyota,
        name="Corolla",
        year=2020,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Yaris",
        year=2019,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Supreme",
        year=2020,
        gearbox_type="Auto",
        gears=6,
        aircon_type="Auto",
    )

    # select manufacturer but only name,
    # to include related models use notation {model_name}__{column}
    all_cars = (
        await Car.objects.select_related("manufacturer")
        .exclude_fields(
            ["year", "gearbox_type", "gears", "aircon_type", "manufacturer__founded"]
        )
        .all()
    )
    for car in all_cars:
        # excluded columns will yield None
        assert all(
            getattr(car, x) is None
            for x in ["year", "gearbox_type", "gears", "aircon_type"]
        )
        # included column on related models will be available,
        # pk column is always included
        # even if you do not include it in fields list
        assert car.manufacturer.name == "Toyota"
        # also in the nested related models -
        # you cannot exclude pk - it's always auto added
        assert car.manufacturer.founded is None

    # exclude_fields() can be called several times,
    # building up the columns to exclude,
    # models selected in select_related
    # but with no columns in fields list implies all fields
    all_cars = (
        await Car.objects.select_related("manufacturer")
        .exclude_fields("year")
        # fixed: the excluded field is "gears" (was "gear", which does
        # not exist on the Car model)
        .exclude_fields(["gears", "gearbox_type"])
        .all()
    )
    # all fields from company model are selected
    assert all_cars[0].manufacturer.name == "Toyota"
    assert all_cars[0].manufacturer.founded == 1937

    # cannot exclude mandatory model columns -
    # manufacturer__name in this example - note usage of dict/set this time
    try:
        await Car.objects.select_related("manufacturer").exclude_fields(
            {"manufacturer": {"name"}}
        ).all()
    except ValidationError:
        # will raise pydantic ValidationError as company.name is required
        pass


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/queries/docs009.py 0000664 0000000 0000000 00000004314 15130200524 0021762 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Company(ormar.Model):
    """Manufacturer; referenced by Car via a foreign key."""

    ormar_config = ormar_base_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    founded: int = ormar.Integer(nullable=True)
class Car(ormar.Model):
    """Car model with a foreign key to its manufacturer."""

    ormar_config = ormar_base_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    # note: the type annotation is optional on ormar fields
    manufacturer = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Show the equivalent notations accepted by ``fields()``."""
    # 1. like in example above
    await Car.objects.select_related("manufacturer").fields(
        ["id", "name", "manufacturer__name"]
    ).all()

    # 2. to mark a field as required use ellipsis
    await Car.objects.select_related("manufacturer").fields(
        {"id": ..., "name": ..., "manufacturer": {"name": ...}}
    ).all()

    # 3. to include whole nested model use ellipsis
    await Car.objects.select_related("manufacturer").fields(
        {"id": ..., "name": ..., "manufacturer": ...}
    ).all()

    # 4. to specify fields at last nesting level you can also use set
    # - equivalent to 2. above
    await Car.objects.select_related("manufacturer").fields(
        {"id": ..., "name": ..., "manufacturer": {"name"}}
    ).all()

    # 5. of course set can have multiple fields
    await Car.objects.select_related("manufacturer").fields(
        {"id": ..., "name": ..., "manufacturer": {"name", "founded"}}
    ).all()

    # 6. you can include all nested fields,
    # but it will be equivalent of 3. above which is shorter
    await Car.objects.select_related("manufacturer").fields(
        {"id": ..., "name": ..., "manufacturer": {"id", "name", "founded"}}
    ).all()


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/relations/ 0000775 0000000 0000000 00000000000 15130200524 0020550 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/relations/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022647 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/relations/docs001.py 0000664 0000000 0000000 00000002410 15130200524 0022270 0 ustar 00root root 0000000 0000000 from typing import Dict, Optional, Union
import databases
import ormar
import sqlalchemy
database = databases.Database("sqlite:///db.sqlite")
metadata = sqlalchemy.MetaData()


class Department(ormar.Model):
    """Related model referenced by Course via a foreign key."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Course(ormar.Model):
    """Course whose FK accepts a model instance, a pk value or a dict."""

    ormar_config = ormar.OrmarConfig(
        database=database,
        metadata=metadata,
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    department: Optional[Union[Department, Dict]] = ormar.ForeignKey(Department)
department = Department(name="Science")
# set up a relation with actual Model instance
course = Course(name="Math", completed=False, department=department)
# set up relation with only related model pk value
course2 = Course(name="Math II", completed=False, department=department.pk)
# set up a relation with dictionary corresponding to related model
course3 = Course(name="Math III", completed=False, department=department.model_dump())
# explicitly set up None
# NOTE(review): name duplicates course3's "Math III" - likely meant "Math IV"; confirm
course4 = Course(name="Math III", completed=False, department=None)
collerek-ormar-c09209a/docs_src/relations/docs002.py 0000664 0000000 0000000 00000001714 15130200524 0022277 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)


class Author(ormar.Model):
    """Author referenced by Post via a foreign key."""

    ormar_config = ormar_base_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)
class Category(ormar.Model):
    """Category linked to Post through a many-to-many relation."""

    ormar_config = ormar_base_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
class Post(ormar.Model):
    """Post combining a many-to-many (categories) and a FK (author)."""

    ormar_config = ormar_base_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories: Optional[List[Category]] = ormar.ManyToMany(Category)
    author: Optional[Author] = ormar.ForeignKey(Author)
collerek-ormar-c09209a/docs_src/relations/docs003.py 0000664 0000000 0000000 00000001225 15130200524 0022275 0 ustar 00root root 0000000 0000000 from typing import Dict, Optional, Union
import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)


class Department(ormar.Model):
    """Related model referenced by Course via a foreign key."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Course(ormar.Model):
    """Course whose FK accepts a model instance, a pk value or a dict."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    department: Optional[Union[Department, Dict]] = ormar.ForeignKey(Department)
collerek-ormar-c09209a/docs_src/relations/docs004.py 0000664 0000000 0000000 00000001566 15130200524 0022306 0 ustar 00root root 0000000 0000000 import databases
import ormar
import sqlalchemy
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)


class Category(ormar.Model):
    """Target model of the many-to-many relation declared on Post."""

    ormar_config = ormar_base_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
class PostCategory(ormar.Model):
    """Custom through model for the Post <-> Category relation.

    Carries extra columns stored on the join table itself.
    """

    ormar_config = ormar_base_config.copy(tablename="posts_x_categories")

    id: int = ormar.Integer(primary_key=True)
    sort_order: int = ormar.Integer(nullable=True)
    param_name: str = ormar.String(default="Name", max_length=200)
class Post(ormar.Model):
    """Post whose categories relation goes through the custom join model."""

    ormar_config = ormar_base_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    # through= points at the custom join table defined above
    categories = ormar.ManyToMany(Category, through=PostCategory)
collerek-ormar-c09209a/docs_src/select_columns/ 0000775 0000000 0000000 00000000000 15130200524 0021567 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/select_columns/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023666 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/select_columns/docs001.py 0000664 0000000 0000000 00000003267 15130200524 0023322 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
from tests.settings import DATABASE_URL
base_ormar_config = ormar.OrmarConfig(
    # force_rollback reverts all changes on disconnect - handy for tests
    database=databases.Database(DATABASE_URL, force_rollback=True),
    metadata=sqlalchemy.MetaData(),
)


class Company(ormar.Model):
    """Manufacturer; referenced by Car via a foreign key."""

    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    founded: int = ormar.Integer(nullable=True)
class Car(ormar.Model):
    """Car model with a foreign key to its manufacturer."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # note: the type annotation is optional on ormar fields
    manufacturer = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
@create_drop_database(base_config=base_ormar_config)
async def sample_data():
    """Populate one manufacturer with three cars as sample data."""
    # build some sample data
    toyota = await Company.objects.create(name="Toyota", founded=1937)
    await Car.objects.create(
        manufacturer=toyota,
        name="Corolla",
        year=2020,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Yaris",
        year=2019,
        gearbox_type="Manual",
        gears=5,
        aircon_type="Manual",
    )
    await Car.objects.create(
        manufacturer=toyota,
        name="Supreme",
        year=2020,
        gearbox_type="Auto",
        gears=6,
        aircon_type="Auto",
    )


asyncio.run(sample_data())
collerek-ormar-c09209a/docs_src/signals/ 0000775 0000000 0000000 00000000000 15130200524 0020210 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/signals/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022307 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/docs_src/signals/docs002.py 0000664 0000000 0000000 00000002421 15130200524 0021733 0 ustar 00root root 0000000 0000000 import asyncio
import databases
import ormar
import sqlalchemy
from examples import create_drop_database
from ormar import pre_update
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
)


class Album(ormar.Model):
    """Album whose best-seller flag is maintained by a pre_update signal."""

    ormar_config = ormar_base_config.copy(
        tablename="albums",
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False)
    play_count: int = ormar.Integer(default=0)
@pre_update(Album)
async def before_update(sender, instance, **kwargs):
    """Signal receiver run before every ``Album.update()``.

    Promotes an album to best seller once its play_count exceeds 50.
    """
    if instance.play_count > 50 and not instance.is_best_seller:
        instance.is_best_seller = True
@create_drop_database(base_config=ormar_base_config)
async def run_query():
    """Show the ``pre_update`` signal flipping the best-seller flag."""
    # here album.play_count and is_best_seller get default values
    album = await Album.objects.create(name="Venice")
    assert not album.is_best_seller
    assert album.play_count == 0

    album.play_count = 30
    # here a trigger is called but play_count is too low
    await album.update()
    assert not album.is_best_seller

    album.play_count = 60
    # now the receiver promotes the album before the update is persisted
    await album.update()
    assert album.is_best_seller


asyncio.run(run_query())
collerek-ormar-c09209a/docs_src/test_all_docs.py 0000664 0000000 0000000 00000001006 15130200524 0021736 0 ustar 00root root 0000000 0000000 import subprocess
import sys
from pathlib import Path
import pytest
# Collect every documentation example script under this directory tree.
# (A leftover debug loop that printed every path at import time was removed,
# and the "*" glob + endswith(".py") filter was simplified to rglob("*.py").)
filepaths = []
path = Path(__file__).parent
for p in path.rglob("*.py"):
    # skip package markers and this test module itself
    if p.name != "__init__.py" and p != Path(__file__):
        filepaths.append(str(p.resolve()))


@pytest.mark.parametrize("filepath", filepaths)
def test_all_docs(filepath: str):
    """Each docs example must run as a standalone script and exit cleanly."""
    result = subprocess.run([sys.executable, filepath])
    assert result.returncode == 0
collerek-ormar-c09209a/examples/ 0000775 0000000 0000000 00000000000 15130200524 0016567 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/examples/__init__.py 0000664 0000000 0000000 00000000114 15130200524 0020674 0 ustar 00root root 0000000 0000000 from .utils import create_drop_database
# public surface of the examples helper package
__all__ = ["create_drop_database"]
collerek-ormar-c09209a/examples/fastapi_quick_start.py 0000664 0000000 0000000 00000004230 15130200524 0023200 0 ustar 00root root 0000000 0000000 from contextlib import asynccontextmanager
from typing import List, Optional
import databases
import ormar
import sqlalchemy
import uvicorn
from fastapi import FastAPI
DATABASE_URL = "sqlite:///test.db"

ormar_base_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL), metadata=sqlalchemy.MetaData()
)


@asynccontextmanager
async def lifespan(app: FastAPI):
    """FastAPI lifespan: connect the db on startup, disconnect on shutdown."""
    database_ = app.state.database
    if not database_.is_connected:
        await database_.connect()
    yield
    database_ = app.state.database
    if database_.is_connected:
        await database_.disconnect()
app = FastAPI(lifespan=lifespan)
# Reuse the objects the models are bound to: the lifespan handler connects
# app.state.database, so it must be the very same Database instance that
# ormar_base_config (and therefore the models) use. Previously a second,
# separate Database/MetaData pair was created here, so the connection the
# models actually use was never connected by the lifespan logic.
metadata = ormar_base_config.metadata
database = ormar_base_config.database
app.state.database = database
class Category(ormar.Model):
    """Category assignable to items."""

    ormar_config = ormar_base_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item with an optional category."""

    ormar_config = ormar_base_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
@app.get("/items/", response_model=List[Item])
async def get_items():
    """Return all items with their categories loaded in the same query."""
    items = await Item.objects.select_related("category").all()
    return items
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
    """Persist the item sent in the request body."""
    await item.save()
    return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
    """Persist the category sent in the request body."""
    await category.save()
    return category
@app.put("/items/{item_id}")
async def get_item(item_id: int, item: Item):
    # NOTE(review): despite its name this endpoint UPDATES an item; renaming
    # to update_item would change the generated OpenAPI operation id - confirm
    item_db = await Item.objects.get(pk=item_id)
    return await item_db.update(**item.model_dump())
@app.delete("/items/{item_id}")
async def delete_item(item_id: int, item: Item = None):
    """Delete an item - either the payload instance or one looked up by id."""
    if item:
        return {"deleted_rows": await item.delete()}
    item_db = await Item.objects.get(pk=item_id)
    return {"deleted_rows": await item_db.delete()}


if __name__ == "__main__":
    # to play with API run the script and visit http://127.0.0.1:8000/docs
    uvicorn.run(app, host="127.0.0.1", port=8000)
collerek-ormar-c09209a/examples/script_from_readme.py 0000664 0000000 0000000 00000034146 15130200524 0023015 0 ustar 00root root 0000000 0000000 from typing import Optional
import databases
import ormar
import pydantic
import sqlalchemy
DATABASE_URL = "sqlite:///db.sqlite"

# note that this step is optional -> all ormar cares is an individual
# OrmarConfig for each of the models, but this way you do not
# have to repeat the same parameters if you use only one database
base_ormar_config = ormar.OrmarConfig(
    database=databases.Database(DATABASE_URL),
    metadata=sqlalchemy.MetaData(),
    engine=sqlalchemy.create_engine(DATABASE_URL),
)
# Note that all type hints are optional
# below is a perfectly valid model declaration
# class Author(ormar.Model):
# ormar_config = base_ormar_config.copy(tablename="authors")
#
# id = ormar.Integer(primary_key=True) # <= notice no field types
# name = ormar.String(max_length=100)
class Author(ormar.Model):
    """Book author; the reverse side of Book.author is exposed as ``books``."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Book with an optional author FK and a nullable publication year."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
# create the database
# note that in production you should use migrations
# note that this is not required if you connect to existing database
# just to be sure we clear the db before -
# drop_all + create_all recreate the schema from the model metadata
base_ormar_config.metadata.drop_all(base_ormar_config.engine)
base_ormar_config.metadata.create_all(base_ormar_config.engine)
# all functions below are divided into functionality categories
# note how all functions are defined with async - hence can use await AND needs to
# be awaited on their own
async def create():
    """Insert sample authors and books using both creation styles."""
    # Create some records to work with through QuerySet.create method.
    # Note that queryset is exposed on each Model's class as objects
    tolkien = await Author.objects.create(name="J.R.R. Tolkien")
    await Book.objects.create(author=tolkien, title="The Hobbit", year=1937)
    await Book.objects.create(author=tolkien, title="The Lord of the Rings", year=1955)
    await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)

    # alternative creation of object divided into 2 steps
    sapkowski = Author(name="Andrzej Sapkowski")
    # do some stuff
    await sapkowski.save()

    # or save() after initialization
    await Book(author=sapkowski, title="The Witcher", year=1990).save()
    await Book(author=sapkowski, title="The Tower of Fools", year=2002).save()

    # to read more about inserting data into the database
    # visit: https://collerek.github.io/ormar/queries/create/
async def read():
    """Fetch single rows, whole tables and lazily-loaded related models."""
    # Fetch an instance, without loading a foreign key relationship on it.
    # Django style
    book = await Book.objects.get(title="The Hobbit")
    # or python style
    book = await Book.objects.get(Book.title == "The Hobbit")

    book2 = await Book.objects.first()
    # first() fetch the instance with lower primary key value
    assert book == book2

    # you can access all fields on loaded model
    assert book.title == "The Hobbit"
    assert book.year == 1937

    # when no condition is passed to get()
    # it behaves as last() based on primary key column
    book3 = await Book.objects.get()
    assert book3.title == "The Tower of Fools"

    # When you have a relation, ormar always defines a related model for you
    # even when all you loaded is a foreign key value like in this example
    assert isinstance(book.author, Author)
    # primary key is populated from foreign key stored in books table
    assert book.author.pk == 1
    # since the related model was not loaded all other fields are None
    assert book.author.name is None

    # Load the relationship from the database when you already have the related model
    # alternatively see joins section below
    await book.author.load()
    assert book.author.name == "J.R.R. Tolkien"

    # get all rows for given model
    authors = await Author.objects.all()
    assert len(authors) == 2

    # to read more about reading data from the database
    # visit: https://collerek.github.io/ormar/queries/read/
async def update():
    """Update rows via instance ``update()`` and via ``filter().update()``."""
    # read existing row from db
    tolkien = await Author.objects.get(name="J.R.R. Tolkien")
    assert tolkien.name == "J.R.R. Tolkien"
    tolkien_id = tolkien.id

    # change the selected property
    tolkien.name = "John Ronald Reuel Tolkien"
    # call update on a model instance
    await tolkien.update()

    # confirm that object was updated
    tolkien = await Author.objects.get(name="John Ronald Reuel Tolkien")
    assert tolkien.name == "John Ronald Reuel Tolkien"
    assert tolkien.id == tolkien_id

    # alternatively update data without loading
    await Author.objects.filter(name__contains="Tolkien").update(name="J.R.R. Tolkien")

    # to read more about updating data in the database
    # visit: https://collerek.github.io/ormar/queries/update/
async def delete():
    """Delete rows via instance ``delete()`` and queryset ``delete()``."""
    silmarillion = await Book.objects.get(year=1977)
    # call delete() on instance
    await silmarillion.delete()

    # alternatively delete without loading
    await Book.objects.delete(title="The Tower of Fools")

    # note that when there is no record ormar raises NoMatch exception
    try:
        await Book.objects.get(year=1977)
    except ormar.NoMatch:
        print("No book from 1977!")

    # to read more about deleting data from the database
    # visit: https://collerek.github.io/ormar/queries/delete/

    # note that despite the fact that record no longer exists in database
    # the object above is still accessible and you can use it (and i.e. save()) again.
    tolkien = silmarillion.author
    await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
async def joins():
    """Join related models with select_related / prefetch_related."""
    # To join two models use select_related
    # Django style
    book = await Book.objects.select_related("author").get(title="The Hobbit")
    # Python style
    book = await Book.objects.select_related(Book.author).get(
        Book.title == "The Hobbit"
    )

    # now the author is already prefetched
    assert book.author.name == "J.R.R. Tolkien"

    # By default you also get a second side of the relation
    # constructed as lowercase source model name +'s' (books in this case)
    # you can also provide custom name with parameter related_name
    # Django style
    author = await Author.objects.select_related("books").all(name="J.R.R. Tolkien")
    # Python style
    author = await Author.objects.select_related(Author.books).all(
        Author.name == "J.R.R. Tolkien"
    )
    assert len(author[0].books) == 3

    # for reverse and many to many relations you can also prefetch_related
    # that executes a separate query for each of related models
    # Django style
    author = await Author.objects.prefetch_related("books").get(name="J.R.R. Tolkien")
    # Python style
    author = await Author.objects.prefetch_related(Author.books).get(
        Author.name == "J.R.R. Tolkien"
    )
    assert len(author.books) == 3

    # to read more about relations
    # visit: https://collerek.github.io/ormar/relations/
    # to read more about joins and subqueries
    # visit: https://collerek.github.io/ormar/queries/joins-and-subqueries/
async def filter_and_sort():
    """Demonstrate filtering (incl. related fields) and ordering of querysets."""
    # to filter the query you can use filter() or pass key-value pars to
    # get(), all() etc.
    # to use special methods or access related model fields use double
    # underscore like to filter by the name of the author use author__name
    # Django style
    books = await Book.objects.all(author__name="J.R.R. Tolkien")
    # python style
    books = await Book.objects.all(Book.author.name == "J.R.R. Tolkien")
    assert len(books) == 3

    # filter can accept special methods also separated with double underscore
    # to issue sql query ` where authors.name like "%tolkien%"` that is not
    # case sensitive (hence small t in Tolkien)
    # Django style
    books = await Book.objects.filter(author__name__icontains="tolkien").all()
    # python style
    books = await Book.objects.filter(Book.author.name.icontains("tolkien")).all()
    assert len(books) == 3

    # to sort use order_by() function of queryset
    # to sort decreasing use hyphen before the field name
    # same as with filter you can use double underscores to access related fields
    # Django style
    books = (
        await Book.objects.filter(author__name__icontains="tolkien")
        .order_by("-year")
        .all()
    )
    # python style
    books = (
        await Book.objects.filter(Book.author.name.icontains("tolkien"))
        .order_by(Book.year.desc())
        .all()
    )
    assert len(books) == 3
    assert books[0].title == "The Silmarillion"
    assert books[2].title == "The Hobbit"

    # to read more about filtering and ordering
    # visit: https://collerek.github.io/ormar/queries/filter-and-sort/
async def subset_of_columns():
    """Demonstrate loading only a subset of model columns with fields()/exclude_fields()."""
    # to exclude some columns from loading when querying the database
    # you can use fields() method
    hobbit = await Book.objects.fields(["title"]).get(title="The Hobbit")
    # note that fields not included in fields are empty (set to None)
    assert hobbit.year is None
    assert hobbit.author is None
    # selected field is there
    assert hobbit.title == "The Hobbit"

    # alternatively you can provide columns you want to exclude
    hobbit = await Book.objects.exclude_fields(["year"]).get(title="The Hobbit")
    # year is still not set
    assert hobbit.year is None
    # but author is back
    assert hobbit.author is not None

    # also you cannot exclude primary key column - it's always there
    # even if you EXPLICITLY exclude it it will be there
    # note that each model have a shortcut for primary_key column which is pk
    # and you can filter/access/set the values by this alias like below
    assert hobbit.pk is not None

    # note that you cannot exclude fields that are not nullable
    # (required) in model definition
    try:
        await Book.objects.exclude_fields(["title"]).get(title="The Hobbit")
    except pydantic.ValidationError:
        print("Cannot exclude non nullable field title")

    # to read more about selecting subset of columns
    # visit: https://collerek.github.io/ormar/queries/select-columns/
async def pagination():
    """Demonstrate limiting/offsetting result rows and page-based pagination."""
    # to limit number of returned rows use limit()
    books = await Book.objects.limit(1).all()
    assert len(books) == 1
    assert books[0].title == "The Hobbit"

    # to offset number of returned rows use offset()
    books = await Book.objects.limit(1).offset(1).all()
    assert len(books) == 1
    assert books[0].title == "The Lord of the Rings"

    # alternatively use paginate that combines both
    books = await Book.objects.paginate(page=2, page_size=2).all()
    assert len(books) == 2
    # note that we removed one book of Sapkowski in delete()
    # and recreated The Silmarillion - by default when no order_by is set
    # ordering sorts by primary_key column
    assert books[0].title == "The Witcher"
    assert books[1].title == "The Silmarillion"

    # to read more about pagination and number of rows
    # visit: https://collerek.github.io/ormar/queries/pagination-and-rows-number/
async def aggregations():
    """Demonstrate aggregate queryset functions: count, exists, max, min, avg, sum."""
    # count:
    assert 2 == await Author.objects.count()
    # exists
    assert await Book.objects.filter(title="The Hobbit").exists()
    # maximum
    assert 1990 == await Book.objects.max(columns=["year"])
    # minimum
    assert 1937 == await Book.objects.min(columns=["year"])
    # average
    assert 1964.75 == await Book.objects.avg(columns=["year"])
    # sum
    assert 7859 == await Book.objects.sum(columns=["year"])

    # to read more about aggregated functions
    # visit: https://collerek.github.io/ormar/queries/aggregations/
async def raw_data():
    """Demonstrate extracting raw rows as dicts/tuples with values()/values_list()."""
    # extract raw data in a form of dicts or tuples
    # note that this skips the validation(!) as models are
    # not created from parsed data

    # get list of objects as dicts
    assert await Book.objects.values() == [
        {"id": 1, "author": 1, "title": "The Hobbit", "year": 1937},
        {"id": 2, "author": 1, "title": "The Lord of the Rings", "year": 1955},
        {"id": 4, "author": 2, "title": "The Witcher", "year": 1990},
        {"id": 5, "author": 1, "title": "The Silmarillion", "year": 1977},
    ]
    # get list of objects as tuples
    assert await Book.objects.values_list() == [
        (1, 1, "The Hobbit", 1937),
        (2, 1, "The Lord of the Rings", 1955),
        (4, 2, "The Witcher", 1990),
        (5, 1, "The Silmarillion", 1977),
    ]

    # filter data - note how you always get a list
    assert await Book.objects.filter(title="The Hobbit").values() == [
        {"id": 1, "author": 1, "title": "The Hobbit", "year": 1937}
    ]

    # select only wanted fields
    assert await Book.objects.filter(title="The Hobbit").values(["id", "title"]) == [
        {"id": 1, "title": "The Hobbit"}
    ]

    # if you select only one column you could flatten it with values_list
    assert await Book.objects.values_list("title", flatten=True) == [
        "The Hobbit",
        "The Lord of the Rings",
        "The Witcher",
        "The Silmarillion",
    ]

    # to read more about extracting raw values
    # visit: https://collerek.github.io/ormar/queries/aggregations/
async def with_connect(function):
    """Run the given coroutine function inside an open database connection context."""
    # note that for any other backend than sqlite you actually need to
    # connect to the database to perform db operations
    async with base_ormar_config.database:
        await function()


# note that if you use framework like `fastapi` you shouldn't connect
# in your endpoints but have a global connection pool
# check https://collerek.github.io/ormar/fastapi/ and section with db connection
# gather and execute all functions
# note - normally import should be at the beginning of the file
import asyncio

# note that normally you use gather() function to run several functions
# concurrently but we actually modify the data and we rely on the order of functions
# (each example builds on the database state left by the previous one)
for func in [
    create,
    read,
    update,
    delete,
    joins,
    filter_and_sort,
    subset_of_columns,
    pagination,
    aggregations,
    raw_data,
]:
    print(f"Executing: {func.__name__}")
    asyncio.run(with_connect(func))

# drop the database tables
base_ormar_config.metadata.drop_all(base_ormar_config.engine)
collerek-ormar-c09209a/examples/utils.py 0000664 0000000 0000000 00000001202 15130200524 0020274 0 ustar 00root root 0000000 0000000 import functools
import ormar
import sqlalchemy
def create_drop_database(base_config: "ormar.OrmarConfig") -> "Callable[[Callable], Callable]":
    """
    Decorator factory: create all tables before the wrapped coroutine runs
    and drop them afterwards.

    Note that in production you should use migrations instead.

    Fixes vs. previous version:
    * the return annotation was ``-> None`` although a decorator is returned
      (annotations are quoted so no ``typing`` import is required at runtime)
    * tables are now dropped in a ``finally`` block, so the schema is cleaned
      up even when the wrapped coroutine raises

    :param base_config: ormar config holding the database url and metadata
    :return: decorator that wraps an async function with create/drop of tables
    """

    def wrapper(func: "Callable") -> "Callable":
        @functools.wraps(func)
        async def wrapped(*args: "Any") -> None:
            engine = sqlalchemy.create_engine(str(base_config.database.url))
            # drop first so a crashed previous run cannot leave stale tables
            base_config.metadata.drop_all(engine)
            base_config.metadata.create_all(engine)
            try:
                await func(*args)
            finally:
                base_config.metadata.drop_all(engine)

        return wrapped

    return wrapper
collerek-ormar-c09209a/mkdocs.yml 0000664 0000000 0000000 00000005716 15130200524 0016765 0 ustar 00root root 0000000 0000000 site_name: ormar
site_url: 'https://collerek.github.io/ormar/'
site_description: A simple async ORM with fastapi in mind and pydantic validation.
nav:
- Overview: index.md
- Installation: install.md
- Models:
- Definition: models/index.md
- Inheritance: models/inheritance.md
- Methods: models/methods.md
- Migrations: models/migrations.md
- Internals: models/internals.md
- Fields:
- Common parameters: fields/common-parameters.md
- Fields types: fields/field-types.md
- Pydantic only fields: fields/pydantic-fields.md
- Fields encryption: fields/encryption.md
- Relations:
- Relation types: relations/index.md
- relations/foreign-key.md
- relations/many-to-many.md
- relations/postponed-annotations.md
- relations/queryset-proxy.md
- Queries:
- queries/index.md
- queries/create.md
- queries/read.md
- queries/update.md
- queries/delete.md
- queries/joins-and-subqueries.md
- queries/filter-and-sort.md
- queries/select-columns.md
- queries/pagination-and-rows-number.md
- queries/aggregations.md
- Return raw data: queries/raw-data.md
- Signals: signals.md
- Transactions: transactions.md
- Use with Fastapi:
- Quick Start: fastapi/index.md
- Using ormar in responses: fastapi/response.md
- Using ormar in requests: fastapi/requests.md
- Use with mypy: mypy.md
- Migration to v 0.20: migration.md
- PyCharm plugin: plugin.md
- Contributing: contributing.md
- Release Notes: releases.md
- Api (BETA): api/
repo_name: collerek/ormar
repo_url: https://github.com/collerek/ormar
theme:
name: material
highlightjs: true
hljs_languages:
- python
palette:
- media: "(prefers-color-scheme: light)"
scheme: default
primary: indigo
accent: indigo
toggle:
icon: material/lightbulb
name: Switch to light mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
primary: indigo
accent: indigo
toggle:
icon: material/lightbulb-outline
name: Switch to dark mode
analytics:
gtag: G-ZJWZYM5DNM
markdown_extensions:
- admonition
- pymdownx.superfences
- pymdownx.snippets:
base_path: docs
- pymdownx.inlinehilite
- pymdownx.highlight:
linenums: true
plugins:
- mike:
alias_type: symlink
canonical_version: latest
version_selector: true
- search
- gen-files:
scripts:
- docs/gen_ref_pages.py
- literate-nav:
nav_file: SUMMARY.md
- section-index
- mkdocstrings:
watch:
- ormar
handlers:
python:
selection:
docstring_style: sphinx
rendering:
show_submodules: no
extra:
analytics:
provider: google
property: UA-72514911-3
version:
provider: mike
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/highlight.min.js
- javascripts/config.js
extra_css:
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/styles/default.min.css
collerek-ormar-c09209a/ormar/ 0000775 0000000 0000000 00000000000 15130200524 0016071 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/__init__.py 0000664 0000000 0000000 00000006044 15130200524 0020206 0 ustar 00root root 0000000 0000000 """
The `ormar` package is an async mini ORM for Python, with support for **Postgres,
MySQL**, and **SQLite**.
The main benefits of using `ormar` are:
* getting an **async ORM that can be used with async frameworks**
(fastapi, starlette etc.)
* getting just **one model to maintain** - you don't have to maintain pydantic
and other orm model (sqlalchemy, peewee, gino etc.)
The goal was to create a simple ORM that can be **used directly
(as request and response models)
with `fastapi`** and that bases its data validation on pydantic.
Ormar - apart from the obvious ORM in the name - gets its name from ormar in Swedish, which means
snakes, and ormar(e) in italian which means cabinet.
And what's a better name for python ORM than snakes cabinet :)
"""
from ormar.protocols import QuerySetProtocol, RelationProtocol # noqa: I001
from importlib.metadata import version
from ormar.decorators import ( # noqa: I100
post_bulk_update,
post_delete,
post_relation_add,
post_relation_remove,
post_save,
post_update,
pre_delete,
pre_relation_add,
pre_relation_remove,
pre_save,
pre_update,
)
from ormar.exceptions import ( # noqa: I100
ModelDefinitionError,
MultipleMatches,
NoMatch,
)
from ormar.fields import (
DECODERS_MAP,
ENCODERS_MAP,
JSON,
SQL_ENCODERS_MAP,
UUID,
BaseField,
BigInteger,
Boolean,
CheckColumns,
Date,
DateTime,
Decimal,
EncryptBackends,
Enum,
Float,
ForeignKey,
ForeignKeyField,
IndexColumns,
Integer,
LargeBinary,
ManyToMany,
ManyToManyField,
ReferentialAction,
SmallInteger,
String,
Text,
Time,
UniqueColumns,
)
# noqa: I100
from ormar.models import ExcludableItems, Extra, Model, OrmarConfig
from ormar.queryset import OrderAction, QuerySet, and_, or_
from ormar.relations import RelationType
from ormar.signals import Signal
class UndefinedType:  # pragma no cover
    """Sentinel type backing ormar's ``Undefined`` marker value."""

    def __repr__(self) -> str:
        return "OrmarUndefined"
Undefined = UndefinedType()
__version__ = version("ormar")
__all__ = [
"Integer",
"BigInteger",
"SmallInteger",
"Boolean",
"Time",
"Text",
"String",
"JSON",
"DateTime",
"Date",
"Decimal",
"Enum",
"Float",
"ManyToMany",
"Model",
"ModelDefinitionError",
"MultipleMatches",
"NoMatch",
"ForeignKey",
"QuerySet",
"RelationType",
"Undefined",
"UUID",
"UniqueColumns",
"IndexColumns",
"CheckColumns",
"ReferentialAction",
"QuerySetProtocol",
"RelationProtocol",
"post_bulk_update",
"post_delete",
"post_save",
"post_update",
"post_relation_add",
"post_relation_remove",
"pre_delete",
"pre_save",
"pre_update",
"pre_relation_remove",
"pre_relation_add",
"Signal",
"BaseField",
"ManyToManyField",
"ForeignKeyField",
"OrderAction",
"ExcludableItems",
"and_",
"or_",
"EncryptBackends",
"ENCODERS_MAP",
"SQL_ENCODERS_MAP",
"DECODERS_MAP",
"LargeBinary",
"Extra",
"OrmarConfig",
]
collerek-ormar-c09209a/ormar/decorators/ 0000775 0000000 0000000 00000000000 15130200524 0020236 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/decorators/__init__.py 0000664 0000000 0000000 00000001213 15130200524 0022344 0 ustar 00root root 0000000 0000000 """
Module with all decorators that are exposed for users.
Currently only:
* predefined signals decorators (pre/post + save/update/delete)
"""
from ormar.decorators.signals import (
post_bulk_update,
post_delete,
post_relation_add,
post_relation_remove,
post_save,
post_update,
pre_delete,
pre_relation_add,
pre_relation_remove,
pre_save,
pre_update,
)
__all__ = [
"post_bulk_update",
"post_delete",
"post_save",
"post_update",
"pre_delete",
"pre_save",
"pre_update",
"post_relation_remove",
"post_relation_add",
"pre_relation_remove",
"pre_relation_add",
]
collerek-ormar-c09209a/ormar/decorators/signals.py 0000664 0000000 0000000 00000014441 15130200524 0022254 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Callable, List, Type, Union
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
def receiver(
    signal: str, senders: Union[Type["Model"], List[Type["Model"]]]
) -> Callable:
    """
    Decorator factory that connects the decorated function to the signal
    named ``signal`` on every model class passed in ``senders``.

    :param signal: name of the signal to register to
    :type signal: str
    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator returning the original function untouched
    :rtype: Callable
    """

    def _decorator(func: Callable) -> Callable:
        """
        Register ``func`` on each sender's signal registry and return it as-is.

        :param func: function to register as receiver
        :type func: Callable
        :return: untouched function, already registered for the given signal
        :rtype: Callable
        """
        # normalize so a single class and a list of classes are handled alike
        targets = senders if isinstance(senders, list) else [senders]
        for model_cls in targets:
            # each model keeps its own signal registry on ormar_config.signals
            getattr(model_cls.ormar_config.signals, signal).connect(func)
        return func

    return _decorator
def post_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_save`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_save", senders)
def post_update(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_update`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_update", senders)
def post_delete(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_delete`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_delete", senders)
def pre_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``pre_save`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("pre_save", senders)
def pre_update(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``pre_update`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("pre_update", senders)
def pre_delete(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``pre_delete`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("pre_delete", senders)
def pre_relation_add(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``pre_relation_add`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("pre_relation_add", senders)
def post_relation_add(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_relation_add`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_relation_add", senders)
def pre_relation_remove(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``pre_relation_remove`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("pre_relation_remove", senders)
def post_relation_remove(
    senders: Union[Type["Model"], List[Type["Model"]]]
) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_relation_remove`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_relation_remove", senders)
def post_bulk_update(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
    """
    Shortcut for :func:`receiver` bound to the ``post_bulk_update`` signal.

    :param senders: one "Model" class or a list of such classes
        that should have the signal receiver registered
    :type senders: Union[Type["Model"], List[Type["Model"]]]
    :return: decorator that registers and returns the receiver unchanged
    :rtype: Callable
    """
    return receiver("post_bulk_update", senders)
collerek-ormar-c09209a/ormar/exceptions.py 0000664 0000000 0000000 00000003422 15130200524 0020625 0 ustar 00root root 0000000 0000000 """
Gathers all exceptions thrown by ormar.
"""
class AsyncOrmException(Exception):
    """Base class for every exception raised by ormar."""
class ModelDefinitionError(AsyncOrmException):
    """
    Signals an invalid model definition, for example:

    * a Field declared without its required parameters
    * a model declaring more than one primary_key
    * a model declared without any primary_key
    """
class ModelError(AsyncOrmException):
    """Raised when a model is initialized with a non-existing field keyword."""
class NoMatch(AsyncOrmException):
    """Raised when a database query that requires a result returns none (empty result)."""
class MultipleMatches(AsyncOrmException):
    """
    Raised when a query expected to return a single row (i.e. get, first etc.)
    finds multiple matching results in the response.
    """
class QueryDefinitionError(AsyncOrmException):
    """
    Signals an invalid query definition, for example:

    * using contains or icontains filter with an instance of the Model
    * using Queryset.update() without a filter and without each flag set to True
    * using Queryset.delete() without a filter and without each flag set to True
    """
class RelationshipInstanceError(AsyncOrmException):
    # NOTE(review): no docstring in the original; raised by relation handling
    # code - exact conditions are not visible from this module, verify against
    # ormar.relations before documenting specifics.
    pass
class ModelPersistenceError(AsyncOrmException):
    """
    Raised when updating a model without a primary_key set (cannot retrieve it
    from the db) or when saving a model related to an unsaved model (cannot
    extract the fk value).
    """
class SignalDefinitionError(AsyncOrmException):
    """Raised when a non-callable receiver is passed as a signal callback."""
class ModelListEmptyError(AsyncOrmException):
    """Raised when bulk_update is called with an empty list of objects."""
collerek-ormar-c09209a/ormar/fields/ 0000775 0000000 0000000 00000000000 15130200524 0017337 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/fields/__init__.py 0000664 0000000 0000000 00000003011 15130200524 0021443 0 ustar 00root root 0000000 0000000 """
Module with classes and constructors for ormar Fields.
Base Fields types (like String, Integer etc.)
as well as relation Fields (ForeignKey, ManyToMany).
Also a definition for custom CHAR based sqlalchemy UUID field
"""
from ormar.fields.base import BaseField
from ormar.fields.constraints import CheckColumns, IndexColumns, UniqueColumns
from ormar.fields.foreign_key import ForeignKey, ForeignKeyField
from ormar.fields.many_to_many import ManyToMany, ManyToManyField
from ormar.fields.model_fields import (
JSON,
UUID,
BigInteger,
Boolean,
Date,
DateTime,
Decimal,
Enum,
Float,
Integer,
LargeBinary,
SmallInteger,
String,
Text,
Time,
)
from ormar.fields.parsers import DECODERS_MAP, ENCODERS_MAP, SQL_ENCODERS_MAP
from ormar.fields.referential_actions import ReferentialAction
from ormar.fields.sqlalchemy_encrypted import EncryptBackend, EncryptBackends
from ormar.fields.through_field import Through, ThroughField
__all__ = [
"Decimal",
"BigInteger",
"SmallInteger",
"Boolean",
"Date",
"DateTime",
"String",
"JSON",
"Integer",
"Text",
"Float",
"Time",
"UUID",
"Enum",
"ForeignKey",
"ManyToMany",
"ManyToManyField",
"BaseField",
"ForeignKeyField",
"ThroughField",
"Through",
"EncryptBackends",
"EncryptBackend",
"DECODERS_MAP",
"ENCODERS_MAP",
"SQL_ENCODERS_MAP",
"LargeBinary",
"UniqueColumns",
"IndexColumns",
"CheckColumns",
"ReferentialAction",
]
collerek-ormar-c09209a/ormar/fields/base.py 0000664 0000000 0000000 00000034771 15130200524 0020637 0 ustar 00root root 0000000 0000000 from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Literal,
Optional,
Type,
Union,
overload,
)
import sqlalchemy
from pydantic.fields import FieldInfo, _Unset
import ormar # noqa I101
from ormar import ModelDefinitionError
from ormar.fields.sqlalchemy_encrypted import (
EncryptBackend,
EncryptBackends,
EncryptedString,
)
if TYPE_CHECKING: # pragma no cover
from ormar.models import Model, NewBaseModel
class BaseField(FieldInfo):
"""
BaseField serves as a parent class for all basic Fields in ormar.
It keeps all common parameters available for all fields as well as
set of useful functions.
All values are kept as class variables, ormar Fields are never instantiated.
Subclasses pydantic.FieldInfo to keep the fields related
to pydantic field types like ConstrainedStr
"""
def __init__(self, **kwargs: Any) -> None:
    """
    Pop all ormar-specific settings from ``kwargs`` onto the instance, then
    forward the remainder (plus the computed pydantic default) to
    ``pydantic.FieldInfo.__init__``.
    """
    # internal type bookkeeping used when building the pydantic model
    self.__type__: type = kwargs.pop("__type__", None)
    self.__pydantic_type__: type = kwargs.pop("__pydantic_type__", None)
    self.__sample__: type = kwargs.pop("__sample__", None)
    self.related_name = kwargs.pop("related_name", None)

    # database column definition
    self.column_type: sqlalchemy.Column = kwargs.pop("column_type", None)
    self.constraints: List = kwargs.pop("constraints", list())
    self.name: str = kwargs.pop("name", None)
    # user-facing keyword is "alias"; stored as db_alias (db column name)
    self.db_alias: str = kwargs.pop("alias", None)

    # column flags
    self.primary_key: bool = kwargs.pop("primary_key", False)
    self.autoincrement: bool = kwargs.pop("autoincrement", False)
    self.nullable: bool = kwargs.pop("nullable", False)
    self.sql_nullable: bool = kwargs.pop("sql_nullable", False)
    self.index: bool = kwargs.pop("index", False)
    self.unique: bool = kwargs.pop("unique", False)

    # relation-related flags and targets
    self.virtual: bool = kwargs.pop(
        "virtual", None
    )  # ManyToManyFields and reverse ForeignKeyFields
    self.is_multi: bool = kwargs.pop("is_multi", None)  # ManyToManyField
    self.is_relation: bool = kwargs.pop(
        "is_relation", None
    )  # ForeignKeyField + subclasses
    self.is_through: bool = kwargs.pop("is_through", False)  # ThroughFields
    self.through_relation_name = kwargs.pop("through_relation_name", None)
    self.through_reverse_relation_name = kwargs.pop(
        "through_reverse_relation_name", None
    )
    self.skip_reverse: bool = kwargs.pop("skip_reverse", False)
    self.skip_field: bool = kwargs.pop("skip_field", False)
    self.owner: Type["Model"] = kwargs.pop("owner", None)
    self.to: Type["Model"] = kwargs.pop("to", None)
    self.to_pk_only: Type["Model"] = kwargs.pop("to_pk_only", None)
    self.through: Type["Model"] = kwargs.pop("through", None)
    self.self_reference: bool = kwargs.pop("self_reference", False)
    self.self_reference_primary: Optional[str] = kwargs.pop(
        "self_reference_primary", None
    )
    # default ordering applied on this relation / its reverse side
    self.orders_by: Optional[List[str]] = kwargs.pop("orders_by", None)
    self.related_orders_by: Optional[List[str]] = kwargs.pop(
        "related_orders_by", None
    )

    # column-level encryption configuration
    self.encrypt_secret: str = kwargs.pop("encrypt_secret", None)
    self.encrypt_backend: EncryptBackends = kwargs.pop(
        "encrypt_backend", EncryptBackends.NONE
    )
    self.encrypt_custom_backend: Optional[Type[EncryptBackend]] = kwargs.pop(
        "encrypt_custom_backend", None
    )

    # "default" is kept under ormar_default so it does not clash with
    # pydantic's own default handling in FieldInfo
    self.ormar_default: Any = kwargs.pop("default", None)
    self.server_default: Any = kwargs.pop("server_default", None)

    self.comment: str = kwargs.pop("comment", None)
    self.represent_as_base64_str: bool = kwargs.pop(
        "represent_as_base64_str", False
    )

    # any leftover kwargs are both stored on the instance and passed through
    # to pydantic's FieldInfo below
    for name, value in kwargs.items():
        setattr(self, name, value)

    # inject the computed pydantic default/default_factory before delegating
    kwargs.update(self.get_pydantic_default())
    super().__init__(**kwargs)
def is_valid_uni_relation(self) -> bool:
    """
    Check whether this field is a plain ForeignKey relation definition,
    i.e. not a ManyToMany field and not a virtual (reverse) ForeignKey.

    Used to decide whether the field is a db ForeignKey column that should
    be saved/populated when dealing only with the Model's own columns.

    :return: result of the check
    :rtype: bool
    """
    if self.is_multi or self.virtual:
        return False
    return True
def get_alias(self) -> str:
    """
    Translate the Model field name to the database column name for db queries.

    :return: the custom database column name if one was defined by the user,
        otherwise the ormar/pydantic field name
    :rtype: str
    """
    return self.db_alias or self.name
def get_pydantic_default(self) -> Dict:
    """
    Build the keyword arguments describing the pydantic default for this
    field (``default`` or ``default_factory``), fixing pydantic Json fields
    being set to required=False. Used in the ormar Model Metaclass.

    :return: dict of keyword arguments for pydantic.FieldInfo
    :rtype: Dict
    """
    computed = self.default_value()
    if computed is not None:
        return computed
    # no explicit default: nullable fields default to None,
    # everything else stays unset (required)
    return {"default": None} if self.nullable else {"default": _Unset}
def default_value(self, use_server: bool = False) -> Optional[Dict]:
    """
    Return the pydantic default description for this field as a dict with
    either ``default`` (static value) or ``default_factory`` (callable).

    An autoincrement primary key always defaults to None. Otherwise the
    field has to have either ``default`` or (with ``use_server``)
    ``server_default`` populated; if neither is set, None is returned.
    Used when converting to a pydantic FieldInfo.

    :param use_server: flag marking if server_default should be
        treated as a default value, default False
    :type use_server: bool
    :return: dict describing the default, or None when no default applies
    :rtype: Optional[Dict]
    """
    if self.is_auto_primary_key():
        return dict(default=None)
    if not self.has_default(use_server=use_server):
        return None
    default = (
        self.ormar_default if self.ormar_default is not None else self.server_default
    )
    key = "default_factory" if callable(default) else "default"
    return {key: default}
@overload
def get_default(
    self,
    *,
    call_default_factory: Literal[True],
    validated_data: Union[dict[str, Any], None] = None,
) -> Any: ...

@overload
def get_default(self, *, call_default_factory: Literal[False] = ...) -> Any: ...

def get_default(
    self,
    *,
    call_default_factory: bool = True,
    validated_data: Union[dict[str, Any], None] = None,
    use_server: bool = False,
) -> Any:  # noqa CCR001
    """
    Return default value for a field.
    If the field is Callable the function is called and actual result is returned.
    Used to populate default_values for pydantic Model in ormar Model Metaclass.

    :param call_default_factory: when True and the default is callable,
        the callable is invoked and its result returned
    :type call_default_factory: bool
    :param validated_data: accepted for pydantic FieldInfo signature
        compatibility; NOTE(review): never read in this implementation
    :type validated_data: Union[dict[str, Any], None]
    :param use_server: flag marking if server_default should be
        treated as default value, default False
    :type use_server: bool
    :return: default value for the field if set, otherwise implicit None
    :rtype: Any
    """
    # note: has_default() is called with its own default use_server=True here,
    # while the server value below is gated by this method's use_server flag -
    # so with use_server=False a server-only default resolves to None
    if self.has_default():
        default = (
            self.ormar_default
            if self.ormar_default is not None
            else self._get_default_server_value(use_server=use_server)
        )
        return self._get_default_callable_value(
            default=default,
            call_default_factory=call_default_factory,
        )
    # implicit None is returned when no default is configured
def _get_default_server_value(self, use_server: bool) -> Any:  # pragma: no cover
    """
    Return the server-side default when ``use_server`` is True, else None.
    """
    if not use_server:
        return None
    return self.server_default
@staticmethod
def _get_default_callable_value(default: Any, call_default_factory: bool) -> Any:
    """
    Invoke ``default`` when it is callable and ``call_default_factory`` is
    True; otherwise return it unchanged.
    """
    if call_default_factory and callable(default):
        return default()
    return default
def has_default(self, use_server: bool = True) -> bool:
    """
    Check whether the field has a default value configured.

    :param use_server: flag marking if server_default should be
        treated as a default value, default True
    :type use_server: bool
    :return: True if a default (or, with use_server, a server default) is set
    :rtype: bool
    """
    if self.ormar_default is not None:
        return True
    return use_server and self.server_default is not None
def is_auto_primary_key(self) -> bool:
    """
    Check whether the field is a primary key with autoincrement enabled.
    An autoincrement primary_key is nullable/optional.

    :return: True only for autoincrement primary keys
    :rtype: bool
    """
    if not self.primary_key:
        return False
    return self.autoincrement
def construct_constraints(self) -> List:
    """
    Convert the list of ormar constraints into sqlalchemy ForeignKeys.

    Has to be done dynamically as sqlalchemy binds a ForeignKey to the table,
    and subclasses of the current model need a fresh ForeignKey each.

    :return: list of sqlalchemy foreign keys - by default one
    :rtype: List[sqlalchemy.schema.ForeignKey]
    """
    foreign_keys: List = []
    for con in self.constraints:
        fk_name = (
            f"fk_{self.owner.ormar_config.tablename}_{self.to.ormar_config.tablename}"
            f"_{self.to.get_column_alias(self.to.ormar_config.pkname)}_{self.name}"
        )
        foreign_keys.append(
            sqlalchemy.ForeignKey(
                con.reference,
                ondelete=con.ondelete,
                onupdate=con.onupdate,
                name=fk_name,
            )
        )
    return foreign_keys
def get_column(self, name: str) -> sqlalchemy.Column:
"""
Returns definition of sqlalchemy.Column used in creation of sqlalchemy.Table.
Populates name, column type constraints, as well as a number of parameters like
primary_key, index, unique, nullable, default and server_default.
:param name: name of the db column - used if alias is not set
:type name: str
:return: actual definition of the database column as sqlalchemy requires.
:rtype: sqlalchemy.Column
"""
if self.encrypt_backend == EncryptBackends.NONE:
column: sqlalchemy.Column = sqlalchemy.Column(
self.db_alias or name,
self.column_type,
*self.construct_constraints(),
primary_key=self.primary_key,
nullable=self.sql_nullable,
index=self.index,
unique=self.unique,
default=self.ormar_default,
server_default=self.server_default,
comment=self.comment,
)
else:
column = self._get_encrypted_column(name=name)
return column
def _get_encrypted_column(self, name: str) -> sqlalchemy.Column:
"""
Returns EncryptedString column type instead of actual column.
:param name: column name
:type name: str
:return: newly defined column
:rtype: sqlalchemy.Column
"""
if self.primary_key or self.is_relation:
raise ModelDefinitionError(
"Primary key field and relations fields" "cannot be encrypted!"
)
column: sqlalchemy.Column = sqlalchemy.Column(
self.db_alias or name,
EncryptedString(
_field_type=self,
encrypt_secret=self.encrypt_secret,
encrypt_backend=self.encrypt_backend,
encrypt_custom_backend=self.encrypt_custom_backend,
),
nullable=self.nullable,
index=self.index,
unique=self.unique,
default=self.ormar_default,
server_default=self.server_default,
)
return column
def expand_relationship(
self,
value: Any,
child: Union["Model", "NewBaseModel"],
to_register: bool = True,
) -> Any:
"""
Function overwritten for relations, in basic field the value is returned as is.
For relations the child model is first constructed (if needed),
registered in relation and returned.
For relation fields the value can be a pk value (Any type of field),
dict (from Model) or actual instance/list of a "Model".
:param value: a Model field value, returned untouched for non relation fields.
:type value: Any
:param child: a child Model to register
:type child: Union["Model", "NewBaseModel"]
:param to_register: flag if the relation should be set in RelationshipManager
:type to_register: bool
:return: returns untouched value for normal fields, expands only for relations
:rtype: Any
"""
return value
def set_self_reference_flag(self) -> None:
"""
Sets `self_reference` to True if field to and owner are same model.
:return: None
:rtype: None
"""
if self.owner is not None and (
self.owner == self.to or self.owner.ormar_config == self.to.ormar_config
):
self.self_reference = True
self.self_reference_primary = self.name
def has_unresolved_forward_refs(self) -> bool:
"""
Verifies if the filed has any ForwardRefs that require updating before the
model can be used.
:return: result of the check
:rtype: bool
"""
return False
def evaluate_forward_ref(self, globalns: Any, localns: Any) -> None:
"""
Evaluates the ForwardRef to actual Field based on global and local namespaces
:param globalns: global namespace
:type globalns: Any
:param localns: local namespace
:type localns: Any
:return: None
:rtype: None
"""
def get_related_name(self) -> str:
"""
Returns name to use for reverse relation.
It's either set as `related_name` or by default it's owner model. get_name + 's'
:return: name of the related_name or default related name.
:rtype: str
"""
return "" # pragma: no cover
collerek-ormar-c09209a/ormar/fields/constraints.py 0000664 0000000 0000000 00000001504 15130200524 0022260 0 ustar 00root root 0000000 0000000 from typing import Any, Optional
from sqlalchemy import CheckConstraint, Index, UniqueConstraint
class UniqueColumns(UniqueConstraint):
    """
    Thin wrapper over sqlalchemy.UniqueConstraint so that end users can
    declare unique constraints without importing sqlalchemy themselves.
    """
class IndexColumns(Index):
    """
    Subclass of sqlalchemy.Index.
    Used to avoid importing anything from sqlalchemy by user.
    """

    # Fix: the docstring above was previously placed *after* __init__, making it
    # a discarded string expression instead of the class __doc__.

    def __init__(self, *args: Any, name: Optional[str] = None, **kw: Any) -> None:
        # sqlalchemy.Index requires a name positionally; when the user does not
        # provide one a placeholder is used (the real name is assigned later,
        # when the index is bound to a table).
        if not name:
            name = "TEMPORARY_NAME"
        super().__init__(name, *args, **kw)
class CheckColumns(CheckConstraint):
    """
    Thin wrapper over sqlalchemy.CheckConstraint so that end users can
    declare check constraints without importing sqlalchemy themselves.

    Note that some databases (e.g. MySQL) do not actively enforce
    check constraints.
    """
collerek-ormar-c09209a/ormar/fields/foreign_key.py 0000664 0000000 0000000 00000055273 15130200524 0022226 0 ustar 00root root 0000000 0000000 import string
import sys
import uuid
from dataclasses import dataclass
from random import choices
from typing import (
TYPE_CHECKING,
Any,
Dict,
ForwardRef,
List,
Optional,
Tuple,
Type,
Union,
overload,
)
import sqlalchemy
from pydantic import BaseModel, create_model
import ormar # noqa I101
from ormar.exceptions import ModelDefinitionError, RelationshipInstanceError
from ormar.fields.base import BaseField
from ormar.fields.referential_actions import ReferentialAction
if TYPE_CHECKING: # pragma no cover
from ormar.fields import ManyToManyField
from ormar.models import Model, NewBaseModel, T
def create_dummy_instance(fk: Type["T"], pk: Any = None) -> "T":
    """
    Ormar never returns you a raw data.
    So if you have a related field that has a value populated
    it will construct you a Model instance out of it.

    Builds a "fake" instance of the given Model carrying only the pk value.
    The __pk_only__ flag is passed so validation is skipped. Required nested
    related Models are recursively created with -1 as their pk value.

    :param fk: class of the related Model to which instance should be constructed
    :type fk: Model class
    :param pk: value of the primary_key column
    :type pk: Any
    :return: Model instance populated with only pk
    :rtype: Model
    """
    init_dict: Dict[str, Any] = {fk.ormar_config.pkname: pk or -1, "__pk_only__": True}
    for field_name, field in fk.ormar_config.model_fields.items():
        # required (non-nullable), non-virtual relations need their own dummies
        if field.is_relation and not field.nullable and not field.virtual:
            init_dict[field_name] = create_dummy_instance(field.to)
    return fk(**init_dict)
def create_dummy_model(
    base_model: Type["T"],
    pk_field: Union[BaseField, "ForeignKeyField", "ManyToManyField"],
) -> Type["BaseModel"]:
    """
    Constructs a dummy pydantic model used for type hints and pydantic
    validation. Only the pk field is populated, typed like the original.
    A random suffix keeps generated class names unique.

    :param base_model: class of target dummy model
    :type base_model: Model class
    :param pk_field: ormar Field to be set on pydantic Model
    :type pk_field: Union[BaseField, "ForeignKeyField", "ManyToManyField"]
    :return: constructed dummy model
    :rtype: pydantic.BaseModel
    """
    suffix = "".join(choices(string.ascii_uppercase, k=6)).lower()
    field_definitions = {pk_field.name: (pk_field.__type__, None)}
    return create_model(  # type: ignore
        f"PkOnly{base_model.get_name(lower=False)}{suffix}",
        __module__=base_model.__module__,
        **field_definitions,  # type: ignore
    )
def populate_fk_params_based_on_to_model(
    to: Type["T"],
    nullable: bool,
    onupdate: Optional[str] = None,
    ondelete: Optional[str] = None,
) -> Tuple[Any, List, Any, Any]:
    """
    Given the target model of the relation, derives the pydantic field type,
    the ForeignKey constraint list, the target column type and a pk-only
    dummy model.

    :param to: target related ormar Model
    :type to: Model class
    :param nullable: marks field as optional/ required
    :type nullable: bool
    :param onupdate: parameter passed to sqlalchemy.ForeignKey.
    How to treat child rows on update of parent (the one where FK is defined) model.
    :type onupdate: str
    :param ondelete: parameter passed to sqlalchemy.ForeignKey.
    How to treat child rows on delete of parent (the one where FK is defined) model.
    :type ondelete: str
    :return: tuple with target pydantic type, list of fk constraints and target col type
    :rtype: Tuple[Any, List, Any]
    """
    pk_alias = to.get_column_alias(to.ormar_config.pkname)
    fk_string = to.ormar_config.tablename + "." + pk_alias
    to_field = to.ormar_config.model_fields[to.ormar_config.pkname]
    pk_only_model = create_dummy_model(to, to_field)
    base_type = Union[to_field.__type__, to, pk_only_model]
    __type__ = Optional[base_type] if nullable else base_type
    constraints = [
        ForeignKeyConstraint(
            reference=fk_string, ondelete=ondelete, onupdate=onupdate, name=None
        )
    ]
    return __type__, constraints, to_field.column_type, pk_only_model
def validate_not_allowed_fields(kwargs: Dict) -> None:
    """
    Verifies that parameters which are forbidden on relation fields were not
    set. The forbidden keys are popped from kwargs either way (they are
    omitted later anyway), but a non-None value is explicitly rejected so the
    user learns the option is unsupported.

    :raises ModelDefinitionError: if any forbidden field is set
    :param kwargs: dict of kwargs to verify passed to relation field
    :type kwargs: Dict
    """
    forbidden = (
        "default",
        "encrypt_secret",
        "encrypt_backend",
        "encrypt_custom_backend",
        "overwrite_pydantic_type",
    )
    # pop every forbidden key so it never reaches BaseField construction
    provided = {key: kwargs.pop(key, None) for key in forbidden}
    offending = [key for key, value in provided.items() if value is not None]
    if offending:
        # fixed: previously the *value* of the argument was interpolated into
        # the message, which made the error hard to interpret - report the
        # argument name instead
        raise ModelDefinitionError(
            f"Argument {offending[0]} is not supported on relation fields!"
        )
def validate_referential_action(
action: Optional[Union[ReferentialAction, str]],
) -> Optional[str]:
"""
Validation `onupdate` and `ondelete` action cast to a string value
:raises ModelDefinitionError: if action is a not valid name string value
:param action: referential action attribute or name string
:type action: Optional[Union[ReferentialAction, str]]
:rtype: Optional[str]
"""
if action is not None and not isinstance(action, ReferentialAction):
try:
action = ReferentialAction(action.upper())
except (ValueError, AttributeError):
raise ModelDefinitionError(f"{action} ReferentialAction not supported.")
return action.value if action is not None else None
@dataclass
class ForeignKeyConstraint:
    """
    Internal container to store ForeignKey definitions used later
    to produce sqlalchemy.ForeignKeys
    """

    # FK target: either a "table.column" string or a sqlalchemy Column
    reference: Union[str, sqlalchemy.Column]
    # explicit constraint name; None lets the field build a default one later
    name: Optional[str]
    # referential actions forwarded to sqlalchemy.ForeignKey
    ondelete: Optional[str]
    onupdate: Optional[str]
@overload
def ForeignKey(to: Type["T"], **kwargs: Any) -> "T":  # pragma: no cover
    ...


@overload
def ForeignKey(to: ForwardRef, **kwargs: Any) -> "Model":  # pragma: no cover
    ...


def ForeignKey(  # type: ignore # noqa CFQ002
    to: Union[Type["T"], "ForwardRef"],
    *,
    name: Optional[str] = None,
    unique: bool = False,
    nullable: bool = True,
    related_name: Optional[str] = None,
    virtual: bool = False,
    onupdate: Union[ReferentialAction, str, None] = None,
    ondelete: Union[ReferentialAction, str, None] = None,
    **kwargs: Any,
) -> "T":
    """
    Despite a name it's a function that returns constructed ForeignKeyField.
    This function is actually used in model declaration (as ormar.ForeignKey(ToModel)).

    Accepts number of relation setting parameters as well as all BaseField ones.

    :param to: target related ormar Model
    :type to: Model class
    :param name: name of the database field - later called alias
    :type name: str
    :param unique: parameter passed to sqlalchemy.ForeignKey, unique flag
    :type unique: bool
    :param nullable: marks field as optional/ required
    :type nullable: bool
    :param related_name: name of reversed FK relation populated for you on to model
    :type related_name: str
    :param virtual: marks if relation is virtual.
    It is for reversed FK and auto generated FK on through model in Many2Many relations.
    :type virtual: bool
    :param onupdate: parameter passed to sqlalchemy.ForeignKey.
    How to treat child rows on update of parent (the one where FK is defined) model.
    :type onupdate: Union[ReferentialAction, str]
    :param ondelete: parameter passed to sqlalchemy.ForeignKey.
    How to treat child rows on delete of parent (the one where FK is defined) model.
    :type ondelete: Union[ReferentialAction, str]
    :param kwargs: all other args to be populated by BaseField
    :type kwargs: Any
    :return: ormar ForeignKeyField with relation to selected model
    :rtype: ForeignKeyField
    """
    # normalize referential actions to their string values up front
    onupdate = validate_referential_action(action=onupdate)
    ondelete = validate_referential_action(action=ondelete)
    # internal options are popped here so that validate_not_allowed_fields
    # only sees what the user actually passed
    owner = kwargs.pop("owner", None)
    self_reference = kwargs.pop("self_reference", False)
    orders_by = kwargs.pop("orders_by", None)
    related_orders_by = kwargs.pop("related_orders_by", None)
    skip_reverse = kwargs.pop("skip_reverse", False)
    skip_field = kwargs.pop("skip_field", False)
    sql_nullable = kwargs.pop("sql_nullable", None)
    # db-level nullability defaults to the pydantic-level nullability
    sql_nullable = nullable if sql_nullable is None else sql_nullable
    index = kwargs.pop("index", False)
    validate_not_allowed_fields(kwargs)
    pk_only_model = None
    if to.__class__ == ForwardRef:
        # target not resolved yet - real params are filled in later by
        # evaluate_forward_ref once the ForwardRef can be evaluated
        __type__ = to if not nullable else Optional[to]
        constraints: List = []
        column_type = None
    else:
        (
            __type__,
            constraints,
            column_type,
            pk_only_model,
        ) = populate_fk_params_based_on_to_model(
            to=to,  # type: ignore
            nullable=nullable,
            ondelete=ondelete,
            onupdate=onupdate,
        )
    namespace = dict(
        __type__=__type__,
        to=to,
        to_pk_only=pk_only_model,
        through=None,
        alias=name,
        name=kwargs.pop("real_name", None),
        nullable=nullable,
        sql_nullable=sql_nullable,
        constraints=constraints,
        unique=unique,
        column_type=column_type,
        related_name=related_name,
        virtual=virtual,
        primary_key=False,
        index=index,
        default=None,
        server_default=None,
        onupdate=onupdate,
        ondelete=ondelete,
        owner=owner,
        self_reference=self_reference,
        is_relation=True,
        orders_by=orders_by,
        related_orders_by=related_orders_by,
        skip_reverse=skip_reverse,
        skip_field=skip_field,
    )
    # a fresh subclass is created per call so each relation has its own type
    Field = type("ForeignKey", (ForeignKeyField, BaseField), {})
    return Field(**namespace)
class ForeignKeyField(BaseField):
    """
    Actual class returned from ForeignKey function call and stored in model_fields.
    """

    def __init__(self, **kwargs: Any) -> None:
        if TYPE_CHECKING:  # pragma: no cover
            self.__type__: type
            self.to: Type["Model"]
        # referential actions are popped before BaseField sees the kwargs
        self.ondelete: str = kwargs.pop("ondelete", None)
        self.onupdate: str = kwargs.pop("onupdate", None)
        super().__init__(**kwargs)

    def get_source_related_name(self) -> str:
        """
        Returns name to use for source relation name.
        For FK it's the same, differs for m2m fields.
        It's either set as `related_name` or by default it's owner model. get_name + 's'
        :return: name of the related_name or default related name.
        :rtype: str
        """
        return self.get_related_name()

    def get_related_name(self) -> str:
        """
        Returns name to use for reverse relation.
        It's either set as `related_name` or by default it's owner model. get_name + 's'
        :return: name of the related_name or default related name.
        :rtype: str
        """
        return self.related_name or self.owner.get_name() + "s"

    def default_target_field_name(self) -> str:
        """
        Returns default target model name on through model.
        :return: name of the field
        :rtype: str
        """
        # self-referencing relations need a prefix to disambiguate both sides
        prefix = "from_" if self.self_reference else ""
        return self.through_reverse_relation_name or f"{prefix}{self.to.get_name()}"

    def default_source_field_name(self) -> str:
        """
        Returns default source model name on through model.
        :return: name of the field
        :rtype: str
        """
        prefix = "to_" if self.self_reference else ""
        return self.through_relation_name or f"{prefix}{self.owner.get_name()}"

    def get_filter_clause_target(self) -> Type["Model"]:
        # for plain FK the filter clause targets the related model directly
        return self.to

    def get_model_relation_fields(self, use_alias: bool = False) -> str:
        """
        Extract names of the database columns or model fields that are connected
        with given relation based on use_alias switch and which side of the relation
        the current field is - reverse or normal.

        :param use_alias: use db names aliases or model fields
        :type use_alias: bool
        :return: name or names of the related columns/ fields
        :rtype: Union[str, List[str]]
        """
        if use_alias:
            return self._get_model_relation_fields_alias()
        return self._get_model_relation_fields_name()

    def _get_model_relation_fields_name(self) -> str:
        # reverse (virtual) relations join on the owner's pk instead
        if self.virtual:
            return self.owner.ormar_config.pkname
        return self.name

    def _get_model_relation_fields_alias(self) -> str:
        if self.virtual:
            return self.owner.ormar_config.model_fields[
                self.owner.ormar_config.pkname
            ].get_alias()
        return self.get_alias()

    def get_related_field_alias(self) -> str:
        """
        Extract names of the related database columns or that are connected
        with given relation based to use as a target in filter clause.

        :return: name or names of the related columns/ fields
        :rtype: Union[str, Dict[str, str]]
        """
        if self.virtual:
            field_name = self.get_related_name()
            field = self.to.ormar_config.model_fields[field_name]
            return field.get_alias()
        target_field = self.to.get_column_alias(self.to.ormar_config.pkname)
        return target_field

    def get_related_field_name(self) -> Union[str, List[str]]:
        """
        Returns name of the relation field that should be used in prefetch query.
        This field is later used to register relation in prefetch query,
        populate relations dict, and populate nested model in prefetch query.

        :return: name(s) of the field
        :rtype: Union[str, List[str]]
        """
        if self.virtual:
            return self.get_related_name()
        return self.to.ormar_config.pkname

    def _evaluate_forward_ref(
        self, globalns: Any, localns: Any, is_through: bool = False
    ) -> None:
        # ForwardRef._evaluate signature changed in 3.9+ (recursive_guard)
        target = "through" if is_through else "to"
        target_obj = getattr(self, target)
        if sys.version_info.minor <= 8:  # pragma: no cover
            evaluated = target_obj._evaluate(globalns, localns)
        else:  # pragma: no cover
            evaluated = target_obj._evaluate(globalns, localns, recursive_guard=set())
        setattr(self, target, evaluated)

    def evaluate_forward_ref(self, globalns: Any, localns: Any) -> None:
        """
        Evaluates the ForwardRef to actual Field based on global and local namespaces

        :param globalns: global namespace
        :type globalns: Any
        :param localns: local namespace
        :type localns: Any
        :return: None
        :rtype: None
        """
        if self.to.__class__ == ForwardRef:
            self._evaluate_forward_ref(globalns, localns)
            # now that the target is a real model the fk params can be filled
            (
                self.__type__,
                self.constraints,
                self.column_type,
                self.to_pk_only,
            ) = populate_fk_params_based_on_to_model(
                to=self.to,
                nullable=self.nullable,
                ondelete=self.ondelete,
                onupdate=self.onupdate,
            )

    def _extract_model_from_sequence(
        self, value: List, child: "Model", to_register: bool
    ) -> List["Model"]:
        """
        Takes a list of Models and registers them on parent.
        Registration is mutual, so children have also reference to parent.

        Used in reverse FK relations.

        :param value: list of Model
        :type value: List
        :param child: child/ related Model
        :type child: Model
        :param to_register: flag if the relation should be set in RelationshipManager
        :type to_register: bool
        :return: list (if needed) registered Models
        :rtype: List["Model"]
        """
        return [
            self.expand_relationship(  # type: ignore
                value=val, child=child, to_register=to_register
            )
            for val in value
        ]

    def _register_existing_model(
        self, value: "Model", child: "Model", to_register: bool
    ) -> "Model":
        """
        Takes already created instance and registers it for parent.
        Registration is mutual, so children have also reference to parent.

        Used in reverse FK relations and normal FK for single models.

        :param value: already instantiated Model
        :type value: Model
        :param child: child/ related Model
        :type child: Model
        :param to_register: flag if the relation should be set in RelationshipManager
        :type to_register: bool
        :return: (if needed) registered Model
        :rtype: Model
        """
        if to_register:
            self.register_relation(model=value, child=child)
        return value

    def _construct_model_from_dict(
        self, value: dict, child: "Model", to_register: bool
    ) -> "Model":
        """
        Takes a dictionary, creates an instance and registers it for parent.
        If dictionary contains only one field and it's a pk it is a __pk_only__ model.
        Registration is mutual, so children have also reference to parent.

        Used in normal FK for dictionaries.

        :param value: dictionary of a Model
        :type value: dict
        :param child: child/ related Model
        :type child: Model
        :param to_register: flag if the relation should be set in RelationshipManager
        :type to_register: bool
        :return: (if needed) registered Model
        :rtype: Model
        """
        pk_only_model = None
        keys = set(value.keys())
        # strip the related-field keys to see what belongs to the model itself
        own_keys = keys - self.to.extract_related_names()
        if (
            len(own_keys) == 1
            and list(own_keys)[0] == self.to.ormar_config.pkname
            and value.get(self.to.ormar_config.pkname) is not None
            and not self.is_through
        ):
            value["__pk_only__"] = True
            pk_only_model = self.to_pk_only(**value)
        model = self.to(**value)
        if to_register:
            self.register_relation(model=model, child=child)
        return pk_only_model if pk_only_model is not None else model

    def _construct_model_from_pk(
        self, value: Any, child: "Model", to_register: bool
    ) -> "Model":
        """
        Takes a pk value, creates a dummy instance and registers it for parent.
        Registration is mutual, so children have also reference to parent.

        Used in normal FK for pk values.

        :raises RelationshipInstanceError: when the value type does not match
            the target model's pk type
        :param value: value of a related pk / fk column
        :type value: Any
        :param child: child/ related Model
        :type child: Model
        :param to_register: flag if the relation should be set in RelationshipManager
        :type to_register: bool
        :return: (if needed) registered Model
        :rtype: Model
        """
        if self.to.pk_type() == uuid.UUID and isinstance(value, str):  # pragma: nocover
            value = uuid.UUID(value)
        if not isinstance(value, self.to.pk_type()):
            if isinstance(value, self.to_pk_only):
                # unwrap a pk-only dummy model to its raw pk value
                value = getattr(value, self.to.ormar_config.pkname)
            else:
                raise RelationshipInstanceError(
                    f"Relationship error - ForeignKey {self.to.__name__} "
                    f"is of type {self.to.pk_type()} "
                    f"while {type(value)} passed as a parameter."
                )
        model = create_dummy_instance(fk=self.to, pk=value)
        if to_register:
            self.register_relation(model=model, child=child)
        return model

    def register_relation(self, model: "Model", child: "Model") -> None:
        """
        Registers relation between parent and child in relation manager.
        Relation manager is kept on each model (different instance).

        Used in Metaclass and sometimes some relations are missing
        (i.e. cloned Models in fastapi might miss one).

        :param model: parent model (with relation definition)
        :type model: Model class
        :param child: child model
        :type child: Model class
        """
        model._orm.add(parent=model, child=child, field=self)

    def has_unresolved_forward_refs(self) -> bool:
        """
        Verifies if the field has any ForwardRefs that require updating before the
        model can be used.

        :return: result of the check
        :rtype: bool
        """
        return self.to.__class__ == ForwardRef

    def expand_relationship(
        self,
        value: Any,
        child: Union["Model", "NewBaseModel"],
        to_register: bool = True,
    ) -> Optional[Union["Model", List["Model"]]]:
        """
        For relations the child model is first constructed (if needed),
        registered in relation and returned.
        For relation fields the value can be a pk value (Any type of field),
        dict (from Model) or actual instance/list of a "Model".

        Selects the appropriate constructor based on a passed value.

        :param value: a Model field value, returned untouched for non relation fields.
        :type value: Any
        :param child: a child Model to register
        :type child: Union["Model", "NewBaseModel"]
        :param to_register: flag if the relation should be set in RelationshipManager
        :type to_register: bool
        :return: returns a Model or a list of Models
        :rtype: Optional[Union["Model", List["Model"]]]
        """
        if value is None:
            # reverse relations expand to an empty list, direct ones to None
            return None if not self.virtual else []
        # dispatch on the class name of the value; anything unrecognized is
        # treated as a raw pk value
        constructors = {
            f"{self.to.__name__}": self._register_existing_model,
            "dict": self._construct_model_from_dict,
            "list": self._extract_model_from_sequence,
        }
        model = constructors.get(  # type: ignore
            value.__class__.__name__, self._construct_model_from_pk
        )(value, child, to_register)
        return model

    def get_relation_name(self) -> str:  # pragma: no cover
        """
        Returns name of the relation, which can be a own name or through model
        names for m2m models

        :return: name of the relation
        :rtype: str
        """
        return self.name

    def get_source_model(self) -> Type["Model"]:  # pragma: no cover
        """
        Returns model from which the relation comes -> either owner or through model

        :return: source model
        :rtype: Type["Model"]
        """
        return self.owner
collerek-ormar-c09209a/ormar/fields/many_to_many.py 0000664 0000000 0000000 00000026333 15130200524 0022412 0 ustar 00root root 0000000 0000000 from typing import (
TYPE_CHECKING,
Any,
ForwardRef,
List,
Optional,
Tuple,
Type,
Union,
cast,
overload,
)
import ormar # noqa: I100
from ormar import ModelDefinitionError
from ormar.fields import BaseField
from ormar.fields.foreign_key import (
ForeignKeyField,
create_dummy_model,
validate_not_allowed_fields,
)
if TYPE_CHECKING: # pragma no cover
from ormar.models import Model, T
from ormar.relations.relation_proxy import RelationProxy
REF_PREFIX = "#/components/schemas/"
def forbid_through_relations(through: Type["Model"]) -> None:
    """
    Ensures the given through model declares no relation fields of its own.

    :raises ModelDefinitionError: when any field on the through model is a relation
    :param through: through Model to be checked
    :type through: Type['Model]
    """
    has_relation = any(
        field.is_relation for field in through.ormar_config.model_fields.values()
    )
    if has_relation:
        raise ModelDefinitionError(
            f"Through Models cannot have explicit relations "
            f"defined. Remove the relations from Model "
            f"{through.get_name(lower=False)}"
        )
def populate_m2m_params_based_on_to_model(
    to: Type["Model"], nullable: bool
) -> Tuple[Any, Any, Any]:
    """
    Based on target to model to which relation leads to populates the type of the
    pydantic field to use and type of the target column field.

    :param to: target related ormar Model
    :type to: Model class
    :param nullable: marks field as optional/ required
    :type nullable: bool
    :return: tuple with target pydantic type, target column type and pk-only model
    :rtype: Tuple[Any, Any, Any]
    """
    to_field = to.ormar_config.model_fields[to.ormar_config.pkname]
    pk_only_model = create_dummy_model(to, to_field)
    # m2m values may be a single instance/pk or a list of them
    base_type = Union[  # type: ignore
        to_field.__type__,  # type: ignore
        to,  # type: ignore
        pk_only_model,  # type: ignore
        List[to],  # type: ignore
        List[pk_only_model],  # type: ignore
    ]
    __type__ = (
        base_type  # type: ignore
        if not nullable
        else Optional[base_type]  # type: ignore
    )
    column_type = to_field.column_type
    return __type__, column_type, pk_only_model
@overload
def ManyToMany(to: Type["T"], **kwargs: Any) -> "RelationProxy[T]":  # pragma: no cover
    ...


@overload
def ManyToMany(to: ForwardRef, **kwargs: Any) -> "RelationProxy":  # pragma: no cover
    ...


def ManyToMany(  # type: ignore
    to: Union[Type["T"], "ForwardRef"],
    through: Optional[Union[Type["T"], "ForwardRef"]] = None,
    *,
    name: Optional[str] = None,
    unique: bool = False,
    virtual: bool = False,
    **kwargs: Any,
) -> "RelationProxy[T]":
    """
    Despite a name it's a function that returns constructed ManyToManyField.
    This function is actually used in model declaration
    (as ormar.ManyToMany(ToModel, through=ThroughModel)).

    Accepts number of relation setting parameters as well as all BaseField ones.

    :param to: target related ormar Model
    :type to: Model class
    :param through: through model for m2m relation
    :type through: Model class
    :param name: name of the database field - later called alias
    :type name: str
    :param unique: parameter passed to sqlalchemy.ForeignKey, unique flag
    :type unique: bool
    :param virtual: marks if relation is virtual.
    It is for reversed FK and auto generated FK on through model in Many2Many relations.
    :type virtual: bool
    :param kwargs: all other args to be populated by BaseField
    :type kwargs: Any
    :return: ormar ManyToManyField with m2m relation to selected model
    :rtype: ManyToManyField
    """
    # internal options are popped here so that validate_not_allowed_fields
    # only sees what the user actually passed
    related_name = kwargs.pop("related_name", None)
    nullable = kwargs.pop("nullable", True)
    owner = kwargs.pop("owner", None)
    self_reference = kwargs.pop("self_reference", False)
    orders_by = kwargs.pop("orders_by", None)
    related_orders_by = kwargs.pop("related_orders_by", None)
    skip_reverse = kwargs.pop("skip_reverse", False)
    skip_field = kwargs.pop("skip_field", False)
    through_relation_name = kwargs.pop("through_relation_name", None)
    through_reverse_relation_name = kwargs.pop("through_reverse_relation_name", None)
    # through model (when already resolved) may not declare its own relations
    if through is not None and through.__class__ != ForwardRef:
        forbid_through_relations(cast(Type["Model"], through))
    validate_not_allowed_fields(kwargs)
    pk_only_model = None
    if to.__class__ == ForwardRef:
        # target not resolved yet - real params are filled in later by
        # evaluate_forward_ref once the ForwardRef can be evaluated
        __type__ = (
            Union[to, List[to]]  # type: ignore
            if not nullable
            else Optional[Union[to, List[to]]]  # type: ignore
        )
        column_type = None
    else:
        __type__, column_type, pk_only_model = populate_m2m_params_based_on_to_model(
            to=to, nullable=nullable  # type: ignore
        )
    namespace = dict(
        __type__=__type__,
        to=to,
        to_pk_only=pk_only_model,
        through=through,
        alias=name,
        name=name,
        nullable=nullable,
        unique=unique,
        column_type=column_type,
        related_name=related_name,
        virtual=virtual,
        primary_key=False,
        index=False,
        default=None,
        server_default=None,
        owner=owner,
        self_reference=self_reference,
        is_relation=True,
        is_multi=True,
        orders_by=orders_by,
        related_orders_by=related_orders_by,
        skip_reverse=skip_reverse,
        skip_field=skip_field,
        through_relation_name=through_relation_name,
        through_reverse_relation_name=through_reverse_relation_name,
    )
    # a fresh subclass is created per call so each relation has its own type
    Field = type("ManyToMany", (ManyToManyField, BaseField), {})
    return Field(**namespace)
class ManyToManyField( # type: ignore
ForeignKeyField,
ormar.QuerySetProtocol,
ormar.RelationProtocol,
):
"""
Actual class returned from ManyToMany function call and stored in model_fields.
"""
def __init__(self, **kwargs: Any) -> None:
if TYPE_CHECKING: # pragma: no cover
self.__type__: type
self.to: Type["Model"]
self.through: Type["Model"]
super().__init__(**kwargs)
def get_source_related_name(self) -> str:
"""
Returns name to use for source relation name.
For FK it's the same, differs for m2m fields.
It's either set as `related_name` or by default it's field name.
:return: name of the related_name or default related name.
:rtype: str
"""
return (
self.through.ormar_config.model_fields[
self.default_source_field_name()
].related_name
or self.name
)
def has_unresolved_forward_refs(self) -> bool:
"""
Verifies if the filed has any ForwardRefs that require updating before the
model can be used.
:return: result of the check
:rtype: bool
"""
return self.to.__class__ == ForwardRef or self.through.__class__ == ForwardRef
def evaluate_forward_ref(self, globalns: Any, localns: Any) -> None:
"""
Evaluates the ForwardRef to actual Field based on global and local namespaces
:param globalns: global namespace
:type globalns: Any
:param localns: local namespace
:type localns: Any
:return: None
:rtype: None
"""
if self.to.__class__ == ForwardRef:
self._evaluate_forward_ref(globalns, localns)
(
self.__type__,
self.column_type,
pk_only_model,
) = populate_m2m_params_based_on_to_model(
to=self.to, nullable=self.nullable
)
self.to_pk_only = pk_only_model
if self.through.__class__ == ForwardRef:
self._evaluate_forward_ref(globalns, localns, is_through=True)
forbid_through_relations(self.through)
def get_relation_name(self) -> str:
"""
Returns name of the relation, which can be a own name or through model
names for m2m models
:return: result of the check
:rtype: bool
"""
if self.self_reference and self.name == self.self_reference_primary:
return self.default_source_field_name()
return self.default_target_field_name()
def get_source_model(self) -> Type["Model"]:
"""
Returns model from which the relation comes -> either owner or through model
:return: source model
:rtype: Type["Model"]
"""
return self.through
def get_filter_clause_target(self) -> Type["Model"]:
return self.through
def get_model_relation_fields(self, use_alias: bool = False) -> str:
"""
Extract names of the database columns or model fields that are connected
with given relation based on use_alias switch.
:param use_alias: use db names aliases or model fields
:type use_alias: bool
:return: name or names of the related columns/ fields
:rtype: Union[str, List[str]]
"""
pk_field = self.owner.ormar_config.model_fields[self.owner.ormar_config.pkname]
result = pk_field.get_alias() if use_alias else pk_field.name
return result
def get_related_field_alias(self) -> str:
"""
Extract names of the related database columns or that are connected
with given relation based to use as a target in filter clause.
:return: name or names of the related columns/ fields
:rtype: Union[str, Dict[str, str]]
"""
if self.self_reference and self.self_reference_primary == self.name:
field_name = self.default_target_field_name()
else:
field_name = self.default_source_field_name()
sub_field = self.through.ormar_config.model_fields[field_name]
return sub_field.get_alias()
    def get_related_field_name(self) -> Union[str, List[str]]:
        """
        Returns name of the relation field that should be used in prefetch query.
        This field is later used to register relation in prefetch query,
        populate relations dict, and populate nested model in prefetch query.

        For m2m relations a single name is always returned (the Union return
        type is inherited from the base field interface).

        :return: name of the field
        :rtype: str
        """
        if self.self_reference and self.self_reference_primary == self.name:
            return self.default_target_field_name()
        return self.default_source_field_name()
    def create_default_through_model(self) -> None:
        """
        Creates default empty through model if no additional fields are required.
        The generated model is named ``{Owner}{To}``, uses a table named
        ``{owner}s_{to}s``, has only an autoincrement ``id`` primary key and
        shares the owner's database and metadata.
        """
        owner_name = self.owner.get_name(lower=False)
        to_name = self.to.get_name(lower=False)
        class_name = f"{owner_name}{to_name}"
        table_name = f"{owner_name.lower()}s_{to_name.lower()}s"
        # anchor the generated class in the owner's module/qualname so it is
        # debuggable and behaves like a regular nested class definition
        base_namespace = {
            "__module__": self.owner.__module__,
            "__qualname__": f"{self.owner.__qualname__}.{class_name}",
        }
        new_config = ormar.models.ormar_config.OrmarConfig(
            tablename=table_name,
            database=self.owner.ormar_config.database,
            metadata=self.owner.ormar_config.metadata,
        )
        # dynamically build the through Model subclass with a single pk column
        through_model = type(
            class_name,
            (ormar.Model,),
            {
                **base_namespace,
                "ormar_config": new_config,
                "id": ormar.Integer(name="id", primary_key=True),
            },
        )
        self.through = cast(Type["Model"], through_model)
collerek-ormar-c09209a/ormar/fields/model_fields.py 0000664 0000000 0000000 00000057344 15130200524 0022354 0 ustar 00root root 0000000 0000000 import datetime
import decimal
import uuid
from enum import Enum as E
from enum import EnumMeta
from typing import TYPE_CHECKING, Any, Optional, Type, TypeVar, Union, overload
import pydantic
import sqlalchemy
import ormar # noqa I101
from ormar import ModelDefinitionError # noqa I101
from ormar.fields import sqlalchemy_uuid
from ormar.fields.base import BaseField # noqa I101
from ormar.fields.sqlalchemy_encrypted import EncryptBackends
try:
from typing import Literal # type: ignore
except ImportError: # pragma: no cover
from typing_extensions import Literal # type: ignore
try:
from typing import Self # type: ignore
except ImportError: # pragma: no cover
from typing_extensions import Self # type: ignore
def is_field_nullable(
    nullable: Optional[bool],
    default: Any,
    server_default: Any,
) -> bool:
    """
    Decide whether a field should be nullable/optional.

    An explicitly passed ``nullable`` flag always wins; when it is left as
    ``None`` the field becomes nullable as soon as any default (python-side
    or sql-server-side) is provided.

    :param nullable: explicit nullable flag, or None when not set by the user
    :type nullable: Optional[bool]
    :param default: value or callable used as python-side default
    :type default: Any
    :param server_default: function/value used as default by the sql server
    :type server_default: Any
    :return: True if the field should be optional
    :rtype: bool
    """
    if nullable is not None:
        return nullable
    has_any_default = default is not None or server_default is not None
    return has_any_default
def is_auto_primary_key(primary_key: bool, autoincrement: bool) -> bool:
    """
    Checks if field is an autoincrement pk -> if yes it's optional,
    since the database will generate its value.

    :param primary_key: flag if field is a pk field
    :type primary_key: bool
    :param autoincrement: flag if field should be autoincrement
    :type autoincrement: bool
    :return: result of the check
    :rtype: bool
    """
    if not primary_key:
        return False
    return autoincrement
class ModelFieldFactory:
    """
    Default field factory that constructs Field classes and populates their values.
    """

    _bases: Any = (BaseField,)  # base classes of the dynamically created field
    _type: Any = None  # python type of the field, set by subclasses
    _sample: Any = None  # sample value used in generated schemas/docs

    def __new__(cls, *args: Any, **kwargs: Any) -> Self:  # type: ignore
        """
        Validates params and builds a new BaseField subclass instance with all
        ormar options resolved (nullability, pk/autoincrement, encryption etc.).
        Field-specific kwargs are popped here so only leftovers reach BaseField.
        """
        cls.validate(**kwargs)
        default = kwargs.pop("default", None)
        server_default = kwargs.pop("server_default", None)
        nullable = kwargs.pop("nullable", None)
        sql_nullable = kwargs.pop("sql_nullable", None)
        primary_key = kwargs.pop("primary_key", False)
        autoincrement = kwargs.pop("autoincrement", False)
        encrypt_secret = kwargs.pop("encrypt_secret", None)
        encrypt_backend = kwargs.pop("encrypt_backend", EncryptBackends.NONE)
        encrypt_custom_backend = kwargs.pop("encrypt_custom_backend", None)
        overwrite_pydantic_type = kwargs.pop("overwrite_pydantic_type", None)
        # a field is optional if it has any default or is an autoincrement pk
        nullable = is_field_nullable(
            nullable, default, server_default
        ) or is_auto_primary_key(primary_key, autoincrement)
        # db-side nullability: pks are never nullable, otherwise follow the
        # explicit sql_nullable flag falling back to the python-side value
        sql_nullable = (
            False
            if primary_key
            else (nullable if sql_nullable is None else sql_nullable)
        )
        enum_class = kwargs.pop("enum_class", None)
        field_type = cls._type if enum_class is None else enum_class
        namespace = dict(
            __type__=field_type,
            __pydantic_type__=(
                overwrite_pydantic_type
                if overwrite_pydantic_type is not None
                else field_type
            ),
            __sample__=cls._sample,
            alias=kwargs.pop("name", None),
            name=None,
            primary_key=primary_key,
            default=default,
            server_default=server_default,
            nullable=nullable,
            annotation=field_type,
            sql_nullable=sql_nullable,
            index=kwargs.pop("index", False),
            unique=kwargs.pop("unique", False),
            autoincrement=autoincrement,
            column_type=cls.get_column_type(
                **kwargs, sql_nullable=sql_nullable, enum_class=enum_class
            ),
            encrypt_secret=encrypt_secret,
            encrypt_backend=encrypt_backend,
            encrypt_custom_backend=encrypt_custom_backend,
            **kwargs
        )
        # build a fresh BaseField subclass named after the factory and
        # instantiate it with the resolved options
        Field = type(cls.__name__, cls._bases, {})
        return Field(**namespace)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:  # pragma no cover
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.
        Base implementation returns None; subclasses override it.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return None

    @classmethod
    def validate(cls, **kwargs: Any) -> None:  # pragma no cover
        """
        Used to validate if all required parameters on a given field type are set.
        Base implementation accepts everything; subclasses override it.

        :param kwargs: all params passed during construction
        :type kwargs: Any
        """
class String(ModelFieldFactory, str):
    """
    String field factory that constructs Field classes and populates their values.
    """

    _type = str
    _sample = "string"

    def __new__(  # type: ignore # noqa CFQ002
        cls,
        *,
        max_length: int,
        min_length: Optional[int] = None,
        regex: Optional[str] = None,
        **kwargs: Any
    ) -> Self:  # type: ignore
        # merge the explicit keyword-only params (captured via locals) into
        # kwargs passed down to the generic factory
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.String(length=kwargs.get("max_length"))

    @classmethod
    def validate(cls, **kwargs: Any) -> None:
        """
        Used to validate if all required parameters on a given field type are set.
        Requires max_length to be a positive integer.

        :param kwargs: all params passed during construction
        :type kwargs: Any
        :raises ModelDefinitionError: when max_length is missing or not positive
        """
        max_length = kwargs.get("max_length", None)
        # guard against None explicitly - previously String(max_length=None)
        # raised a TypeError on the comparison instead of the intended error
        if max_length is None or max_length <= 0:
            raise ModelDefinitionError(
                "Parameter max_length is required for field String"
            )
class Integer(ModelFieldFactory, int):
    """
    Integer field factory that constructs Field classes and populates their values.
    """

    _type = int
    _sample = 0

    def __new__(  # type: ignore
        cls,
        *,
        minimum: Optional[int] = None,
        maximum: Optional[int] = None,
        multiple_of: Optional[int] = None,
        **kwargs: Any
    ) -> Self:
        # primary-key integers default to autoincrement unless explicitly set
        autoincrement = kwargs.pop("autoincrement", None)
        autoincrement = (
            autoincrement
            if autoincrement is not None
            else kwargs.get("primary_key", False)
        )
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        # map ormar's minimum/maximum onto pydantic's ge/le constraints
        kwargs["ge"] = kwargs["minimum"]
        kwargs["le"] = kwargs["maximum"]
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.Integer()
class Text(ModelFieldFactory, str):
    """
    Text field factory that constructs Field classes and populates their values.
    """

    _type = str
    _sample = "text"

    def __new__(cls, **kwargs: Any) -> Self:  # type: ignore
        # no extra params here; the locals() merge mirrors the other factories
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.Text()
class Float(ModelFieldFactory, float):
    """
    Float field factory that constructs Field classes and populates their values.
    """

    _type = float
    _sample = 0.0

    def __new__(  # type: ignore
        cls,
        *,
        minimum: Optional[float] = None,
        maximum: Optional[float] = None,
        multiple_of: Optional[int] = None,
        **kwargs: Any
    ) -> Self:
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        # map ormar's minimum/maximum onto pydantic's ge/le constraints
        kwargs["ge"] = kwargs["minimum"]
        kwargs["le"] = kwargs["maximum"]
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.Float()
if TYPE_CHECKING:  # pragma: nocover

    # typing stub so mypy/IDE treat Boolean fields as plain bools
    def Boolean(**kwargs: Any) -> bool:
        pass

else:

    class Boolean(ModelFieldFactory, int):
        """
        Boolean field factory that constructs Field classes and populates
        their values.
        Inherits from int since bool cannot be subclassed in python.
        """

        _type = bool
        _sample = True

        @classmethod
        def get_column_type(cls, **kwargs: Any) -> Any:
            """
            Return proper type of db column for given field type.
            Accepts required and optional parameters that each column type accepts.

            :param kwargs: key, value pairs of sqlalchemy options
            :type kwargs: Any
            :return: initialized column with proper options
            :rtype: sqlalchemy Column
            """
            return sqlalchemy.Boolean()
class DateTime(ModelFieldFactory, datetime.datetime):
    """
    DateTime field factory that constructs Field classes and populates their values.
    """

    _type = datetime.datetime
    _sample = "datetime"

    def __new__(  # type: ignore # noqa CFQ002
        cls, *, timezone: bool = False, **kwargs: Any
    ) -> Self:  # type: ignore
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.DateTime(timezone=kwargs.get("timezone", False))
class Date(ModelFieldFactory, datetime.date):
    """
    Date field factory that constructs Field classes and populates their values.
    """

    _type = datetime.date
    _sample = "date"

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.Date()
class Time(ModelFieldFactory, datetime.time):
    """
    Time field factory that constructs Field classes and populates their values.
    """

    _type = datetime.time
    _sample = "time"

    def __new__(  # type: ignore # noqa CFQ002
        cls, *, timezone: bool = False, **kwargs: Any
    ) -> Self:  # type: ignore
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.Time(timezone=kwargs.get("timezone", False))
class JSON(ModelFieldFactory, pydantic.Json):
    """
    JSON field factory that constructs Field classes and populates their values.
    """

    _type = pydantic.Json
    _sample = '{"json": "json"}'

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        # none_as_null makes python None stored as SQL NULL for nullable columns
        return sqlalchemy.JSON(none_as_null=kwargs.get("sql_nullable", False))
if TYPE_CHECKING:  # pragma: nocover # noqa: C901

    # typing stubs: the declared return type depends on represent_as_base64_str
    @overload
    def LargeBinary(  # type: ignore
        max_length: int, *, represent_as_base64_str: Literal[True], **kwargs: Any
    ) -> str: ...

    @overload
    def LargeBinary(  # type: ignore
        max_length: int, *, represent_as_base64_str: Literal[False], **kwargs: Any
    ) -> bytes: ...

    @overload
    def LargeBinary(
        max_length: int, represent_as_base64_str: Literal[False] = ..., **kwargs: Any
    ) -> bytes: ...

    def LargeBinary(
        max_length: int, represent_as_base64_str: bool = False, **kwargs: Any
    ) -> Union[str, bytes]:
        pass

else:

    class LargeBinary(ModelFieldFactory, bytes):
        """
        LargeBinary field factory that constructs Field classes
        and populates their values.
        """

        _type = bytes
        _sample = "bytes"

        def __new__(  # type: ignore # noqa CFQ002
            cls,
            *,
            max_length: int,
            represent_as_base64_str: bool = False,
            **kwargs: Any
        ) -> Self:  # type: ignore
            # merge the explicit keyword-only params (captured via locals)
            # into kwargs passed down to the generic factory
            kwargs = {
                **kwargs,
                **{
                    k: v
                    for k, v in locals().items()
                    if k not in ["cls", "__class__", "kwargs"]
                },
            }
            return super().__new__(cls, **kwargs)

        @classmethod
        def get_column_type(cls, **kwargs: Any) -> Any:
            """
            Return proper type of db column for given field type.
            Accepts required and optional parameters that each column type accepts.

            :param kwargs: key, value pairs of sqlalchemy options
            :type kwargs: Any
            :return: initialized column with proper options
            :rtype: sqlalchemy Column
            """
            return sqlalchemy.LargeBinary(length=kwargs.get("max_length"))

        @classmethod
        def validate(cls, **kwargs: Any) -> None:
            """
            Used to validate if all required parameters on a given field type are set.
            Requires max_length to be a positive integer.

            :param kwargs: all params passed during construction
            :type kwargs: Any
            :raises ModelDefinitionError: when max_length is missing or not positive
            """
            max_length = kwargs.get("max_length", None)
            # guard against None explicitly - previously a None max_length
            # raised a TypeError on the comparison instead of the intended error
            if max_length is None or max_length <= 0:
                raise ModelDefinitionError(
                    "Parameter max_length is required for field LargeBinary"
                )
class BigInteger(Integer, int):
    """
    BigInteger field factory that constructs Field classes and populates their values.
    """

    _type = int
    _sample = 0

    def __new__(  # type: ignore
        cls,
        *,
        minimum: Optional[int] = None,
        maximum: Optional[int] = None,
        multiple_of: Optional[int] = None,
        **kwargs: Any
    ) -> Self:
        # primary-key integers default to autoincrement unless explicitly set
        autoincrement = kwargs.pop("autoincrement", None)
        autoincrement = (
            autoincrement
            if autoincrement is not None
            else kwargs.get("primary_key", False)
        )
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        # map ormar's minimum/maximum onto pydantic's ge/le constraints
        kwargs["ge"] = kwargs["minimum"]
        kwargs["le"] = kwargs["maximum"]
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.BigInteger()
class SmallInteger(Integer, int):
    """
    SmallInteger field factory that constructs Field classes and populates their values.
    """

    _type = int
    _sample = 0

    def __new__(  # type: ignore
        cls,
        *,
        minimum: Optional[int] = None,
        maximum: Optional[int] = None,
        multiple_of: Optional[int] = None,
        **kwargs: Any
    ) -> Self:
        # primary-key integers default to autoincrement unless explicitly set
        autoincrement = kwargs.pop("autoincrement", None)
        autoincrement = (
            autoincrement
            if autoincrement is not None
            else kwargs.get("primary_key", False)
        )
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        # map ormar's minimum/maximum onto pydantic's ge/le constraints
        kwargs["ge"] = kwargs["minimum"]
        kwargs["le"] = kwargs["maximum"]
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        return sqlalchemy.SmallInteger()
class Decimal(ModelFieldFactory, decimal.Decimal):
    """
    Decimal field factory that constructs Field classes and populates their values.
    """

    _type = decimal.Decimal
    _sample = 0.0

    def __new__(  # type: ignore # noqa CFQ002
        cls,
        *,
        minimum: Optional[float] = None,
        maximum: Optional[float] = None,
        multiple_of: Optional[int] = None,
        precision: Optional[int] = None,
        scale: Optional[int] = None,
        max_digits: Optional[int] = None,
        decimal_places: Optional[int] = None,
        **kwargs: Any
    ) -> Self:
        # merge the explicit keyword-only params (captured via locals) into kwargs
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        # map ormar's minimum/maximum onto pydantic's ge/le constraints
        kwargs["ge"] = kwargs["minimum"]
        kwargs["le"] = kwargs["maximum"]
        # precision/max_digits and scale/decimal_places are aliases: whichever
        # one is provided is mirrored into the other (pydantic vs sqlalchemy names)
        if kwargs.get("max_digits"):
            kwargs["precision"] = kwargs["max_digits"]
        elif kwargs.get("precision"):
            kwargs["max_digits"] = kwargs["precision"]
        if kwargs.get("decimal_places"):
            kwargs["scale"] = kwargs["decimal_places"]
        elif kwargs.get("scale"):
            kwargs["decimal_places"] = kwargs["scale"]
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        precision = kwargs.get("precision")
        scale = kwargs.get("scale")
        return sqlalchemy.DECIMAL(precision=precision, scale=scale)

    @classmethod
    def validate(cls, **kwargs: Any) -> None:
        """
        Used to validate if all required parameters on a given field type are set.
        Note: validate runs after __new__ mirrored max_digits/decimal_places
        into precision/scale, so either naming satisfies this check.

        :param kwargs: all params passed during construction
        :type kwargs: Any
        """
        precision = kwargs.get("precision")
        scale = kwargs.get("scale")
        if precision is None or precision < 0 or scale is None or scale < 0:
            raise ModelDefinitionError(
                "Parameters scale and precision are required for field Decimal"
            )
class UUID(ModelFieldFactory, uuid.UUID):
    """
    UUID field factory that constructs Field classes and populates their values.
    """

    _type = uuid.UUID
    _sample = "uuid"

    def __new__(  # type: ignore # noqa CFQ002
        cls, *, uuid_format: str = "hex", **kwargs: Any
    ) -> Self:
        # uuid_format: "hex" stores 32 chars, "string" stores dashed 36 chars
        kwargs = {
            **kwargs,
            **{
                k: v
                for k, v in locals().items()
                if k not in ["cls", "__class__", "kwargs"]
            },
        }
        return super().__new__(cls, **kwargs)

    @classmethod
    def get_column_type(cls, **kwargs: Any) -> Any:
        """
        Return proper type of db column for given field type.
        Accepts required and optional parameters that each column type accepts.

        :param kwargs: key, value pairs of sqlalchemy options
        :type kwargs: Any
        :return: initialized column with proper options
        :rtype: sqlalchemy Column
        """
        uuid_format = kwargs.get("uuid_format", "hex")
        return sqlalchemy_uuid.UUID(uuid_format=uuid_format)
if TYPE_CHECKING:  # pragma: nocover
    T = TypeVar("T", bound=E)

    # typing stub so type checkers see Enum fields as members of the enum itself
    def Enum(enum_class: Type[T], **kwargs: Any) -> T:
        pass

else:

    class Enum(ModelFieldFactory):
        """
        Enum field factory that constructs Field classes and populates their values.
        """

        _type = E
        _sample = None

        def __new__(  # type: ignore # noqa CFQ002
            cls, *, enum_class: Type[E], **kwargs: Any
        ) -> Self:
            # merge the explicit keyword-only params (captured via locals) into kwargs
            kwargs = {
                **kwargs,
                **{
                    k: v
                    for k, v in locals().items()
                    if k not in ["cls", "__class__", "kwargs"]
                },
            }
            return super().__new__(cls, **kwargs)

        @classmethod
        def validate(cls, **kwargs: Any) -> None:
            # enum_class must be an actual Enum subclass (an EnumMeta instance)
            enum_class = kwargs.get("enum_class")
            if enum_class is None or not isinstance(enum_class, EnumMeta):
                raise ModelDefinitionError("Enum Field choices must be EnumType")

        @classmethod
        def get_column_type(cls, **kwargs: Any) -> Any:
            # values are stored using sqlalchemy's native Enum column type
            enum_cls = kwargs.get("enum_class")
            return sqlalchemy.Enum(enum_cls)
collerek-ormar-c09209a/ormar/fields/parsers.py 0000664 0000000 0000000 00000005571 15130200524 0021400 0 ustar 00root root 0000000 0000000 import base64
import datetime
import decimal
import uuid
from typing import Any, Callable, Dict, Optional, Union
import pydantic
from pydantic_core import SchemaValidator, core_schema
try:
import orjson as json
except ImportError: # pragma: no cover
import json # type: ignore
def parse_bool(value: str) -> bool:
    """Decode a db string flag: only the literal "true" maps to True."""
    is_true = value == "true"
    return is_true
def encode_bool(value: bool) -> str:
    """Encode a python bool to the lowercase string stored in the db."""
    if value:
        return "true"
    return "false"
def encode_decimal(value: decimal.Decimal, precision: Optional[int] = None) -> float:
    """
    Round a Decimal to a float with the given number of digits after the
    decimal point; non-Decimal values are passed through untouched.
    Note: with precision=None, round() returns an int (builtin behavior).
    """
    if not isinstance(value, decimal.Decimal):
        return value
    return round(float(value), precision)
def encode_bytes(value: Union[str, bytes], represent_as_string: bool = False) -> str:
    """
    Convert bytes to str for serialization: base64 when represent_as_string
    is set, plain utf-8 decoding otherwise. Strings pass through unchanged.
    """
    if isinstance(value, str):
        return value
    if represent_as_string:
        return base64.b64encode(value).decode("utf-8")
    return value.decode("utf-8")
def decode_bytes(value: str, represent_as_string: bool = False) -> bytes:
    """
    Inverse of encode_bytes: base64-decode when represent_as_string is set,
    utf-8 encode otherwise. Bytes pass through unchanged.
    """
    if isinstance(value, bytes):
        return value
    if represent_as_string:
        return base64.b64decode(value)
    return value.encode("utf-8")
def encode_json(value: Any) -> Optional[str]:
    """
    Serialize a value to a JSON string: date/time values are converted to
    isoformat first, strings are re-dumped for orjson/json consistency and
    bytes output (produced by orjson) is decoded back to str.
    """
    if isinstance(value, (datetime.date, datetime.datetime, datetime.time)):
        value = value.isoformat()
    if isinstance(value, str):
        value = re_dump_value(value)
    else:
        value = json.dumps(value)
    if isinstance(value, bytes):
        value = value.decode("utf-8")
    return value
def re_dump_value(value: str) -> Union[str, bytes]:
    """
    Re-dumps value due to different string representation in orjson and json.
    Strings that are not valid JSON are returned unchanged (best effort).

    :param value: string to re-dump
    :type value: str
    :return: re-dumped value (bytes when orjson is in use)
    :rtype: Union[str, bytes]
    """
    try:
        parsed = json.loads(value)
    except json.JSONDecodeError:
        return value
    result: Union[str, bytes] = json.dumps(parsed)
    return result
# python-side encoders used when dumping model values to json-compatible form
ENCODERS_MAP: Dict[type, Callable] = {
    datetime.datetime: lambda x: x.isoformat(),
    datetime.date: lambda x: x.isoformat(),
    datetime.time: lambda x: x.isoformat(),
    pydantic.Json: encode_json,
    decimal.Decimal: encode_decimal,
    uuid.UUID: str,
    bytes: encode_bytes,
}

# sql encoders additionally stringify booleans on top of the python encoders
SQL_ENCODERS_MAP: Dict[type, Callable] = {bool: encode_bool, **ENCODERS_MAP}

# name (per python type) of the extra field attribute that must be passed
# to the matching coder as a second argument
ADDITIONAL_PARAMETERS_MAP: Dict[type, str] = {
    bytes: "represent_as_base64_str",
    decimal.Decimal: "decimal_places",
}

# decoders turning raw db/string values back into python types;
# datetime/date/time reuse pydantic-core validators for lenient parsing
DECODERS_MAP: Dict[type, Callable] = {
    bool: parse_bool,
    datetime.datetime: SchemaValidator(core_schema.datetime_schema()).validate_python,
    datetime.date: SchemaValidator(core_schema.date_schema()).validate_python,
    datetime.time: SchemaValidator(core_schema.time_schema()).validate_python,
    pydantic.Json: json.loads,
    decimal.Decimal: lambda x, precision: decimal.Decimal(
        x, context=decimal.Context(prec=precision)
    ),
    bytes: decode_bytes,
}
collerek-ormar-c09209a/ormar/fields/referential_actions.py 0000664 0000000 0000000 00000001234 15130200524 0023731 0 ustar 00root root 0000000 0000000 """
Gathers all referential actions by ormar.
"""
from enum import Enum
class ReferentialAction(Enum):
    """
    Because the database management system(DBMS) enforces referential constraints,
    it must ensure data integrity
    if rows in a referenced table are to be deleted (or updated).
    If dependent rows in referencing tables still exist,
    those references have to be considered.
    SQL specifies 5 different referential actions
    that shall take place in such occurrences.
    """

    # delete/update dependent rows together with the referenced row
    CASCADE: str = "CASCADE"
    # reject delete/update of the referenced row while references exist
    RESTRICT: str = "RESTRICT"
    # set the referencing columns to NULL
    SET_NULL: str = "SET NULL"
    # set the referencing columns to their column defaults
    SET_DEFAULT: str = "SET DEFAULT"
    # maps to SQL's "NO ACTION" keyword
    DO_NOTHING: str = "NO ACTION"
collerek-ormar-c09209a/ormar/fields/sqlalchemy_encrypted.py 0000664 0000000 0000000 00000015521 15130200524 0024134 0 ustar 00root root 0000000 0000000 # inspired by sqlalchemy-utils (https://github.com/kvesteri/sqlalchemy-utils)
import abc
import base64
from enum import Enum
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Type, Union
import sqlalchemy.types as types
from sqlalchemy.engine import Dialect
import ormar # noqa: I100, I202
from ormar import ModelDefinitionError # noqa: I202, I100
from ormar.fields.parsers import ADDITIONAL_PARAMETERS_MAP
cryptography = None
try: # pragma: nocover
import cryptography # type: ignore
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
except ImportError: # pragma: nocover
pass
if TYPE_CHECKING: # pragma: nocover
from ormar import BaseField
class EncryptBackend(abc.ABC):
    """
    Base class for encryption backends used by EncryptedString columns.
    Subclasses implement either one-way (hash) or two-way (encrypt/decrypt)
    schemes on top of a SHA256-derived engine key.
    """

    def _refresh(self, key: Union[str, bytes]) -> None:
        # derive a fixed-size engine key from the user-provided secret
        # via SHA256 and hand it to the concrete backend
        if isinstance(key, str):
            key = key.encode()
        digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
        digest.update(key)
        engine_key = digest.finalize()
        self._initialize_backend(engine_key)

    @abc.abstractmethod
    def _initialize_backend(self, secret_key: bytes) -> None:  # pragma: nocover
        pass

    @abc.abstractmethod
    def encrypt(self, value: Any) -> str:  # pragma: nocover
        pass

    @abc.abstractmethod
    def decrypt(self, value: Any) -> str:  # pragma: nocover
        pass
class HashBackend(EncryptBackend):
    """
    One-way hashing - in example for passwords, no way to decrypt the value!
    Stored value is hex(SHA512(secret_key || value)).
    """

    def _initialize_backend(self, secret_key: bytes) -> None:
        self.secret_key = base64.urlsafe_b64encode(secret_key)

    def encrypt(self, value: Any) -> str:
        if not isinstance(value, str):  # pragma: nocover
            value = repr(value)
        value = value.encode()
        # the secret key is mixed into the digest so equal values hashed
        # with different secrets do not collide
        digest = hashes.Hash(hashes.SHA512(), backend=default_backend())
        digest.update(self.secret_key)
        digest.update(value)
        hashed_value = digest.finalize()
        return hashed_value.hex()

    def decrypt(self, value: Any) -> str:
        # hashing is one-way: the stored hash is returned as-is
        if not isinstance(value, str):  # pragma: nocover
            value = str(value)
        return value
class FernetBackend(EncryptBackend):
    """
    Two-way encryption, data stored in db are encrypted but decrypted during query.
    Uses the symmetric Fernet scheme from the cryptography package.
    """

    def _initialize_backend(self, secret_key: bytes) -> None:
        # Fernet requires a urlsafe-base64-encoded 32-byte key
        self.secret_key = base64.urlsafe_b64encode(secret_key)
        self.fernet = Fernet(self.secret_key)

    def encrypt(self, value: Any) -> str:
        if not isinstance(value, str):
            value = repr(value)
        value = value.encode()
        encrypted = self.fernet.encrypt(value)
        return encrypted.decode("utf-8")

    def decrypt(self, value: Any) -> str:
        if not isinstance(value, str):  # pragma: nocover
            value = str(value)
        decrypted: Union[str, bytes] = self.fernet.decrypt(value.encode())
        if not isinstance(decrypted, str):
            decrypted = decrypted.decode("utf-8")
        return decrypted
class EncryptBackends(Enum):
    """Available encryption backend choices for encrypted columns."""

    NONE = 0  # no encryption
    FERNET = 1  # two-way symmetric encryption
    HASH = 2  # one-way hashing (e.g. passwords)
    CUSTOM = 3  # user-supplied EncryptBackend subclass
# maps backend enum choices to implementations; CUSTOM/NONE are absent on
# purpose - lookups for them fall back to the user-provided custom backend
BACKENDS_MAP = {
    EncryptBackends.FERNET: FernetBackend,
    EncryptBackends.HASH: HashBackend,
}
class EncryptedString(types.TypeDecorator):
    """
    Used to store encrypted values in a database.
    Values are encoded and encrypted on bind with the configured backend and
    stored as TEXT; on load they are decrypted and coerced back to the
    wrapped field's python type.
    """

    impl = types.TypeEngine

    def __init__(
        self,
        encrypt_secret: Union[str, Callable],
        encrypt_backend: EncryptBackends = EncryptBackends.FERNET,
        encrypt_custom_backend: Optional[Type[EncryptBackend]] = None,
        **kwargs: Any,
    ) -> None:
        _field_type = kwargs.pop("_field_type")
        super().__init__()
        if not cryptography:  # pragma: nocover
            raise ModelDefinitionError(
                "In order to encrypt a column 'cryptography' is required!"
            )
        # fall back to the custom backend when the enum entry is not mapped
        backend = BACKENDS_MAP.get(encrypt_backend, encrypt_custom_backend)
        if (
            not backend
            or not isinstance(backend, type)
            or not issubclass(backend, EncryptBackend)
        ):
            raise ModelDefinitionError("Wrong or no encrypt backend provided!")
        self.backend: EncryptBackend = backend()
        self._field_type: "BaseField" = _field_type
        self._underlying_type: Any = _field_type.column_type
        # secret may be a callable to support key rotation - resolved per use
        self._key: Union[str, Callable] = encrypt_secret
        type_ = self._field_type.__type__
        if type_ is None:  # pragma: nocover
            raise ModelDefinitionError(
                f"Improperly configured field " f"{self._field_type.name}"
            )
        self.type_: Any = type_

    def __repr__(self) -> str:  # pragma: nocover
        return "TEXT()"

    def load_dialect_impl(self, dialect: Dialect) -> Any:
        # encrypted payloads are always stored as TEXT regardless of dialect
        return dialect.type_descriptor(types.TEXT())

    def _refresh(self) -> None:
        # re-derive the backend key (supports callable secrets / rotation)
        key = self._key() if callable(self._key) else self._key
        self.backend._refresh(key)

    def process_bind_param(self, value: Any, dialect: Dialect) -> Optional[str]:
        """Encode and encrypt a python value before it is sent to the db."""
        if value is None:
            return value
        self._refresh()
        try:
            # prefer the wrapped column type's own bind processing
            value = self._underlying_type.process_bind_param(value, dialect)
        except AttributeError:
            # plain sqlalchemy types expose no processor - use ormar encoders
            encoder, additional_parameter = self._get_coder_type_and_params(
                coders=ormar.SQL_ENCODERS_MAP
            )
            if encoder is not None:
                params = [value] + (
                    [additional_parameter] if additional_parameter else []
                )
                value = encoder(*params)
        encrypted_value = self.backend.encrypt(value)
        return encrypted_value

    def process_result_value(self, value: Any, dialect: Dialect) -> Any:
        """Decrypt a db value and coerce it back to the field's python type."""
        if value is None:  # pragma: no cover
            return value
        self._refresh()
        decrypted_value = self.backend.decrypt(value)
        try:
            return self._underlying_type.process_result_value(decrypted_value, dialect)
        except AttributeError:
            decoder, additional_parameter = self._get_coder_type_and_params(
                coders=ormar.DECODERS_MAP
            )
            if decoder is not None:
                params = [decrypted_value] + (
                    [additional_parameter] if additional_parameter else []
                )
                return decoder(*params)  # type: ignore
            # last resort: let the python type's constructor parse the string
            return self._field_type.__type__(decrypted_value)  # type: ignore

    def _get_coder_type_and_params(
        self, coders: Dict[type, Callable]
    ) -> Tuple[Optional[Callable], Optional[str]]:
        # look up the en/decoder for the field's python type plus the value of
        # an extra field attribute (e.g. decimal_places) some coders require
        coder = coders.get(self.type_, None)
        additional_parameter: Optional[str] = None
        if self.type_ in ADDITIONAL_PARAMETERS_MAP:
            additional_parameter = getattr(
                self._field_type, ADDITIONAL_PARAMETERS_MAP[self.type_]
            )
        return coder, additional_parameter
collerek-ormar-c09209a/ormar/fields/sqlalchemy_uuid.py 0000664 0000000 0000000 00000002700 15130200524 0023100 0 ustar 00root root 0000000 0000000 import uuid
from typing import Any, Optional
from sqlalchemy import CHAR
from sqlalchemy.engine import Dialect
from sqlalchemy.types import TypeDecorator
class UUID(TypeDecorator):
    """
    Platform-independent GUID type.
    Uses CHAR(36) if in a string mode, otherwise uses CHAR(32), to store UUID.
    For details for different methods check documentation of parent class.
    """

    impl = CHAR

    def __init__(self, *args: Any, uuid_format: str = "hex", **kwargs: Any) -> None:
        # uuid_format: "string" keeps dashes (36 chars), "hex" strips them (32)
        super().__init__(*args, **kwargs)
        self.uuid_format = uuid_format

    def __repr__(self) -> str:  # pragma: nocover
        if self.uuid_format == "string":
            return "CHAR(36)"
        return "CHAR(32)"

    def load_dialect_impl(self, dialect: Dialect) -> Any:
        return (
            dialect.type_descriptor(CHAR(36))
            if self.uuid_format == "string"
            else dialect.type_descriptor(CHAR(32))
        )

    def process_bind_param(self, value: Any, dialect: Dialect) -> Optional[str]:
        # NOTE(review): in hex mode this reads value.int, i.e. it assumes the
        # bound value is a uuid.UUID instance - confirm callers never pass str
        if value is None:
            return value
        return str(value) if self.uuid_format == "string" else "%.32x" % value.int

    def process_result_value(
        self, value: Optional[str], dialect: Dialect
    ) -> Optional[uuid.UUID]:
        # uuid.UUID() accepts both the 32-char hex and the dashed 36-char form
        if value is None:  # pragma: no cover
            return value
        if not isinstance(value, uuid.UUID):
            return uuid.UUID(value)
        return value  # pragma: nocover
collerek-ormar-c09209a/ormar/fields/through_field.py 0000664 0000000 0000000 00000004016 15130200524 0022535 0 ustar 00root root 0000000 0000000 import sys
from typing import TYPE_CHECKING, Any, Optional, Type, Union
from ormar.fields.base import BaseField
from ormar.fields.foreign_key import ForeignKeyField
if TYPE_CHECKING: # pragma no cover
from pydantic.typing import ForwardRef
from ormar import Model
if sys.version_info < (3, 7):
ToType = Type[Model]
else:
ToType = Union[Type[Model], ForwardRef]
def Through(  # noqa CFQ002
    to: "ToType",
    *,
    name: Optional[str] = None,
    related_name: Optional[str] = None,
    **kwargs: Any
) -> Any:
    """
    Despite a name it's a function that returns constructed ThroughField.
    It's a special field populated only for m2m relations.
    Accepts number of relation setting parameters as well as all BaseField ones.

    :param to: target related ormar Model
    :type to: Model class
    :param name: name of the database field - later called alias
    :type name: str
    :param related_name: name of reversed FK relation populated for you on to model;
        used for reversed FK and auto generated FK on through model
        in Many2Many relations
    :type related_name: str
    :param kwargs: all other args to be populated by BaseField
    :type kwargs: Any
    :return: ormar ForeignKeyField with relation to selected model
    :rtype: ForeignKeyField
    """
    nullable = kwargs.pop("nullable", False)
    owner = kwargs.pop("owner", None)
    # virtual=True and column_type=None: a through field carries no db column
    # of its own, it only exposes the through model on relation instances
    namespace = dict(
        __type__=to,
        to=to,
        through=None,
        alias=name,
        name=kwargs.pop("real_name", None),
        related_name=related_name,
        virtual=True,
        owner=owner,
        nullable=nullable,
        unique=False,
        column_type=None,
        primary_key=False,
        index=False,
        default=None,
        server_default=None,
        is_relation=True,
        is_through=True,
    )
    # dynamically build a concrete field class combining ThroughField and BaseField
    Field = type("Through", (ThroughField, BaseField), {})
    return Field(**namespace)
class ThroughField(ForeignKeyField):
    """
    Field class used to access the intermediate (through) model
    of a ManyToMany relation.

    Instances are produced by the ``Through`` factory function; the field
    is virtual (``is_through=True``) and does not create its own column.
    """
collerek-ormar-c09209a/ormar/models/ 0000775 0000000 0000000 00000000000 15130200524 0017354 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/models/__init__.py 0000664 0000000 0000000 00000001333 15130200524 0021465 0 ustar 00root root 0000000 0000000 """
Definition of Model, its parents NewBaseModel and mixins used by models.
Also defines a Metaclass that handles all constructions and relations registration,
as well as a vast number of helper functions for pydantic, sqlalchemy and relations.
"""
from ormar.models.newbasemodel import NewBaseModel # noqa I100
from ormar.models.model_row import ModelRow # noqa I100
from ormar.models.model import Model, T # noqa I100
from ormar.models.excludable import ExcludableItems # noqa I100
from ormar.models.utils import Extra # noqa I100
from ormar.models.ormar_config import OrmarConfig # noqa I100
__all__ = [
"NewBaseModel",
"Model",
"ModelRow",
"ExcludableItems",
"T",
"Extra",
"OrmarConfig",
]
collerek-ormar-c09209a/ormar/models/descriptors/ 0000775 0000000 0000000 00000000000 15130200524 0021715 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/models/descriptors/__init__.py 0000664 0000000 0000000 00000000444 15130200524 0024030 0 ustar 00root root 0000000 0000000 from ormar.models.descriptors.descriptors import (
BytesDescriptor,
JsonDescriptor,
PkDescriptor,
PydanticDescriptor,
RelationDescriptor,
)
__all__ = [
"PydanticDescriptor",
"RelationDescriptor",
"PkDescriptor",
"JsonDescriptor",
"BytesDescriptor",
]
collerek-ormar-c09209a/ormar/models/descriptors/descriptors.py 0000664 0000000 0000000 00000007001 15130200524 0024626 0 ustar 00root root 0000000 0000000 import base64
from typing import TYPE_CHECKING, Any, Type
from ormar.fields.parsers import decode_bytes, encode_json
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
class PydanticDescriptor:
    """
    Plain attribute descriptor: reads come straight from the instance
    ``__dict__`` and writes are routed through ormar's internal setter,
    resetting the save status on every assignment.
    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
        # value lives in the instance __dict__ under the field name
        return instance.__dict__.get(self.name)

    def __set__(self, instance: "Model", value: Any) -> None:
        instance._internal_set(self.name, value)
        instance.set_save_status(False)
class JsonDescriptor:
    """
    Descriptor for JSON fields: values are serialized to a string via
    ``encode_json`` on write and returned as stored on read.
    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
        return instance.__dict__.get(self.name)

    def __set__(self, instance: "Model", value: Any) -> None:
        instance._internal_set(self.name, encode_json(value))
        instance.set_save_status(False)
class BytesDescriptor:
    """
    Descriptor for bytes fields.

    On write, ``str`` input is decoded back into ``bytes`` (honouring the
    field's ``represent_as_base64_str`` flag); on read, raw bytes are
    rendered as a base64 string when that flag is set, so the value can be
    dumped to json.
    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
        value = instance.__dict__.get(self.name)
        field = instance.ormar_config.model_fields[self.name]
        needs_encoding = (
            value is not None
            and field.represent_as_base64_str
            and not isinstance(value, str)
        )
        return base64.b64encode(value).decode() if needs_encoding else value

    def __set__(self, instance: "Model", value: Any) -> None:
        field = instance.ormar_config.model_fields[self.name]
        if isinstance(value, str):
            value = decode_bytes(
                value=value, represent_as_string=field.represent_as_base64_str
            )
        instance._internal_set(self.name, value)
        instance.set_save_status(False)
class PkDescriptor:
    """
    Descriptor for the primary key column.

    Currently behaves exactly like PydanticDescriptor; kept as a separate
    class because multi-column primary keys are planned for the future.
    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
        return instance.__dict__.get(self.name)

    def __set__(self, instance: "Model", value: Any) -> None:
        instance._internal_set(self.name, value)
        instance.set_save_status(False)
class RelationDescriptor:
    """
    Descriptor for relation fields.

    Writes are routed through ``expand_relationship``, which initializes the
    related model and registers it in the RelationManager before it lands in
    ``__dict__``. Reads are resolved through the instance relation manager
    (``_orm``).
    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
        if self.name not in instance._orm:
            return None  # pragma no cover
        return instance._orm.get(self.name)  # type: ignore

    def __set__(self, instance: "Model", value: Any) -> None:
        instance.ormar_config.model_fields[self.name].expand_relationship(
            value=value, child=instance
        )
        # list values mean a reverse/m2m side - those do not dirty the model
        if not isinstance(instance.__dict__.get(self.name), list):
            instance.set_save_status(False)
collerek-ormar-c09209a/ormar/models/excludable.py 0000664 0000000 0000000 00000023335 15130200524 0022044 0 ustar 00root root 0000000 0000000 from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Type, Union
from ormar.queryset.utils import get_relationship_alias_model_and_str
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
@dataclass
class Excludable:
    """
    Holds two sets of field names for one model/alias: fields to include
    and fields to exclude.
    """

    include: Set = field(default_factory=set)
    exclude: Set = field(default_factory=set)

    def get_copy(self) -> "Excludable":
        """
        Returns an independent copy of self (sets copied too) so callers
        can modify it without mutating this instance.

        :return: copy of self with copied sets
        :rtype: ormar.models.excludable.Excludable
        """
        return self.__class__(
            include=set(self.include), exclude=set(self.exclude)
        )

    def set_values(self, value: Set, is_exclude: bool) -> None:
        """
        Adds the given values to the include or exclude set.

        :param value: set of values to add
        :type value: set
        :param is_exclude: True appends to exclude, False to include
        :type is_exclude: bool
        """
        target = self.exclude if is_exclude else self.include
        target.update(value)

    def is_included(self, key: str) -> bool:
        """
        Checks if field is included: an empty include set means "include
        everything", Ellipsis in the set also includes everything.

        :param key: key to check
        :type key: str
        :return: result of the check
        :rtype: bool
        """
        if not self.include:
            return True
        return ... in self.include or key in self.include

    def is_excluded(self, key: str) -> bool:
        """
        Checks if field is excluded: an empty exclude set excludes nothing,
        Ellipsis in the set excludes everything.

        :param key: key to check
        :type key: str
        :return: result of the check
        :rtype: bool
        """
        if not self.exclude:
            return False
        return ... in self.exclude or key in self.exclude
class ExcludableItems:
    """
    Keeps a dictionary of Excludables by alias + model_name keys
    to allow quick lookup by nested models without need to traverse
    deeply nested dictionaries and passing include/exclude around
    """

    def __init__(self) -> None:
        # keyed by f"{alias}_{model_name}" (bare model name when no alias)
        self.items: Dict[str, Excludable] = dict()

    @classmethod
    def from_excludable(cls, other: "ExcludableItems") -> "ExcludableItems":
        """
        Copy passed ExcludableItems to avoid inplace modifications.

        :param other: other excludable items to be copied
        :type other: ormar.models.excludable.ExcludableItems
        :return: copy of other
        :rtype: ormar.models.excludable.ExcludableItems
        """
        new_excludable = cls()
        for key, value in other.items.items():
            # get_copy() also copies the underlying include/exclude sets
            new_excludable.items[key] = value.get_copy()
        return new_excludable

    def include_entry_count(self) -> int:
        """
        Returns count of include items inside

        :return: sum of include-set sizes across all registered Excludables
        :rtype: int
        """
        count = 0
        for key in self.items.keys():
            count += len(self.items[key].include)
        return count

    def get(self, model_cls: Type["Model"], alias: str = "") -> Excludable:
        """
        Return Excludable for given model and alias.

        Lazily creates and registers an empty Excludable when none exists
        for the key yet, so callers always receive a usable instance.

        :param model_cls: target model to check
        :type model_cls: ormar.models.metaclass.ModelMetaclass
        :param alias: table alias from relation manager
        :type alias: str
        :return: Excludable for given model and alias
        :rtype: ormar.models.excludable.Excludable
        """
        key = f"{alias + '_' if alias else ''}{model_cls.get_name(lower=True)}"
        excludable = self.items.get(key)
        if not excludable:
            excludable = Excludable()
            self.items[key] = excludable
        return excludable

    def build(
        self,
        items: Union[List[str], str, Tuple[str], Set[str], Dict],
        model_cls: Type["Model"],
        is_exclude: bool = False,
    ) -> None:
        """
        Receives the one of the types of items and parses them as to achieve
        a end situation with one excludable per alias/model in relation.
        Each excludable has two sets of values - one to include, one to exclude.

        :param items: values to be included or excluded
        :type items: Union[List[str], str, Tuple[str], Set[str], Dict]
        :param model_cls: source model from which relations are constructed
        :type model_cls: ormar.models.metaclass.ModelMetaclass
        :param is_exclude: flag if items should be included or excluded
        :type is_exclude: bool
        """
        if isinstance(items, str):
            items = {items}
        if isinstance(items, Dict):
            self._traverse_dict(
                values=items,
                source_model=model_cls,
                model_cls=model_cls,
                is_exclude=is_exclude,
            )
        else:
            items = set(items)
            # nested entries ("rel__field") are resolved per related model
            nested_items = set(x for x in items if "__" in x)
            items.difference_update(nested_items)
            self._set_excludes(
                items=items,
                model_name=model_cls.get_name(lower=True),
                is_exclude=is_exclude,
            )
            if nested_items:
                self._traverse_list(
                    values=nested_items, model_cls=model_cls, is_exclude=is_exclude
                )

    def _set_excludes(
        self, items: Set, model_name: str, is_exclude: bool, alias: str = ""
    ) -> None:
        """
        Sets set of values to be included or excluded for given key and model.

        :param items: items to include/exclude
        :type items: set
        :param model_name: name of model to construct key
        :type model_name: str
        :param is_exclude: flag if values should be included or excluded
        :type is_exclude: bool
        :param alias: table alias used as the key prefix (may be empty)
        :type alias: str
        """
        key = f"{alias + '_' if alias else ''}{model_name}"
        excludable = self.items.get(key)
        if not excludable:
            excludable = Excludable()
        excludable.set_values(value=items, is_exclude=is_exclude)
        self.items[key] = excludable

    def _traverse_dict(  # noqa: CFQ002
        self,
        values: Dict,
        source_model: Type["Model"],
        model_cls: Type["Model"],
        is_exclude: bool,
        related_items: Optional[List] = None,
        alias: str = "",
    ) -> None:
        """
        Goes through dict of nested values and construct/update Excludables.

        :param values: items to include/exclude
        :type values: Dict
        :param source_model: source model from which relations are constructed
        :type source_model: ormar.models.metaclass.ModelMetaclass
        :param model_cls: model from which current relation is constructed
        :type model_cls: ormar.models.metaclass.ModelMetaclass
        :param is_exclude: flag if values should be included or excluded
        :type is_exclude: bool
        :param related_items: list of names of related fields chain
        :type related_items: List
        :param alias: alias of relation
        :type alias: str
        """
        self_fields = set()
        # copy so that sibling branches of the recursion stay independent
        related_items = related_items[:] if related_items else []
        for key, value in values.items():
            if value is ...:
                # Ellipsis marks a plain field of the current model itself
                self_fields.add(key)
            elif isinstance(value, set):
                # a set one level deeper belongs to the related model
                (
                    table_prefix,
                    target_model,
                    _,
                    _,
                ) = get_relationship_alias_model_and_str(
                    source_model=source_model, related_parts=related_items + [key]
                )
                self._set_excludes(
                    items=value,
                    model_name=target_model.get_name(),
                    is_exclude=is_exclude,
                    alias=table_prefix,
                )
            else:
                # dict
                related_items.append(key)
                (
                    table_prefix,
                    target_model,
                    _,
                    _,
                ) = get_relationship_alias_model_and_str(
                    source_model=source_model, related_parts=related_items
                )
                self._traverse_dict(
                    values=value,
                    source_model=source_model,
                    model_cls=target_model,
                    is_exclude=is_exclude,
                    related_items=related_items,
                    alias=table_prefix,
                )
        if self_fields:
            self._set_excludes(
                items=self_fields,
                model_name=model_cls.get_name(),
                is_exclude=is_exclude,
                alias=alias,
            )

    def _traverse_list(
        self, values: Set[str], model_cls: Type["Model"], is_exclude: bool
    ) -> None:
        """
        Goes through list of values and construct/update Excludables.

        :param values: items to include/exclude
        :type values: set
        :param model_cls: model from which current relation is constructed
        :type model_cls: ormar.models.metaclass.ModelMetaclass
        :param is_exclude: flag if values should be included or excluded
        :type is_exclude: bool
        """
        # here we have only nested related keys
        for key in values:
            key_split = key.split("__")
            related_items, field_name = key_split[:-1], key_split[-1]
            (table_prefix, target_model, _, _) = get_relationship_alias_model_and_str(
                source_model=model_cls, related_parts=related_items
            )
            self._set_excludes(
                items={field_name},
                model_name=target_model.get_name(),
                is_exclude=is_exclude,
                alias=table_prefix,
            )
collerek-ormar-c09209a/ormar/models/helpers/ 0000775 0000000 0000000 00000000000 15130200524 0021016 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/models/helpers/__init__.py 0000664 0000000 0000000 00000002430 15130200524 0023126 0 ustar 00root root 0000000 0000000 from ormar.models.helpers.models import (
check_required_config_parameters,
config_field_not_set,
extract_annotations_and_default_vals,
populate_default_options_values,
)
from ormar.models.helpers.pydantic import (
get_potential_fields,
get_pydantic_base_orm_config,
merge_or_generate_pydantic_config,
)
from ormar.models.helpers.relations import (
alias_manager,
expand_reverse_relationships,
register_relation_in_alias_manager,
)
from ormar.models.helpers.sqlalchemy import (
    populate_config_sqlalchemy_table_if_required,
    populate_config_tablename_columns_and_pk,
    remove_excluded_parent_fields,
    sqlalchemy_columns_from_model_fields,
)
from ormar.models.helpers.validation import modify_schema_example
__all__ = [
"expand_reverse_relationships",
"extract_annotations_and_default_vals",
"populate_config_tablename_columns_and_pk",
"populate_config_sqlalchemy_table_if_required",
"populate_default_options_values",
"alias_manager",
"register_relation_in_alias_manager",
"get_potential_fields",
"get_pydantic_base_orm_config",
"merge_or_generate_pydantic_config",
"check_required_config_parameters",
"sqlalchemy_columns_from_model_fields",
"config_field_not_set",
"remove_excluded_parent_fields",
"modify_schema_example",
]
collerek-ormar-c09209a/ormar/models/helpers/models.py 0000664 0000000 0000000 00000013404 15130200524 0022655 0 ustar 00root root 0000000 0000000 import itertools
import sqlite3
from typing import TYPE_CHECKING, Any, Dict, ForwardRef, List, Tuple, Type
import pydantic
import ormar # noqa: I100
from ormar.models.helpers.pydantic import populate_pydantic_default_values
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.fields import BaseField
def is_field_an_forward_ref(field: "BaseField") -> bool:
    """
    Checks whether a relation field still points at an unresolved
    ForwardRef (either its target model or its through model) that needs
    to be updated before proceeding.

    :param field: model field to verify
    :type field: Type[BaseField]
    :return: True when the field is a relation with a ForwardRef target
    :rtype: bool
    """
    if not field.is_relation:
        return False
    return field.to.__class__ == ForwardRef or field.through.__class__ == ForwardRef
def populate_default_options_values( # noqa: CCR001
    new_model: Type["Model"], model_fields: Dict
) -> None:
    """
    Applies default OrmarConfig values and registers the previously
    extracted model fields on the newly created model.

    Flags the model for forward-reference updates when any relation still
    points at a ForwardRef, and caches the sets of json and bytes fields
    for fast lookup later.

    :param new_model: newly constructed Model
    :type new_model: Model class
    :param model_fields: dict of model fields
    :type model_fields: Union[Dict[str, type], Dict]
    """
    config = new_model.ormar_config
    config.model_fields.update(model_fields)
    if any(is_field_an_forward_ref(field) for field in model_fields.values()):
        config.requires_ref_update = True

    json_fields = set()
    bytes_fields = set()
    for name, field in model_fields.items():
        if field.__type__ == pydantic.Json:
            json_fields.add(name)
        if field.__type__ is bytes:
            bytes_fields.add(name)
    new_model._json_fields = json_fields
    new_model._bytes_fields = bytes_fields

    new_model.__relation_map__ = None
    new_model.__ormar_fields_validators__ = None
class Connection(sqlite3.Connection):
    """
    sqlite3 Connection factory that enables foreign key enforcement
    (``PRAGMA foreign_keys=1``) as soon as the connection is created.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:  # pragma: no cover
        sqlite3.Connection.__init__(self, *args, **kwargs)
        self.execute("PRAGMA foreign_keys=1;")
def substitue_backend_pool_for_sqlite(new_model: Type["Model"]) -> None:
    """
    Recreates the database backend connection pool for sqlite3 with a
    Connection factory that enables foreign keys on every new connection.

    No-op for non-sqlite backends or when a factory is already configured.

    :param new_model: newly declared ormar Model
    :type new_model: Model class
    """
    backend = new_model.ormar_config.database._backend
    is_sqlite = backend._dialect.name == "sqlite"
    if is_sqlite and "factory" not in backend._options:  # pragma: no cover
        backend._options["factory"] = Connection
        previous_pool = backend._pool
        backend._pool = previous_pool.__class__(
            backend._database_url, **backend._options
        )
def check_required_config_parameters(new_model: Type["Model"]) -> None:
    """
    Verifies that a concrete (non-abstract) ormar.Model has both database
    and metadata configured, and recreates the sqlite3 connection pool
    when applicable. Abstract models are exempt from all checks.

    :raises ModelDefinitionError: when database or metadata is missing
    :param new_model: newly declared ormar Model
    :type new_model: Model class
    """
    config = new_model.ormar_config
    if config.abstract:
        return
    if config.database is None:
        raise ormar.ModelDefinitionError(
            f"{new_model.__name__} does not have database defined."
        )
    substitue_backend_pool_for_sqlite(new_model=new_model)
    if config.metadata is None:
        raise ormar.ModelDefinitionError(
            f"{new_model.__name__} does not have metadata defined."
        )
def extract_annotations_and_default_vals(attrs: Dict) -> Tuple[Dict, Dict]:
    """
    Ensures the ``__annotations__`` entry exists in the class namespace and
    delegates extraction of ormar model_fields to
    ``populate_pydantic_default_values``.

    :param attrs: namespace of the class created
    :type attrs: Dict
    :return: namespace of the class updated, dict of extracted model_fields
    :rtype: Tuple[Dict, Dict]
    """
    attrs.setdefault("__annotations__", {})
    return populate_pydantic_default_values(attrs)
def group_related_list(list_: List) -> Dict:
    """
    Converts a flat list of related-model paths ("a__b__c") into a nested
    dictionary grouped by the leading relation name, so nested models can
    be traversed in the right order without repetition.

    Sample: ["people__houses", "people__cars__models", "people__cars__colors"]
    becomes: {'people': {'houses': [], 'cars': ['colors', 'models']}}
    The result dictionary is ordered by the length of its values.
    Note: the input list is sorted in place by the first path segment.

    :param list_: list of related models used in select related
    :type list_: List[str]
    :return: nested grouping of the input paths
    :rtype: Dict[str, List]
    """
    result_dict: Dict[str, Any] = {}
    list_.sort(key=lambda x: x.split("__")[0])
    for key, group in itertools.groupby(list_, key=lambda x: x.split("__")[0]):
        # strip the leading segment and keep only paths that go deeper
        remainders = sorted(
            "__".join(part.split("__")[1:])
            for part in group
            if len(part.split("__")) > 1
        )
        if any("__" in item for item in remainders):
            result_dict[key] = group_related_list(remainders)
        else:
            result_dict.setdefault(key, []).extend(remainders)
    return dict(sorted(result_dict.items(), key=lambda item: len(item[1])))
def config_field_not_set(model: Type["Model"], field_name: str) -> bool:
    """
    Checks whether the given attribute on model.ormar_config is unset,
    i.e. falsy (in practice meaning None, as it's None or an ormar Field
    only).

    :param model: newly constructed model
    :type model: Model class
    :param field_name: name of the ormar field
    :type field_name: str
    :return: True when the config attribute is falsy
    :rtype: bool
    """
    current_value = getattr(model.ormar_config, field_name)
    return not current_value
collerek-ormar-c09209a/ormar/models/helpers/pydantic.py 0000664 0000000 0000000 00000010100 15130200524 0023173 0 ustar 00root root 0000000 0000000 from types import MappingProxyType
from typing import TYPE_CHECKING, Dict, Optional, Tuple, Type, Union
import pydantic
from pydantic import ConfigDict
from pydantic.fields import FieldInfo
from ormar.exceptions import ModelDefinitionError # noqa: I100, I202
from ormar.fields import BaseField
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.fields import ManyToManyField
def create_pydantic_field(
    field_name: str, model: Type["Model"], model_field: "ManyToManyField"
) -> None:
    """
    Registers a pydantic field named ``field_name`` pointing at ``model``
    on the through model (taken from ``model_field``), then rebuilds the
    through model so pydantic picks up the new field.

    :param field_name: field name to register
    :type field_name: str
    :param model: type of field to register
    :type model: Model class
    :param model_field: relation field from which through model is extracted
    :type model_field: ManyToManyField class
    """
    through_model = model_field.through
    field_info = FieldInfo.from_annotated_attribute(
        annotation=Optional[model], default=None  # type: ignore
    )
    through_model.model_fields[field_name] = field_info
    owner = model_field.owner
    through_model.model_rebuild(
        force=True, _types_namespace={owner.__name__: owner}
    )
def populate_pydantic_default_values(attrs: Dict) -> Tuple[Dict, Dict]:
    """
    Extracts ormar fields from the class namespace and rewrites the
    annotations of those fields to the (possibly dynamically constructed)
    pydantic types declared on them, so pydantic can build its own fields.

    Each extracted field gets its ``name`` attribute set to the attribute
    name it was declared under.

    :param attrs: current class namespace
    :type attrs: Dict
    :return: namespace of the class updated, dict of extracted model_fields
    :rtype: Tuple[Dict, Dict]
    """
    model_fields = {}
    for field_name, field in get_potential_fields(attrs).items():
        field.name = field_name
        model_fields[field_name] = field
        # nullable fields are exposed to pydantic as Optional
        default_type = (
            Optional[field.__type__] if field.nullable else field.__type__
        )
        if field.__type__ != field.__pydantic_type__:
            annotation = field.__pydantic_type__
        else:
            annotation = default_type
        attrs["__annotations__"][field_name] = annotation
    return attrs, model_fields
def merge_or_generate_pydantic_config(attrs: Dict, name: str) -> None:
    """
    Merges a user-provided ``model_config`` with ormar's default pydantic
    config (user keys win), or installs the default when none was provided.
    Updates ``attrs`` in place.

    :raises ModelDefinitionError: when the provided config is not a dict
    :rtype: None
    """
    default_config = get_pydantic_base_orm_config()
    if "model_config" not in attrs:
        attrs["model_config"] = default_config
        return
    provided_config = attrs["model_config"]
    if not isinstance(provided_config, dict):
        raise ModelDefinitionError(
            f"Config provided for class {name} has to be a dictionary."
        )
    attrs["model_config"] = {**default_config, **provided_config}
def get_pydantic_base_orm_config() -> pydantic.ConfigDict:
    """
    Builds the default pydantic config used by every ormar model:
    assignment validation enabled and bytes serialized as base64 in json.

    (Note: the previous docstring claimed ``orm_mode`` was set here, which
    was never the case - the config only sets the two options below.)

    :return: default pydantic config
    :rtype: pydantic ConfigDict
    """
    return ConfigDict(validate_assignment=True, ser_json_bytes="base64")
def get_potential_fields(attrs: Union[Dict, MappingProxyType]) -> Dict:
    """
    Collects every entry in the class namespace that is an ormar Field
    (either a BaseField subclass or a BaseField instance).

    :param attrs: current class namespace
    :type attrs: Dict
    :return: extracted fields that are ormar Fields
    :rtype: Dict
    """

    def _is_field(candidate):
        # both declaration styles count: BaseField subclass or instance
        if isinstance(candidate, type):
            return issubclass(candidate, BaseField)
        return isinstance(candidate, BaseField)

    return {name: value for name, value in attrs.items() if _is_field(value)}
collerek-ormar-c09209a/ormar/models/helpers/related_names_validation.py 0000664 0000000 0000000 00000003442 15130200524 0026410 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict, ForwardRef, List, Optional, Type
import ormar # noqa: I100
if TYPE_CHECKING: # pragma no cover
from ormar import Model
def validate_related_names_in_relations( # noqa CCR001
    model_fields: Dict, new_model: Type["Model"]
) -> None:
    """
    Validates ``related_name`` values of all relation fields.

    For every related model, each relation must carry a distinct
    related_name; only one relation per target may rely on the default
    (empty) name, which ormar populates as model.name.lower() + 's'.

    :raises ModelDefinitionError: if validation of related_names fail
    :param model_fields: dictionary of declared ormar model fields
    :type model_fields: Dict[str, ormar.Field]
    :param new_model: model on which the fields were declared
    :type new_model: Model class
    """
    already_registered: Dict[str, List[Optional[str]]] = {}
    relation_fields = (f for f in model_fields.values() if f.is_relation)
    for field in relation_fields:
        # ForwardRef targets are keyed by their string representation
        if field.to.__class__ == ForwardRef:
            to_name = str(field.to)
        else:
            to_name = field.to.get_name()
        used_names = already_registered.setdefault(to_name, [])
        if field.related_name in used_names:
            raise ormar.ModelDefinitionError(
                f"Multiple fields declared on {new_model.get_name(lower=False)} "
                f"model leading to {field.to.get_name(lower=False)} model without "
                f"related_name property set. \nThere can be only one relation with "
                f"default/empty name: '{new_model.get_name() + 's'}'"
                f"\nTip: provide different related_name for FK and/or M2M fields"
            )
        used_names.append(field.related_name)
collerek-ormar-c09209a/ormar/models/helpers/relations.py 0000664 0000000 0000000 00000033707 15130200524 0023402 0 ustar 00root root 0000000 0000000 import inspect
import warnings
from typing import TYPE_CHECKING, Any, ForwardRef, List, Optional, Type, Union, cast
from pydantic import BaseModel, create_model, field_serializer
from pydantic._internal._decorators import DecoratorInfos
from pydantic.fields import FieldInfo
from pydantic_core.core_schema import SerializerFunctionWrapHandler
import ormar
from ormar import ForeignKey, ManyToMany
from ormar.fields import Through
from ormar.models.descriptors import RelationDescriptor
from ormar.models.helpers.sqlalchemy import adjust_through_many_to_many_model
from ormar.relations import AliasManager
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.fields import ForeignKeyField, ManyToManyField
alias_manager = AliasManager()
def register_relation_on_build(field: "ForeignKeyField") -> None:
    """
    Registers a ForeignKey relation (including its reverse side) in the
    module-level alias_manager so a table_prefix can be assigned for sql
    joins.

    Relations are keyed by model name plus relation field name, which
    allows multiple distinct relations between the same two models.

    :param field: relation field
    :type field: ForeignKey class
    """
    reverse_name = field.get_source_related_name()
    alias_manager.add_relation_type(
        source_model=field.owner,
        relation_name=field.name,
        reverse_name=reverse_name,
    )
def register_many_to_many_relation_on_build(field: "ManyToManyField") -> None:
    """
    Registers both legs of an m2m relation (through model -> owner and
    through model -> target), each with its reverse side, in the
    alias_manager so joins through the intermediate table get proper
    table prefixes.

    By default relation name is a model.name.lower().

    :param field: relation field
    :type field: ManyToManyField class
    """
    through_model = field.through
    # leg pointing back at the owner model
    alias_manager.add_relation_type(
        source_model=through_model,
        relation_name=field.default_source_field_name(),
        reverse_name=field.get_source_related_name(),
    )
    # leg pointing at the target model
    alias_manager.add_relation_type(
        source_model=through_model,
        relation_name=field.default_target_field_name(),
        reverse_name=field.get_related_name(),
    )
def expand_reverse_relationship(model_field: "ForeignKeyField") -> None:
    """
    Registers the reverse side of the given relation on the related model,
    unless a reverse field is already present there.

    :param model_field: relation field to check
    :type model_field: ForeignKeyField
    :return: None
    :rtype: None
    """
    needs_registration = reverse_field_not_already_registered(
        model_field=model_field
    )
    if needs_registration:
        register_reverse_model_fields(model_field=model_field)
def expand_reverse_relationships(model: Type["Model"]) -> None:
    """
    Walks all fields of the given model and makes sure every forward
    relation has its reverse side registered on the related model.

    Through fields and fields still holding unresolved forward references
    are skipped.

    :param model: model on which relation should be checked and registered
    :type model: Model class
    """
    for model_field in list(model.ormar_config.model_fields.values()):
        if (
            not model_field.is_relation
            or model_field.has_unresolved_forward_refs()
            or model_field.is_through
        ):
            continue
        expand_reverse_relationship(
            model_field=cast("ForeignKeyField", model_field)
        )
def register_reverse_model_fields(model_field: "ForeignKeyField") -> None:
    """
    Registers reverse ForeignKey field on related model.

    By default it's name.lower()+'s' of the model on which relation is defined.
    But if the related_model name is provided it's registered with that name.
    Autogenerated reverse fields also set related_name to the original field name.

    :param model_field: original relation ForeignKey field
    :type model_field: relation Field
    """
    related_name = model_field.get_related_name()
    related_model_fields = model_field.to.ormar_config.model_fields
    if model_field.is_multi:
        # m2m: mirror the relation with swapped sides, reusing the same
        # through model; the through_* relation names are swapped as well
        related_model_fields[related_name] = ManyToMany( # type: ignore
            model_field.owner,
            through=model_field.through,
            name=related_name,
            virtual=True,
            related_name=model_field.name,
            owner=model_field.to,
            self_reference=model_field.self_reference,
            self_reference_primary=model_field.self_reference_primary,
            orders_by=model_field.related_orders_by,
            skip_field=model_field.skip_reverse,
            through_relation_name=model_field.through_reverse_relation_name,
            through_reverse_relation_name=model_field.through_relation_name,
        )
        # register foreign keys on through model
        model_field = cast("ManyToManyField", model_field)
        register_through_shortcut_fields(model_field=model_field)
        adjust_through_many_to_many_model(model_field=model_field)
    else:
        related_model_fields[related_name] = ForeignKey( # type: ignore
            model_field.owner,
            real_name=related_name,
            virtual=True,
            related_name=model_field.name,
            owner=model_field.to,
            self_reference=model_field.self_reference,
            orders_by=model_field.related_orders_by,
            skip_field=model_field.skip_reverse,
        )
    if not model_field.skip_reverse:
        # expose the reverse relation on the pydantic side as well;
        # models inside the annotation are replaced with copies to avoid
        # circular references during schema generation
        field_type = related_model_fields[related_name].__type__
        field_type = replace_models_with_copy(
            annotation=field_type, source_model_field=model_field.name
        )
        if not model_field.is_multi:
            # reverse FK can hold either a single parent or a list of children
            field_type = Union[field_type, List[field_type], None]  # type: ignore
        model_field.to.model_fields[related_name] = FieldInfo.from_annotated_attribute(
            annotation=field_type, default=None
        )
        add_field_serializer_for_reverse_relations(
            to_model=model_field.to, related_name=related_name
        )
        # rebuild so pydantic picks up the new field; pass every known
        # related model so string annotations can be resolved
        model_field.to.model_rebuild(
            force=True,
            _types_namespace={
                **{model_field.owner.__name__: model_field.owner},
                **{
                    field.to.__name__: field.to
                    for field in related_model_fields.values()
                    if field.is_relation and field.to.__class__ != ForwardRef
                },
            },
        )
    setattr(model_field.to, related_name, RelationDescriptor(name=related_name))
def add_field_serializer_for_reverse_relations(
    to_model: Type["Model"], related_name: str
) -> None:
    """
    Attaches a pydantic wrap-serializer for the reverse relation field on
    ``to_model`` that falls back to pk-only dicts when pydantic detects a
    circular reference during serialization.

    :param to_model: model on which the reverse relation lives
    :type to_model: Model class
    :param related_name: name of the reverse relation field to serialize
    :type related_name: str
    """
    def serialize(
        self: "Model", children: List["Model"], handler: SerializerFunctionWrapHandler
    ) -> Any:
        """
        Serialize a list of nodes, handling circular references
        by excluding the children.
        """
        try:
            with warnings.catch_warnings():
                warnings.filterwarnings(
                    "ignore", message="Pydantic serializer warnings"
                )
                return handler(children)
        except ValueError as exc: # pragma: no cover
            # pydantic signals recursion with a "Circular reference" ValueError;
            # anything else is a genuine error and must propagate
            if not str(exc).startswith("Circular reference"):
                raise exc
            result = []
            for child in children:
                # If there is one circular ref dump all children as pk only
                result.append({child.ormar_config.pkname: child.pk})
            return result
    decorator = field_serializer(related_name, mode="wrap", check_fields=False)(
        serialize
    )
    setattr(to_model, f"serialize_{related_name}", decorator)
    # DecoratorInfos.build will overwrite __pydantic_decorators__ on to_model,
    # deleting the previous decorators. We need to save them and then merge them.
    prev_decorators = getattr(to_model, "__pydantic_decorators__", DecoratorInfos())
    new_decorators = DecoratorInfos.build(to_model)
    prev_decorators.validators.update(new_decorators.validators)
    prev_decorators.field_validators.update(new_decorators.field_validators)
    prev_decorators.root_validators.update(new_decorators.root_validators)
    prev_decorators.field_serializers.update(new_decorators.field_serializers)
    prev_decorators.model_serializers.update(new_decorators.model_serializers)
    prev_decorators.model_validators.update(new_decorators.model_validators)
    prev_decorators.computed_fields.update(new_decorators.computed_fields)
    setattr(to_model, "__pydantic_decorators__", prev_decorators)
def replace_models_with_copy(
    annotation: Type, source_model_field: Optional[str] = None
) -> Any:
    """
    Replaces every ormar Model appearing in the annotation with a copy of that
    model to avoid circular references; List[...] and Union[...] annotations
    are rebuilt with their arguments processed recursively.

    :param annotation: annotation to replace models in
    :type annotation: Type
    :param source_model_field: name of the field the annotation originates from
    :type source_model_field: Optional[str]
    :return: annotation with replaced models
    :rtype: Type
    """
    if inspect.isclass(annotation) and issubclass(annotation, ormar.Model):
        return create_copy_to_avoid_circular_references(model=annotation)
    origin = getattr(annotation, "__origin__", None)
    if origin is list:
        inner = replace_models_with_copy(
            annotation=annotation.__args__[0],
            source_model_field=source_model_field,
        )
        return List[inner]  # type: ignore
    if origin is Union:
        replaced_args = tuple(
            replace_models_with_copy(
                annotation=arg, source_model_field=source_model_field
            )
            for arg in annotation.__args__
        )
        return Union[replaced_args]
    # anything else (plain types, other generics) is returned untouched
    return annotation
def create_copy_to_avoid_circular_references(model: Type["Model"]) -> Type["BaseModel"]:
    """
    Creates a subclass copy of the model (same name, model as base) via
    pydantic's create_model so that the copy carries its own schema and
    does not re-enter the original model's schema during serialization.

    :param model: model to copy
    :type model: Type["Model"]
    :return: freshly created subclass of the model
    :rtype: Type["BaseModel"]
    """
    new_model = create_model(
        model.__name__,
        __base__=model,
    )
    return new_model
def register_through_shortcut_fields(model_field: "ManyToManyField") -> None:
    """
    Registers m2m relation through shortcut on both ends of the relation.
    :param model_field: relation field defined in parent model
    :type model_field: ManyToManyField
    """
    through_model = model_field.through
    through_name = through_model.get_name(lower=True)
    related_name = model_field.get_related_name()
    # virtual Through field on the declaring (owner) side, pointing back to
    # the owner's m2m field name
    model_field.owner.ormar_config.model_fields[through_name] = Through(
        through_model,
        real_name=through_name,
        virtual=True,
        related_name=model_field.name,
        owner=model_field.owner,
        nullable=True,
    )
    # mirror Through field on the target side, pointing back via related_name
    model_field.to.ormar_config.model_fields[through_name] = Through(
        through_model,
        real_name=through_name,
        virtual=True,
        related_name=related_name,
        owner=model_field.to,
        nullable=True,
    )
    # descriptors expose the through model as an attribute on both ends
    setattr(model_field.owner, through_name, RelationDescriptor(name=through_name))
    setattr(model_field.to, through_name, RelationDescriptor(name=through_name))
def register_relation_in_alias_manager(field: "ForeignKeyField") -> None:
    """
    Registers the relation (and reverse relation) in alias manager.
    The m2m relations require registration of through model between
    actual end models of the relation.
    Delegates the actual registration to:
    m2m - register_many_to_many_relation_on_build
    fk - register_relation_on_build
    :param field: relation field
    :type field: ForeignKey or ManyToManyField class
    """
    if field.is_multi:
        # relations with unresolved ForwardRefs cannot be registered yet;
        # they are handled later once the forward references are updated
        if field.has_unresolved_forward_refs():
            return
        field = cast("ManyToManyField", field)
        register_many_to_many_relation_on_build(field=field)
    elif field.is_relation and not field.is_through:
        if field.has_unresolved_forward_refs():
            return
        register_relation_on_build(field=field)
def verify_related_name_dont_duplicate(
    related_name: str, model_field: "ForeignKeyField"
) -> None:
    """
    Verifies whether the used related_name (regardless of the fact if user defined or
    auto generated) is already used on related model, but is connected with other model
    than the one that we connect right now.
    :raises ModelDefinitionError: if name is already used but lead to different related
    model
    :param related_name: related name to check on the target model's fields
    :type related_name: str
    :param model_field: original relation ForeignKey field
    :type model_field: relation Field
    :return: None
    :rtype: None
    """
    fk_field = model_field.to.ormar_config.model_fields.get(related_name)
    if not fk_field:  # pragma: no cover
        return
    # NOTE(review): the ormar_config comparison presumably treats model copies
    # that share a config as the same target - confirm against callers.
    if (
        fk_field.to != model_field.owner
        and fk_field.to.ormar_config != model_field.owner.ormar_config
    ):
        raise ormar.ModelDefinitionError(
            f"Relation with related_name "
            f"'{related_name}' "
            f"leading to model "
            f"{model_field.to.get_name(lower=False)} "
            f"cannot be used on model "
            f"{model_field.owner.get_name(lower=False)} "
            f"because it's already used by model "
            f"{fk_field.to.get_name(lower=False)}"
        )
def reverse_field_not_already_registered(model_field: "ForeignKeyField") -> bool:
    """
    Checks whether neither the relation's related_name nor the owner model's
    name is already registered among the target model's fields.

    :raises ModelDefinitionError: if related name is already used but lead to
    different related model
    :param model_field: original relation ForeignKey field
    :type model_field: relation Field
    :return: result of the check
    :rtype: bool
    """
    target_fields = model_field.to.ormar_config.model_fields
    related_name = model_field.get_related_name()
    related_name_free = related_name not in target_fields
    owner_name_free = model_field.owner.get_name() not in target_fields
    if not related_name_free:
        # name clash - allowed only if it points to the very same model
        verify_related_name_dont_duplicate(
            related_name=related_name, model_field=model_field
        )
    if not owner_name_free:
        verify_related_name_dont_duplicate(
            related_name=model_field.owner.get_name(), model_field=model_field
        )
    return related_name_free and owner_name_free
collerek-ormar-c09209a/ormar/models/helpers/sqlalchemy.py 0000664 0000000 0000000 00000030013 15130200524 0023527 0 ustar 00root root 0000000 0000000 import logging
from typing import TYPE_CHECKING, Dict, ForwardRef, List, Optional, Tuple, Type, Union
import sqlalchemy
import ormar # noqa: I100, I202
from ormar.models.descriptors import RelationDescriptor
from ormar.models.helpers.pydantic import create_pydantic_field
from ormar.models.helpers.related_names_validation import (
validate_related_names_in_relations,
)
if TYPE_CHECKING: # pragma no cover
from ormar import BaseField, ForeignKeyField, ManyToManyField, Model
from ormar.models import NewBaseModel
from ormar.models.ormar_config import OrmarConfig
def adjust_through_many_to_many_model(model_field: "ManyToManyField") -> None:
    """
    Registers m2m relation on through model.
    Sets ormar.ForeignKey from through model to both child and parent models.
    Sets sqlalchemy.ForeignKey to both child and parent models.
    Sets pydantic fields with child and parent model types.
    :param model_field: relation field defined in parent model
    :type model_field: ManyToManyField
    """
    # "parent" refers to the target model (field.to), "child" to the
    # declaring model (field.owner)
    parent_name = model_field.default_target_field_name()
    child_name = model_field.default_source_field_name()
    model_fields = model_field.through.ormar_config.model_fields
    model_fields[parent_name] = ormar.ForeignKey(  # type: ignore
        model_field.to,
        real_name=parent_name,
        ondelete="CASCADE",
        owner=model_field.through,
    )
    model_fields[child_name] = ormar.ForeignKey(  # type: ignore
        model_field.owner,
        real_name=child_name,
        ondelete="CASCADE",
        owner=model_field.through,
    )
    # database-level FK columns appended to the through table
    create_and_append_m2m_fk(
        model=model_field.to, model_field=model_field, field_name=parent_name
    )
    create_and_append_m2m_fk(
        model=model_field.owner, model_field=model_field, field_name=child_name
    )
    # pydantic fields so through instances validate both relation ends
    create_pydantic_field(parent_name, model_field.to, model_field)
    create_pydantic_field(child_name, model_field.owner, model_field)
    # descriptors provide attribute access to the relations on the through model
    setattr(model_field.through, parent_name, RelationDescriptor(name=parent_name))
    setattr(model_field.through, child_name, RelationDescriptor(name=child_name))
def create_and_append_m2m_fk(
    model: Type["Model"], model_field: "ManyToManyField", field_name: str
) -> None:
    """
    Registers sqlalchemy Column with sqlalchemy.ForeignKey leading to the model.
    Newly created field is added to m2m relation
    through model OrmarConfig columns and table.
    :raises ModelDefinitionError: if the target model has no pk column
    :param field_name: name of the column to create
    :type field_name: str
    :param model: Model class to which FK should be created
    :type model: Model class
    :param model_field: field with ManyToMany relation
    :type model_field: ManyToManyField field
    """
    pk_alias = model.get_column_alias(model.ormar_config.pkname)
    pk_column = next(
        (col for col in model.ormar_config.columns if col.name == pk_alias), None
    )
    if pk_column is None:  # pragma: no cover
        raise ormar.ModelDefinitionError(
            "ManyToMany relation cannot lead to field without pk"
        )
    # the FK column mirrors the type of the target model's pk column
    column = sqlalchemy.Column(
        field_name,
        pk_column.type,
        sqlalchemy.schema.ForeignKey(
            model.ormar_config.tablename + "." + pk_alias,
            ondelete="CASCADE",
            onupdate="CASCADE",
            name=f"fk_{model_field.through.ormar_config.tablename}_{model.ormar_config.tablename}"
            f"_{field_name}_{pk_alias}",
        ),
    )
    model_field.through.ormar_config.columns.append(column)
    # replace_existing tolerates the column already being on the table
    model_field.through.ormar_config.table.append_column(column, replace_existing=True)
def check_pk_column_validity(
    field_name: str, field: "BaseField", pkname: Optional[str]
) -> Optional[str]:
    """
    Validates that only a single field per model is marked as primary key.

    Receives the name of a field marked as primary key together with the
    pkname registered so far; a non-None pkname means a second primary key
    column was declared, which is not allowed.

    :raises ModelDefinitionError: if pkname already set
    :param field_name: name of field
    :type field_name: str
    :param field: ormar.Field (currently unused, kept for interface stability)
    :type field: BaseField
    :param pkname: already set pkname
    :type pkname: Optional[str]
    :return: name of the field that should be set as pkname
    :rtype: str
    """
    if pkname is None:
        return field_name
    raise ormar.ModelDefinitionError("Only one primary key column is allowed.")
def sqlalchemy_columns_from_model_fields(
    model_fields: Dict, new_model: Type["Model"]
) -> Tuple[Optional[str], List[sqlalchemy.Column]]:
    """
    Iterates over declared on Model model fields and extracts fields that
    should be treated as database fields.
    If the model is empty it sets mandatory id field as primary key
    (used in through models in m2m relations).
    Triggers a validation of relation_names in relation fields. If multiple fields
    are leading to the same related model only one can have empty related_name param.
    Also related_names have to be unique.
    Trigger validation of primary_key - only one and required pk can be set
    Sets `owner` on each model_field as reference to newly created Model.
    :raises ModelDefinitionError: if validation of related_names fail,
    or pkname validation fails.
    :param model_fields: dictionary of declared ormar model fields
    :type model_fields: Dict[str, ormar.Field]
    :param new_model:
    :type new_model: Model class
    :return: pkname, list of sqlalchemy columns
    :rtype: Tuple[Optional[str], List[sqlalchemy.Column]]
    """
    # models declared with no fields (e.g. auto-created through models)
    # get an auto Integer pk named `id`
    if len(model_fields.keys()) == 0:
        model_fields["id"] = ormar.Integer(name="id", primary_key=True)
        logging.warning(
            f"Table {new_model.ormar_config.tablename} had no fields so auto "
            "Integer primary key named `id` created."
        )
    validate_related_names_in_relations(model_fields, new_model)
    return _process_fields(model_fields=model_fields, new_model=new_model)
def _process_fields(
    model_fields: Dict, new_model: Type["Model"]
) -> Tuple[Optional[str], List[sqlalchemy.Column]]:
    """
    Helper method.
    Populates pkname and columns.
    Sets `owner` on each model_field as reference to newly created Model.
    :raises ModelDefinitionError: if validation of related_names fail,
    or pkname validation fails.
    :param model_fields: dictionary of declared ormar model fields
    :type model_fields: Dict[str, ormar.Field]
    :param new_model:
    :type new_model: Model class
    :return: pkname, list of sqlalchemy columns
    :rtype: Tuple[Optional[str], List[sqlalchemy.Column]]
    """
    columns = []
    pkname = None
    for field_name, field in model_fields.items():
        field.owner = new_model
        # m2m fields declared without an explicit through model get a default one
        if _is_through_model_not_set(field):
            field.create_default_through_model()
        if field.primary_key:
            # raises if a pk was already registered for this model
            pkname = check_pk_column_validity(field_name, field, pkname)
        # virtual and m2m fields have no direct column in this table
        if _is_db_field(field):
            columns.append(field.get_column(field.get_alias()))
    return pkname, columns
def _is_through_model_not_set(field: "BaseField") -> bool:
"""
Alias to if check that verifies if through model was created.
:param field: field to check
:type field: "BaseField"
:return: result of the check
:rtype: bool
"""
return field.is_multi and not field.through and not field.to.__class__ == ForwardRef
def _is_db_field(field: "BaseField") -> bool:
"""
Alias to if check that verifies if field should be included in database.
:param field: field to check
:type field: "BaseField"
:return: result of the check
:rtype: bool
"""
return not field.virtual and not field.is_multi
def populate_config_tablename_columns_and_pk(
    name: str, new_model: Type["Model"]
) -> Type["Model"]:
    """
    Sets Model tablename if it's not already set in OrmarConfig.
    Default tablename if not present is class name lower + s (i.e. Bed becomes -> beds)
    Checks if Model's OrmarConfig have pkname and columns set.
    If not calls the sqlalchemy_columns_from_model_fields to populate
    columns from ormar.fields definitions.
    :raises ModelDefinitionError: if pkname is not present raises ModelDefinitionError.
    Each model has to have pk.
    :param name: name of the current Model
    :type name: str
    :param new_model: currently constructed Model
    :type new_model: ormar.models.metaclass.ModelMetaclass
    :return: Model with populated pkname and columns in OrmarConfig
    :rtype: ormar.models.metaclass.ModelMetaclass
    """
    tablename = name.lower() + "s"
    new_model.ormar_config.tablename = (
        new_model.ormar_config.tablename
        if new_model.ormar_config.tablename
        else tablename
    )
    pkname: Optional[str]
    # columns already present means the config was populated earlier
    # (e.g. inherited) - reuse them instead of rebuilding
    if new_model.ormar_config.columns:
        columns = new_model.ormar_config.columns
        pkname = new_model.ormar_config.pkname
    else:
        pkname, columns = sqlalchemy_columns_from_model_fields(
            new_model.ormar_config.model_fields, new_model
        )
    if pkname is None:
        raise ormar.ModelDefinitionError("Table has to have a primary key.")
    new_model.ormar_config.columns = columns
    new_model.ormar_config.pkname = pkname
    if not new_model.ormar_config.orders_by:
        # by default, we sort by pk name if other option not provided
        new_model.ormar_config.orders_by.append(pkname)
    return new_model
def check_for_null_type_columns_from_forward_refs(config: "OrmarConfig") -> bool:
    """
    Checks that no column is left as NullType(), i.e. an empty placeholder
    column produced by an unresolved ForwardRef relation.

    :param config: OrmarConfig of the Model without sqlalchemy table constructed
    :type config: Model class OrmarConfig
    :return: True when every column has a concrete type
    :rtype: bool
    """
    return all(
        not isinstance(col.type, sqlalchemy.sql.sqltypes.NullType)
        for col in config.columns
    )
def populate_config_sqlalchemy_table_if_required(config: "OrmarConfig") -> None:
    """
    Constructs sqlalchemy table out of columns and parameters set on OrmarConfig.
    It populates name, metadata, columns and constraints.
    Skipped when a table was already built, or when any column is still a
    NullType placeholder from an unresolved ForwardRef relation.
    :param config: OrmarConfig of the Model without sqlalchemy table constructed
    :type config: Model class OrmarConfig
    """
    if config.table is None and check_for_null_type_columns_from_forward_refs(
        config=config
    ):
        # constraints need deterministic names before Table creation
        set_constraint_names(config=config)
        table = sqlalchemy.Table(
            config.tablename, config.metadata, *config.columns, *config.constraints
        )
        config.table = table
def set_constraint_names(config: "OrmarConfig") -> None:
    """
    Populates the names on IndexColumns and UniqueColumns and CheckColumns constraints.
    :param config: OrmarConfig of the Model without sqlalchemy table constructed
    :type config: Model class OrmarConfig
    """
    for constraint in config.constraints:
        if isinstance(constraint, sqlalchemy.UniqueConstraint) and not constraint.name:
            # uc_<table>_<col1>_<col2>...
            constraint.name = (
                f"uc_{config.tablename}_"
                f'{"_".join([str(col) for col in constraint._pending_colargs])}'
            )
        elif (
            isinstance(constraint, sqlalchemy.Index)
            and constraint.name == "TEMPORARY_NAME"
        ):
            # "TEMPORARY_NAME" is the sentinel marking indexes that still
            # need a generated ix_<table>_<cols> name
            constraint.name = (
                f"ix_{config.tablename}_"
                f'{"_".join([col for col in constraint._pending_colargs])}'
            )
        elif isinstance(constraint, sqlalchemy.CheckConstraint) and not constraint.name:
            # check_<table>_<sql_condition_with_underscores>
            sql_condition: str = str(constraint.sqltext).replace(" ", "_")
            constraint.name = f"check_{config.tablename}_{sql_condition}"
def update_column_definition(
    model: Union[Type["Model"], Type["NewBaseModel"]], field: "ForeignKeyField"
) -> None:
    """
    Updates a column with a new type column based on updated parameters in FK fields.

    Scans the model's registered columns for the one matching the field alias
    and swaps it in place for a freshly built column; only the first match
    is replaced.

    :param model: model on which columns needs to be updated
    :type model: Type["Model"]
    :param field: field with column definition that requires update
    :type field: ForeignKeyField
    :return: None
    :rtype: None
    """
    columns = model.ormar_config.columns
    for position, existing_column in enumerate(columns):
        if existing_column.name != field.get_alias():
            continue
        columns[position] = field.get_column(field.get_alias())
        break
collerek-ormar-c09209a/ormar/models/helpers/validation.py 0000664 0000000 0000000 00000015607 15130200524 0023533 0 ustar 00root root 0000000 0000000 import decimal
import numbers
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Set,
Type,
Union,
)
try:
import orjson as json
except ImportError: # pragma: no cover
import json # type: ignore # noqa: F401
import pydantic
from ormar.models.helpers.models import config_field_not_set
from ormar.queryset.utils import translate_list_to_dict
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.fields import BaseField
def generate_model_example(
    model: Type["Model"], relation_map: Optional[Dict] = None
) -> Dict:
    """
    Generates example to be included in schema in fastapi.
    :param model: ormar.Model
    :type model: Type["Model"]
    :param relation_map: dict with relations to follow
    :type relation_map: Optional[Dict]
    :return: dict with example values
    :rtype: Dict[str, int]
    """
    example: Dict[str, Any] = dict()
    # top-level call builds the full relation map; recursive calls pass
    # a narrowed map to avoid following relations in circles
    relation_map = (
        relation_map
        if relation_map is not None
        else translate_list_to_dict(model._iterate_related_models())
    )
    for name, field in model.ormar_config.model_fields.items():
        populates_sample_fields_values(
            example=example, name=name, field=field, relation_map=relation_map
        )
    # remaining pure-pydantic fields get generic sample values;
    # ormar fields handled above are excluded
    to_exclude = {name for name in model.ormar_config.model_fields}
    pydantic_repr = generate_pydantic_example(pydantic_model=model, exclude=to_exclude)
    example.update(pydantic_repr)
    return example
def populates_sample_fields_values(
    example: Dict[str, Any],
    name: str,
    field: "BaseField",
    relation_map: Optional[Dict] = None,
) -> None:
    """
    Iterates the field and sets fields to sample values
    :param field: ormar field
    :type field: BaseField
    :param name: name of the field
    :type name: str
    :param example: example dict (mutated in place)
    :type example: Dict[str, Any]
    :param relation_map: dict with relations to follow
    :type relation_map: Optional[Dict]
    """
    if not field.is_relation:
        # bytes represented as base64 are shown as a plain "string" sample
        is_bytes_str = field.__type__ is bytes and field.represent_as_base64_str
        example[name] = field.__sample__ if not is_bytes_str else "string"
    elif isinstance(relation_map, dict) and name in relation_map:
        # only recurse into relations present in the map to avoid cycles
        example[name] = get_nested_model_example(
            name=name, field=field, relation_map=relation_map
        )
def get_nested_model_example(
    name: str, field: "BaseField", relation_map: Dict
) -> Union[List, Dict]:
    """
    Gets representation of nested model.
    :param name: name of the field to follow
    :type name: str
    :param field: ormar field
    :type field: BaseField
    :param relation_map: dict with relation map
    :type relation_map: Dict
    :return: nested model or list of nested model repr
    :rtype: Union[List, Dict]
    """
    value = generate_model_example(field.to, relation_map=relation_map.get(name, {}))
    # to-many relations (m2m or reverse fk) render as a one-element list
    new_value: Union[List, Dict] = [value] if field.is_multi or field.virtual else value
    return new_value
def generate_pydantic_example(
    pydantic_model: Type[pydantic.BaseModel], exclude: Optional[Set] = None
) -> Dict:
    """
    Generates dict with example values for plain pydantic fields.

    :param pydantic_model: model to parse
    :type pydantic_model: Type[pydantic.BaseModel]
    :param exclude: list of fields to exclude
    :type exclude: Optional[Set]
    :return: dict with fields and sample values
    :rtype: Dict
    """
    excluded = exclude or set()
    sample: Dict[str, Any] = {}
    for name in pydantic_model.model_fields:
        if name in excluded:
            continue
        annotation = pydantic_model.model_fields[name].annotation
        sample[name] = get_pydantic_example_repr(annotation)
    return sample
def get_pydantic_example_repr(type_: Any) -> Any:
    """
    Gets sample representation of pydantic field for example dict.
    :param type_: type of pydantic field
    :type type_: Any
    :return: representation to include in example
    :rtype: Any
    """
    # generics (List[...], Union[...]) are unwrapped first
    if hasattr(type_, "__origin__"):
        return generate_example_for_nested_types(type_)
    # numeric check runs before the BaseModel check on purpose - order matters
    if issubclass(type_, (numbers.Number, decimal.Decimal)):
        return 0
    if issubclass(type_, pydantic.BaseModel):
        return generate_pydantic_example(pydantic_model=type_)
    # fallback for str and any other plain type
    return "string"
def generate_example_for_nested_types(type_: Any) -> Any:
    """
    Process nested types like Union[X, Y] or List[X]
    """
    if type_.__origin__ == Union:
        return generate_example_for_union(type_=type_)
    if type_.__origin__ is list:
        return [get_pydantic_example_repr(type_.__args__[0])]
    # NOTE(review): any other origin (e.g. Dict) falls through and yields
    # an implicit None - confirm that is the intended example value.
def generate_example_for_union(type_: Any) -> Any:
    """
    Generates a pydantic example for Union[X, Y, ...].
    Note that Optional can also be set as Union[X, None]
    """
    # NoneType args are skipped so Optional[X] renders as X's sample
    non_none_args = [arg for arg in type_.__args__ if arg is not type(None)]
    values = tuple(get_pydantic_example_repr(arg) for arg in non_none_args)
    if len(values) == 1:
        return values[0]
    return values
def overwrite_example_and_description(
    schema: Dict[str, Any], model: Type["Model"]
) -> None:
    """
    Overwrites the example with properly nested children models.
    Overwrites the description if it's taken from ormar.Model.
    :param schema: schema of current model (mutated in place)
    :type schema: Dict[str, Any]
    :param model: model class
    :type model: Type["Model"]
    """
    # NOTE(review): only the example is set here; the description overwrite
    # mentioned in the docstring is not visible in this function - confirm.
    schema["example"] = generate_model_example(model=model)
def overwrite_binary_format(schema: Dict[str, Any], model: Type["Model"]) -> None:
    """
    Overwrites format of the field if it's a LargeBinary field with
    a flag to represent the field as base64 encoded string.

    :param schema: schema of current model (mutated in place)
    :type schema: Dict[str, Any]
    :param model: model class
    :type model: Type["Model"]
    """
    properties = schema.get("properties", {})
    for field_id, prop in properties.items():
        if field_id not in model._bytes_fields:
            continue
        if model.ormar_config.model_fields[field_id].represent_as_base64_str:
            prop["format"] = "base64"
def construct_schema_function() -> Callable:
    """
    Modifies model example and description if needed.
    Note that schema extra has to be a function, otherwise it's called to soon
    before all the relations are expanded.
    :return: callable that will be run by pydantic to modify the schema
    :rtype: Callable
    """

    def schema_extra(schema: Dict[str, Any], model: Type["Model"]) -> None:
        # runs lazily at schema-generation time, when relations are expanded
        overwrite_example_and_description(schema=schema, model=model)
        overwrite_binary_format(schema=schema, model=model)

    # wrapped in staticmethod so assigning it on the model class does not
    # bind it as an instance method
    return staticmethod(schema_extra)  # type: ignore
def modify_schema_example(model: Type["Model"]) -> None:  # noqa CCR001
    """
    Modifies the schema example in openapi schema.
    :param model: newly constructed Model
    :type model: Model class
    """
    # double negative: only attach the schema hook when model_fields IS set
    if not config_field_not_set(model=model, field_name="model_fields"):
        model.model_config["json_schema_extra"] = construct_schema_function()
collerek-ormar-c09209a/ormar/models/metaclass.py 0000664 0000000 0000000 00000066236 15130200524 0021717 0 ustar 00root root 0000000 0000000 import copy
import sys
import warnings
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Set,
Tuple,
Type,
Union,
cast,
)
import pydantic
import sqlalchemy
from pydantic import field_serializer
from pydantic._internal._generics import PydanticGenericMetadata
from pydantic.fields import ComputedFieldInfo, FieldInfo
from pydantic_core.core_schema import SerializerFunctionWrapHandler
from sqlalchemy.sql.schema import ColumnCollectionConstraint
import ormar # noqa I100
import ormar.fields.constraints
from ormar import ModelDefinitionError # noqa I100
from ormar.exceptions import ModelError
from ormar.fields import BaseField
from ormar.fields.constraints import CheckColumns, IndexColumns, UniqueColumns
from ormar.fields.foreign_key import ForeignKeyField
from ormar.fields.many_to_many import ManyToManyField
from ormar.models.descriptors import (
JsonDescriptor,
PkDescriptor,
PydanticDescriptor,
RelationDescriptor,
)
from ormar.models.descriptors.descriptors import BytesDescriptor
from ormar.models.helpers import (
check_required_config_parameters,
config_field_not_set,
expand_reverse_relationships,
extract_annotations_and_default_vals,
get_potential_fields,
merge_or_generate_pydantic_config,
modify_schema_example,
populate_config_sqlalchemy_table_if_required,
populate_config_tablename_columns_and_pk,
populate_default_options_values,
register_relation_in_alias_manager,
sqlalchemy_columns_from_model_fields,
)
from ormar.models.ormar_config import OrmarConfig
from ormar.models.quick_access_views import quick_access_set
from ormar.queryset import FieldAccessor, QuerySet
from ormar.signals import Signal
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.models import T
CONFIG_KEY = "Config"
PARSED_FIELDS_KEY = "__parsed_fields__"
def add_cached_properties(new_model: Type["Model"]) -> None:
    """
    Sets cached properties for both pydantic and ormar models.
    Quick access fields are fields grabbed in getattribute to skip all checks.
    Related fields and names are populated to None as they can change later.
    When children models are constructed they can modify parent to register itself.
    All properties here are used as "cache" to not recalculate them constantly.
    :param new_model: newly constructed Model
    :type new_model: Model class
    """
    # names short-circuited in __getattribute__
    new_model._quick_access_fields = quick_access_set
    # None (not empty set) marks "not computed yet" for lazy population
    new_model._related_names = None
    new_model._through_names = None
    new_model._related_fields = None
    # filled as json/bytes fields are discovered during class construction
    new_model._json_fields = set()
    new_model._bytes_fields = set()
def add_property_fields(new_model: Type["Model"], attrs: Dict) -> None:  # noqa: CCR001
    """
    Checks class namespace for properties or functions with computed_field.
    If attribute have decorator_info it was decorated with @computed_field.
    Functions like this are exposed in dict() (therefore also fastapi result).
    Names of property fields are cached for quicker access / extraction.
    :param new_model: newly constructed model
    :type new_model: Model class
    :param attrs:
    :type attrs: Dict[str, str]
    """
    props = set()
    for var_name, value in attrs.items():
        # @computed_field-decorated callables carry a ComputedFieldInfo
        if hasattr(value, "decorator_info") and isinstance(
            value.decorator_info, ComputedFieldInfo
        ):
            props.add(var_name)

    if config_field_not_set(model=new_model, field_name="property_fields"):
        new_model.ormar_config.property_fields = props
    else:
        # merge with property fields inherited from parent models
        new_model.ormar_config.property_fields = (
            new_model.ormar_config.property_fields.union(props)
        )
def register_signals(new_model: Type["Model"]) -> None:  # noqa: CCR001
    """
    Registers on model's SignalEmmiter and sets pre-defined signals.
    Predefined signals are (pre/post) + (save/update/delete) plus relation
    add/remove pairs and post_bulk_update.
    Signals are emitted in both model own methods and in selected queryset ones.

    :param new_model: newly constructed model
    :type new_model: Model class
    """
    if not config_field_not_set(model=new_model, field_name="signals"):
        return
    signals = new_model.ormar_config.signals
    predefined_signals = (
        "pre_save",
        "pre_update",
        "pre_delete",
        "post_save",
        "post_update",
        "post_delete",
        "pre_relation_add",
        "post_relation_add",
        "pre_relation_remove",
        "post_relation_remove",
        "post_bulk_update",
    )
    for signal_name in predefined_signals:
        setattr(signals, signal_name, Signal())
def verify_constraint_names(
    base_class: "Model", model_fields: Dict, parent_value: List
) -> None:
    """
    Verifies if redefined fields that are overwritten in subclasses did not remove
    any name of the column that is used in constraint as it will fail in sqlalchemy
    Table creation.
    :raises ModelDefinitionError: if a constraint references a column name
    not present among the (possibly redefined) model fields
    :param base_class: one of the parent classes
    :type base_class: Model or model parent class
    :param model_fields: ormar fields in defined in current class
    :type model_fields: Dict[str, BaseField]
    :param parent_value: list of base class constraints
    :type parent_value: List
    """
    # child redefinitions take precedence over parent aliases
    new_aliases = {x.name: x.get_alias() for x in model_fields.values()}
    old_aliases = {
        x.name: x.get_alias() for x in base_class.ormar_config.model_fields.values()
    }
    old_aliases.update(new_aliases)
    constraints_columns = [x._pending_colargs for x in parent_value]
    for column_set in constraints_columns:
        if any(x not in old_aliases.values() for x in column_set):
            raise ModelDefinitionError(
                f"Column constraints "
                f"{column_set} "
                f"has column names "
                f"that are not in the model fields."
                f"\n Check columns redefined in subclasses "
                f"to verify that they have proper 'name' set."
            )
def get_constraint_copy(
    constraint: ColumnCollectionConstraint,
) -> Union[UniqueColumns, IndexColumns, CheckColumns]:
    """
    Copy the constraint and unpacking it's values
    :raises ValueError: if non subclass of ColumnCollectionConstraint
    :param constraint: an instance of the ColumnCollectionConstraint class
    :type constraint: Instance of ColumnCollectionConstraint child
    :return: copy ColumnCollectionConstraint ormar constraints
    :rtype: Union[UniqueColumns, IndexColumns, CheckColumns]
    """
    # maps sqlalchemy constraint classes to ormar constraint factories
    constraints = {
        sqlalchemy.UniqueConstraint: lambda x: UniqueColumns(*x._pending_colargs),
        sqlalchemy.Index: lambda x: IndexColumns(*x._pending_colargs),
        sqlalchemy.CheckConstraint: lambda x: CheckColumns(x.sqltext),
    }
    # pick the first mapping key the constraint is an instance of
    checks = (key if isinstance(constraint, key) else None for key in constraints)
    target_class = next((target for target in checks if target is not None), None)
    constructor: Optional[Callable] = (
        constraints.get(target_class) if target_class else None
    )
    if not constructor:
        raise ValueError(f"{constraint} must be a ColumnCollectionMixin!")
    return constructor(constraint)
def update_attrs_from_base_config(  # noqa: CCR001
    base_class: "Model", attrs: Dict, model_fields: Dict
) -> None:
    """
    Updates OrmarConfig parameters in child from parent if needed.
    :param base_class: one of the parent classes
    :type base_class: Model or model parent class
    :param attrs: new namespace for class being constructed
    :type attrs: Dict
    :param model_fields: ormar fields in defined in current class
    :type model_fields: Dict[str, BaseField]
    """
    params_to_update = ["metadata", "database", "constraints", "property_fields"]
    for param in params_to_update:
        current_value = attrs.get("ormar_config", {}).__dict__.get(
            param, ormar.Undefined
        )
        parent_value = (
            base_class.ormar_config.__dict__.get(param)
            if hasattr(base_class, "ormar_config")
            else None
        )
        if parent_value:
            if param == "constraints":
                # fail early if inherited constraints reference columns
                # that redefined child fields no longer provide
                verify_constraint_names(
                    base_class=base_class,
                    model_fields=model_fields,
                    parent_value=parent_value,
                )
                # copies are needed so parent and child do not share instances
                parent_value = [get_constraint_copy(value) for value in parent_value]
            if isinstance(current_value, list):
                # list params (e.g. constraints) are merged, not replaced
                current_value.extend(parent_value)
            else:
                setattr(attrs["ormar_config"], param, parent_value)
def copy_and_replace_m2m_through_model(  # noqa: CFQ002
    field: ManyToManyField,
    field_name: str,
    table_name: str,
    parent_fields: Dict,
    attrs: Dict,
    ormar_config: OrmarConfig,
    base_class: Type["Model"],
) -> None:
    """
    Clones class with Through model for m2m relations, appends child name to the name
    of the cloned class.
    Clones non foreign keys fields from parent model, the same with database columns.
    Modifies related_name with appending child table name after '_'
    For table name, the table name of child is appended after '_'.
    Removes the original sqlalchemy table from metadata if it was not removed.
    :param base_class: base class model
    :type base_class: Type["Model"]
    :param field: field with relations definition
    :type field: ManyToManyField
    :param field_name: name of the relation field
    :type field_name: str
    :param table_name: name of the table
    :type table_name: str
    :param parent_fields: dictionary of fields to copy to new models from parent
    :type parent_fields: Dict
    :param attrs: new namespace for class being constructed
    :type attrs: Dict
    :param ormar_config: metaclass of currently created model
    :type ormar_config: OrmarConfig
    """
    # clone the field through a dynamically created subclass so the copy does
    # not share state with the parent's field instance
    Field: Type[BaseField] = type(  # type: ignore
        field.__class__.__name__, (ManyToManyField, BaseField), {}
    )
    copy_field = Field(**dict(field.__dict__))
    related_name = field.related_name + "_" + table_name
    copy_field.related_name = related_name  # type: ignore

    through_class = field.through
    if not through_class:
        field.owner = base_class
        field.create_default_through_model()
        through_class = field.through
    new_config = ormar.OrmarConfig(
        tablename=through_class.ormar_config.tablename,
        metadata=through_class.ormar_config.metadata,
        database=through_class.ormar_config.database,
        abstract=through_class.ormar_config.abstract,
        queryset_class=through_class.ormar_config.queryset_class,
        extra=through_class.ormar_config.extra,
        constraints=through_class.ormar_config.constraints,
        order_by=through_class.ormar_config.orders_by,
    )
    # NOTE(review): assigning pkname to `table` looks suspicious, but the
    # value is discarded below where table is reset to None - confirm intent.
    new_config.table = through_class.ormar_config.pkname  # type: ignore
    new_config.pkname = through_class.ormar_config.pkname
    new_config.alias_manager = through_class.ormar_config.alias_manager
    new_config.signals = through_class.ormar_config.signals
    new_config.requires_ref_update = through_class.ormar_config.requires_ref_update
    new_config.model_fields = copy.deepcopy(through_class.ormar_config.model_fields)
    new_config.property_fields = copy.deepcopy(
        through_class.ormar_config.property_fields
    )
    # child class name is appended to the through class name
    copy_name = through_class.__name__ + attrs.get("__name__", "")
    copy_through = cast(
        Type[ormar.Model], type(copy_name, (ormar.Model,), {"ormar_config": new_config})
    )
    # create new table with copied columns but remove foreign keys
    # they will be populated later in expanding reverse relation
    # if hasattr(new_config, "table"):
    new_config.tablename += "_" + ormar_config.tablename
    new_config.table = None  # type: ignore
    new_config.model_fields = {
        name: field
        for name, field in new_config.model_fields.items()
        if not field.is_relation
    }
    _, columns = sqlalchemy_columns_from_model_fields(
        new_config.model_fields, copy_through
    )  # type: ignore
    new_config.columns = columns
    populate_config_sqlalchemy_table_if_required(config=new_config)
    copy_field.through = copy_through

    parent_fields[field_name] = copy_field

    # drop the parent's through table so only the per-child copy remains
    if through_class.ormar_config.table in through_class.ormar_config.metadata:
        through_class.ormar_config.metadata.remove(through_class.ormar_config.table)
def copy_data_from_parent_model(  # noqa: CCR001
    base_class: Type["Model"],
    curr_class: type,
    attrs: Dict,
    model_fields: Dict[str, Union[BaseField, ForeignKeyField, ManyToManyField]],
) -> Tuple[Dict, Dict]:
    """
    Copies the key parameters [database, metadata, property_fields and constraints]
    and fields from parent models, overwriting them when needed.

    Only abstract classes can be subclassed, since relation fields require
    a different related_name for different children.

    :raises ModelDefinitionError: if a non abstract model is subclassed
    :param base_class: one of the parent classes
    :type base_class: Model or model parent class
    :param curr_class: current constructed class
    :type curr_class: Model or model parent class
    :param attrs: new namespace for class being constructed
    :type attrs: Dict
    :param model_fields: ormar fields defined in current class
    :type model_fields: Dict[str, BaseField]
    :return: updated attrs and model_fields
    :rtype: Tuple[Dict, Dict]
    """
    if attrs.get("ormar_config"):
        # subclassing a concrete (non-abstract) model with own fields is forbidden
        if model_fields and not base_class.ormar_config.abstract:  # type: ignore
            raise ModelDefinitionError(
                f"{curr_class.__name__} cannot inherit "
                f"from non abstract class {base_class.__name__}"
            )
        update_attrs_from_base_config(
            base_class=base_class,  # type: ignore
            attrs=attrs,
            model_fields=model_fields,
        )
    inherited_fields: Dict = dict()
    ormar_config = attrs.get("ormar_config")
    if not ormar_config:  # pragma: no cover
        raise ModelDefinitionError(
            f"Model {curr_class.__name__} declared without ormar_config"
        )
    if getattr(ormar_config, "tablename", None):
        table_name = ormar_config.tablename
    else:
        table_name = attrs.get("__name__", "").lower() + "s"
    for field_name, field in base_class.ormar_config.model_fields.items():
        if field.is_multi:
            # m2m fields need their own copy of the through model per child
            field = cast(ManyToManyField, field)
            copy_and_replace_m2m_through_model(
                field=field,
                field_name=field_name,
                table_name=table_name,
                parent_fields=inherited_fields,
                attrs=attrs,
                ormar_config=ormar_config,
                base_class=base_class,  # type: ignore
            )
        elif field.is_relation and field.related_name:
            # clone the fk field so each child gets a unique related_name
            field_cls = type(  # type: ignore
                field.__class__.__name__, (ForeignKeyField, BaseField), {}
            )
            field_copy = field_cls(**dict(field.__dict__))
            field_copy.related_name = (  # type: ignore
                field.related_name + "_" + table_name
            )
            inherited_fields[field_name] = field_copy
        else:
            inherited_fields[field_name] = field
    # own fields always win over the inherited ones
    inherited_fields.update(model_fields)  # type: ignore
    return attrs, inherited_fields
def extract_from_parents_definition(  # noqa: CCR001
    base_class: type,
    curr_class: type,
    attrs: Dict,
    model_fields: Dict[str, Union[BaseField, ForeignKeyField, ManyToManyField]],
) -> Tuple[Dict, Dict]:
    """
    Extracts fields from base classes if they have valid ormar fields.

    If a model was already parsed its fields definitions were removed from the
    class (pydantic complains about field re-definition), so after the first
    child the fields come from __parsed_fields__ and not the class itself.

    If the class is parsed the first time, annotations and field definitions
    are parsed from the class.__dict__.

    If the base class is an ormar.Model the copy path is used instead.

    :param base_class: one of the parent classes
    :type base_class: Model or model parent class
    :param curr_class: current constructed class
    :type curr_class: Model or model parent class
    :param attrs: new namespace for class being constructed
    :type attrs: Dict
    :param model_fields: ormar fields in defined in current class
    :type model_fields: Dict[str, BaseField]
    :return: updated attrs and model_fields
    :rtype: Tuple[Dict, Dict]
    """
    if hasattr(base_class, "ormar_config"):
        # base is a full ormar model -> copy config and fields from it
        return copy_data_from_parent_model(
            base_class=cast(Type["Model"], base_class),
            curr_class=curr_class,
            attrs=attrs,
            model_fields=model_fields,
        )
    annotations_key = "__annotations__"
    if hasattr(base_class, PARSED_FIELDS_KEY):
        # parent already parsed before -> reuse the cached extraction
        parsed_attrs, parsed_model_fields = getattr(base_class, PARSED_FIELDS_KEY)
        model_fields = update_attrs_and_fields(
            attrs=attrs,
            new_attrs=parsed_attrs,
            model_fields=model_fields,
            new_model_fields=parsed_model_fields,
            new_fields=set(parsed_model_fields.keys()),
        )
        return attrs, model_fields
    potential_fields = get_potential_fields(base_class.__dict__)
    if potential_fields:
        # parent model has ormar fields defined and was not parsed before
        parsed_attrs = {
            annotations_key: dict(base_class.__dict__.get(annotations_key, {}))
        }
        parsed_attrs.update(potential_fields)
        extracted_names = set(potential_fields.keys())
        # strip the field definitions off the parent so pydantic
        # does not see them again on the next subclass
        for name in extracted_names:
            delattr(base_class, name)
        parsed_attrs, parsed_model_fields = extract_annotations_and_default_vals(
            parsed_attrs
        )
        setattr(base_class, PARSED_FIELDS_KEY, (parsed_attrs, parsed_model_fields))
        model_fields = update_attrs_and_fields(
            attrs=attrs,
            new_attrs=parsed_attrs,
            model_fields=model_fields,
            new_model_fields=parsed_model_fields,
            new_fields=extracted_names,
        )
    return attrs, model_fields
def update_attrs_and_fields(
    attrs: Dict,
    new_attrs: Dict,
    model_fields: Dict,
    new_model_fields: Dict,
    new_fields: Set,
) -> Dict:
    """
    Updates __annotations__, values of model fields (so pydantic FieldInfos)
    as well as model.ormar_config.model_fields definitions from parents.

    :param attrs: new namespace for class being constructed
    :type attrs: Dict
    :param new_attrs: part of the namespace extracted from the parent class
    :type new_attrs: Dict
    :param model_fields: ormar fields defined in current class
    :type model_fields: Dict[str, BaseField]
    :param new_model_fields: ormar fields defined in parent classes
    :type new_model_fields: Dict[str, BaseField]
    :param new_fields: set of new fields names
    :type new_fields: Set[str]
    :return: merged model fields, own definitions overriding parent ones
    :rtype: Dict
    """
    attrs["__annotations__"].update(new_attrs["__annotations__"])
    for name in new_fields:
        attrs[name] = new_attrs[name]
    # parents first, then own fields so the child definitions win
    merged_fields = dict(new_model_fields)
    merged_fields.update(model_fields)
    return merged_fields
def add_field_descriptor(
    name: str, field: "BaseField", new_model: Type["Model"]
) -> None:
    """
    Sets the appropriate descriptor for each model field.

    There are 5 main types of descriptors: for bytes, json, pure pydantic fields,
    and 2 ormar ones - one for relations and one for the pk shortcut.

    :param name: name of the field
    :type name: str
    :param field: model field to add descriptor for
    :type field: BaseField
    :param new_model: model with fields
    :type new_model: Type["Model]
    """
    # order matters: relation check must precede the type checks
    if field.is_relation:
        descriptor: Any = RelationDescriptor(name=name)
    elif field.__type__ == pydantic.Json:
        descriptor = JsonDescriptor(name=name)
    elif field.__type__ is bytes:
        descriptor = BytesDescriptor(name=name)
    else:
        descriptor = PydanticDescriptor(name=name)
    setattr(new_model, name, descriptor)
def get_serializer() -> Callable:
    """Builds the wrap-mode serializer used for ormar relation fields."""

    def serialize(
        self: "Model",
        value: Optional["Model"],
        handler: SerializerFunctionWrapHandler,
    ) -> Any:
        """
        Serialize a value if it's not an expired weak reference.
        Breaks circular references by emitting only the primary key.
        """
        try:
            with warnings.catch_warnings():
                warnings.filterwarnings(
                    "ignore", message="Pydantic serializer warnings"
                )
                return handler(value)
        except ReferenceError:
            # the weakly-referenced related model is gone
            return None
        except ValueError as exc:
            if str(exc).startswith("Circular reference"):
                # cut the cycle: serialize just the pk (or None)
                return {value.ormar_config.pkname: value.pk} if value else None
            raise exc

    return serialize
class ModelMetaclass(pydantic._internal._model_construction.ModelMetaclass):
    def __new__(  # type: ignore # noqa: CCR001
        mcs: "ModelMetaclass",
        name: str,
        bases: Any,
        attrs: dict,
        __pydantic_generic_metadata__: Union[PydanticGenericMetadata, None] = None,
        __pydantic_reset_parent_namespace__: bool = True,
        _create_model_module: Union[str, None] = None,
        **kwargs,
    ) -> type:
        """
        Metaclass used by ormar Models that performs configuration
        and build of ormar Models.

        Sets pydantic configuration.
        Extract model_fields and convert them to pydantic FieldInfo,
        updates class namespace.

        Extracts settings and fields from parent classes.
        Fetches methods decorated with @computed_field decorator
        to expose them later in dict().

        Construct parent pydantic Metaclass/ Model.

        If class has ormar_config declared (so actual ormar Models) it also:

        * populate sqlalchemy columns, pkname and tables from model_fields
        * register reverse relationships on related models
        * registers all relations in alias manager that populates table_prefixes
        * exposes alias manager on each Model
        * creates QuerySet for each model and exposes it on a class
        * sets custom serializers for relation models

        :param name: name of current class
        :type name: str
        :param bases: base classes
        :type bases: Tuple
        :param attrs: class namespace
        :type attrs: Dict
        """
        merge_or_generate_pydantic_config(attrs=attrs, name=name)
        attrs["__name__"] = name
        attrs, model_fields = extract_annotations_and_default_vals(attrs)
        # walk bases in reverse so the closest parent's fields win on conflicts;
        # ormar/pydantic internals are skipped - they carry no user fields
        for base in reversed(bases):
            mod = base.__module__
            if mod.startswith("ormar.models.") or mod.startswith("pydantic."):
                continue
            attrs, model_fields = extract_from_parents_definition(
                base_class=base, curr_class=mcs, attrs=attrs, model_fields=model_fields
            )
        if "ormar_config" in attrs:
            # keep pydantic from treating OrmarConfig as a field
            attrs["model_config"]["ignored_types"] = (OrmarConfig,)
            attrs["model_config"]["from_attributes"] = True
            # attach a wrap serializer per relation field to handle
            # expired weakrefs and circular references during dumping
            for field_name, field in model_fields.items():
                if field.is_relation:
                    decorator = field_serializer(
                        field_name, mode="wrap", check_fields=False
                    )(get_serializer())
                    attrs[f"serialize_{field_name}"] = decorator
        # let pydantic build the actual model class
        new_model = super().__new__(
            mcs,  # type: ignore
            name,
            bases,
            attrs,
            __pydantic_generic_metadata__=__pydantic_generic_metadata__,
            __pydantic_reset_parent_namespace__=__pydantic_reset_parent_namespace__,
            _create_model_module=_create_model_module,
            **kwargs,
        )
        add_cached_properties(new_model)
        if hasattr(new_model, "ormar_config"):
            populate_default_options_values(new_model, model_fields)
            check_required_config_parameters(new_model)
            add_property_fields(new_model, attrs)
            register_signals(new_model=new_model)
            modify_schema_example(model=new_model)
            # only concrete models get tables, relations and descriptors
            if not new_model.ormar_config.abstract:
                new_model = populate_config_tablename_columns_and_pk(name, new_model)
                populate_config_sqlalchemy_table_if_required(new_model.ormar_config)
                expand_reverse_relationships(new_model)
                for field_name, field in new_model.ormar_config.model_fields.items():
                    register_relation_in_alias_manager(field=field)
                    add_field_descriptor(
                        name=field_name, field=field, new_model=new_model
                    )
                # if the pk was not declared on the class (e.g. inherited or
                # implicit) expose it to pydantic as Optional[int] and rebuild
                if (
                    new_model.ormar_config.pkname
                    and new_model.ormar_config.pkname not in attrs["__annotations__"]
                    and new_model.ormar_config.pkname not in new_model.model_fields
                ):
                    field_name = new_model.ormar_config.pkname
                    new_model.model_fields[field_name] = (
                        FieldInfo.from_annotated_attribute(
                            Optional[int],  # type: ignore
                            None,
                        )
                    )
                    new_model.model_rebuild(force=True)
                new_model.pk = PkDescriptor(name=new_model.ormar_config.pkname)
        return new_model

    @property
    def objects(cls: Type["T"]) -> "QuerySet[T]":  # type: ignore
        """
        Returns a fresh QuerySet bound to this model class.

        :raises ModelError: if the model still has unresolved ForwardRefs
        :return: queryset for this model
        :rtype: QuerySet[T]
        """
        if cls.ormar_config.requires_ref_update:
            raise ModelError(
                f"Model {cls.get_name()} has not updated "
                f"ForwardRefs. \nBefore using the model you "
                f"need to call update_forward_refs()."
            )
        return cls.ormar_config.queryset_class(model_cls=cls)

    def __getattr__(self, item: str) -> Any:
        """
        Returns FieldAccessors on access to model fields from a class,
        that way it can be used in python style filters and order_by.

        :param item: name of the field
        :type item: str
        :return: FieldAccessor for given field
        :rtype: FieldAccessor
        """
        # Ugly workaround for name shadowing warnings in pydantic
        # (pydantic's collect_model_fields probes class attributes)
        frame = sys._getframe(1)
        file_name = Path(frame.f_code.co_filename)
        if (
            frame.f_code.co_name == "collect_model_fields"
            and file_name.name == "_fields.py"
            and file_name.parent.parent.name == "pydantic"
        ):
            raise AttributeError()
        # "pk" is a shortcut that resolves to the actual pk field name
        if item == "pk":
            item = self.ormar_config.pkname
        if item in object.__getattribute__(self, "ormar_config").model_fields:
            field = self.ormar_config.model_fields.get(item)
            if field.is_relation:
                # relation accessors allow chained traversal (e.g. Model.rel.name)
                return FieldAccessor(
                    source_model=cast(Type["Model"], self),
                    model=field.to,
                    access_chain=item,
                )
            return FieldAccessor(
                source_model=cast(Type["Model"], self), field=field, access_chain=item
            )
        return object.__getattribute__(self, item)
collerek-ormar-c09209a/ormar/models/mixins/ 0000775 0000000 0000000 00000000000 15130200524 0020663 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/models/mixins/__init__.py 0000664 0000000 0000000 00000001264 15130200524 0022777 0 ustar 00root root 0000000 0000000 """
Package contains functionalities divided by features.
All mixins are combined into ModelTableProxy which is one of the parents of Model.
The split into mixins was done to ease the maintainability of the proxy class, as
it became quite complicated over time.
"""
from ormar.models.mixins.alias_mixin import AliasMixin
from ormar.models.mixins.excludable_mixin import ExcludableMixin
from ormar.models.mixins.merge_mixin import MergeModelMixin
from ormar.models.mixins.pydantic_mixin import PydanticMixin
from ormar.models.mixins.save_mixin import SavePrepareMixin
__all__ = [
"MergeModelMixin",
"AliasMixin",
"SavePrepareMixin",
"ExcludableMixin",
"PydanticMixin",
]
collerek-ormar-c09209a/ormar/models/mixins/alias_mixin.py 0000664 0000000 0000000 00000005407 15130200524 0023540 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict
class AliasMixin:
    """
    Used to translate field names into database column names.
    """

    if TYPE_CHECKING:  # pragma: no cover
        from ormar.models.ormar_config import OrmarConfig

        ormar_config: OrmarConfig

    @classmethod
    def get_column_alias(cls, field_name: str) -> str:
        """
        Returns db alias (column name in db) for given ormar field.
        For fields without alias the field name is returned.

        :param field_name: name of the field to get alias from
        :type field_name: str
        :return: alias (db name) if set, otherwise passed name
        :rtype: str
        """
        field = cls.ormar_config.model_fields.get(field_name)
        if field is None:
            return field_name
        return field.get_alias()

    @classmethod
    def get_column_name_from_alias(cls, alias: str) -> str:
        """
        Returns ormar field name for given db alias (column name in db).
        If no field has such alias it's returned as is.

        :param alias: db column name
        :type alias: str
        :return: field name if set, otherwise passed alias (db name)
        :rtype: str
        """
        matches = (
            name
            for name, field in cls.ormar_config.model_fields.items()
            if field.get_alias() == alias
        )
        # no match means it's not an alias but an actual field name
        return next(matches, alias)

    @classmethod
    def translate_columns_to_aliases(cls, new_kwargs: Dict) -> Dict:
        """
        Translates dictionary of model fields changing field names into aliases.
        If a field has no alias the field name remains intact.
        Only fields present in the dictionary are translated.

        :param new_kwargs: dict with fields names and their values
        :type new_kwargs: Dict
        :return: dict with aliases and their values
        :rtype: Dict
        """
        for name, field in cls.ormar_config.model_fields.items():
            if name not in new_kwargs:
                continue
            new_kwargs[field.get_alias()] = new_kwargs.pop(name)
        return new_kwargs

    @classmethod
    def translate_aliases_to_columns(cls, new_kwargs: Dict) -> Dict:
        """
        Translates dictionary of model fields changing aliases into field names.
        If a field has no alias the alias is already a field name.
        Only fields present in the dictionary are translated.

        :param new_kwargs: dict with aliases and their values
        :type new_kwargs: Dict
        :return: dict with fields names and their values
        :rtype: Dict
        """
        for name, field in cls.ormar_config.model_fields.items():
            alias = field.get_alias()
            if alias and alias in new_kwargs:
                new_kwargs[name] = new_kwargs.pop(alias)
        return new_kwargs
collerek-ormar-c09209a/ormar/models/mixins/excludable_mixin.py 0000664 0000000 0000000 00000017352 15130200524 0024561 0 ustar 00root root 0000000 0000000 from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Dict,
List,
Mapping,
Optional,
Set,
Type,
Union,
cast,
)
from ormar.models.excludable import ExcludableItems
from ormar.models.mixins.relation_mixin import RelationMixin
if TYPE_CHECKING: # pragma no cover
from ormar import Model
IntStr = Union[int, str]
AbstractSetIntStr = AbstractSet[IntStr]
MappingIntStrAny = Mapping[IntStr, Any]
class ExcludableMixin(RelationMixin):
    """
    Used to include/exclude given set of fields on models during load and dict() calls.
    """

    if TYPE_CHECKING:  # pragma: no cover
        from ormar import Model
        from ormar.models import ModelRow

    @staticmethod
    def get_child(
        items: Union[Set, Dict, None], key: Optional[str] = None
    ) -> Union[Set, Dict, None]:
        """
        Used to get nested dictionaries keys if they exist, otherwise returns
        passed items unchanged.

        :param items: bag of items to include or exclude
        :type items: Union[Set, Dict, None]
        :param key: name of the child to extract
        :type key: str
        :return: child extracted from items if exists
        :rtype: Union[Set, Dict, None]
        """
        if not isinstance(items, dict):
            return items
        return items.get(key, {})

    @staticmethod
    def _populate_pk_column(
        model: Union[Type["Model"], Type["ModelRow"]],
        columns: List[str],
        use_alias: bool = False,
    ) -> List[str]:
        """
        Adds primary key column/alias (depends on use_alias flag) to list of
        column names that are selected.

        :param model: model on which columns are selected
        :type model: Type["Model"]
        :param columns: list of columns names
        :type columns: List[str]
        :param use_alias: flag to set if aliases or field names should be used
        :type use_alias: bool
        :return: list of columns names with pk column in it
        :rtype: List[str]
        """
        if use_alias:
            pk_column = model.get_column_alias(model.ormar_config.pkname)
        else:
            pk_column = model.ormar_config.pkname
        if pk_column not in columns:
            columns.append(pk_column)
        return columns

    @classmethod
    def own_table_columns(
        cls,
        model: Union[Type["Model"], Type["ModelRow"]],
        excludable: ExcludableItems,
        alias: str = "",
        use_alias: bool = False,
        add_pk_columns: bool = True,
    ) -> List[str]:
        """
        Returns list of aliases or field names for given model.
        Aliases/names switch is the use_alias flag.

        If include is set only the included fields are returned.
        Fields in exclude are removed from the result.
        Primary key field is always added and cannot be excluded (will be
        added back anyway when add_pk_columns is set).

        :param add_pk_columns: flag if add primary key - always yes if ormar parses data
        :type add_pk_columns: bool
        :param alias: relation prefix
        :type alias: str
        :param excludable: structure of fields to include and exclude
        :type excludable: ExcludableItems
        :param model: model on which columns are selected
        :type model: Type["Model"]
        :param use_alias: flag if aliases or field names should be used
        :type use_alias: bool
        :return: list of column field names or aliases
        :rtype: List[str]
        """
        model_excludable = excludable.get(model_cls=model, alias=alias)  # type: ignore
        table_columns = model.ormar_config.table.columns
        field_names = [
            model.get_column_name_from_alias(col.name) for col in table_columns
        ]
        if use_alias:
            columns = [col.name for col in table_columns]
        else:
            columns = list(field_names)
        if model_excludable.include:
            columns = [
                col
                for col, name in zip(columns, field_names)
                if model_excludable.is_included(name)
            ]
        if model_excludable.exclude:
            columns = [
                col
                for col, name in zip(columns, field_names)
                if not model_excludable.is_excluded(name)
            ]
        # always has to return pk column for ormar to work
        if add_pk_columns:
            columns = cls._populate_pk_column(
                model=model, columns=columns, use_alias=use_alias
            )
        return columns

    @classmethod
    def _update_excluded_with_related(cls, exclude: Union[Set, Dict, None]) -> Set:
        """
        Used during generation of the dict().
        To avoid cyclical references and max recurrence limit nested models have to
        exclude related models that are not mandatory.

        For a main model (not nested) only nullable related field names are added to
        exclusion, for nested models all related models are excluded.

        :param exclude: set/dict with fields to exclude
        :type exclude: Union[Set, Dict, None]
        :return: set or dict with excluded fields added.
        :rtype: Union[Set, Dict]
        """
        exclude = exclude or set()
        related_set = cls.extract_related_names()
        if isinstance(exclude, set):
            exclude = {member for member in exclude}.union(related_set)
        elif isinstance(exclude, dict):
            # relations are handled in ormar - take only own fields (ellipsis in dict)
            own_keys = {key for key, value in exclude.items() if value is Ellipsis}
            exclude = own_keys.union(related_set)
        return exclude

    @classmethod
    def _update_excluded_with_pks_and_through(
        cls, exclude: Set, exclude_primary_keys: bool, exclude_through_models: bool
    ) -> Set:
        """
        Updates excluded names with name of pk column if exclude flag is set.

        :param exclude: set of names to exclude
        :type exclude: Set
        :param exclude_primary_keys: flag if the primary keys should be excluded
        :type exclude_primary_keys: bool
        :return: set updated with pk if flag is set
        :rtype: Set
        """
        if exclude_primary_keys:
            exclude.add(cls.ormar_config.pkname)
        if exclude_through_models:
            exclude = exclude.union(cls.extract_through_names())
        return exclude

    @classmethod
    def get_names_to_exclude(cls, excludable: ExcludableItems, alias: str) -> Set:
        """
        Returns a set of models field names that should be explicitly excluded
        during model initialization.

        Those fields will be set to None to avoid ormar/pydantic setting default
        values on them. They should be returned as None in any case.

        Used in parsing data from database rows that construct Models by initializing
        them with dicts constructed from those db rows.

        :param alias: alias of current relation
        :type alias: str
        :param excludable: structure of fields to include and exclude
        :type excludable: ExcludableItems
        :return: set of field names that should be excluded
        :rtype: Set
        """
        model = cast(Type["Model"], cls)
        model_excludable = excludable.get(model_cls=model, alias=alias)
        own_fields = cls.extract_db_own_fields()
        if model_excludable.include:
            kept_fields = model_excludable.include.intersection(own_fields)
        else:
            kept_fields = own_fields
        excluded = own_fields - kept_fields
        if model_excludable.exclude:
            excluded = excluded | model_excludable.exclude.intersection(own_fields)
        # pk can never be excluded - ormar needs it to build the model
        return excluded - {cls.ormar_config.pkname}
collerek-ormar-c09209a/ormar/models/mixins/merge_mixin.py 0000664 0000000 0000000 00000014076 15130200524 0023550 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict, List, Optional, cast
import ormar
from ormar.queryset.utils import translate_list_to_dict
if TYPE_CHECKING: # pragma no cover
from ormar import Model
class MergeModelMixin:
    """
    Used to merge model instances returned by the database,
    already initialized to ormar Models.

    Models can duplicate during joins when parent model has multiple child rows,
    in the end all parent (main) models should be unique.
    """

    @classmethod
    def _recursive_add(cls, model_group: List["Model"]) -> List["Model"]:
        """
        Instead of accumulating the model additions one by one, this recursively
        merges the models pairwise. E.g. an accumulating merge of [1, 2, 3, 4]
        would go [3, 3, 4] -> [6, 4] -> [10], while this method merges pairs:
        [1, 2, 3, 4] -> [3, 7] -> [10].

        It's the same number of merges, but it gives better O(N) performance on
        the sublists carried inside each merged model.
        """
        if len(model_group) <= 1:
            return model_group
        added_values = []
        # consume the iterator two items at a time; the second item of a pair
        # may be missing for an odd-length group
        iterable_group = iter(model_group)
        for model in iterable_group:
            next_model = next(iterable_group, None)
            if next_model is not None:
                # note the order: the later row is merged INTO and returned
                combined = cls.merge_two_instances(next_model, model)
            else:
                combined = model
            added_values.append(combined)
        return cls._recursive_add(added_values)

    @classmethod
    def merge_instances_list(cls, result_rows: List["Model"]) -> List["Model"]:
        """
        Merges a list of models into list of unique models.

        Models can duplicate during joins when parent model has multiple child rows,
        in the end all parent (main) models should be unique.

        :param result_rows: list of already initialized Models with child models
        populated, each instance is one row in db and some models can duplicate
        :type result_rows: List["Model"]
        :return: list of merged models where each main model is unique
        :rtype: List["Model"]
        """
        merged_rows: List["Model"] = []
        grouped_instances: Dict = {}
        # group db rows by primary key, then collapse each group to one model
        for model in result_rows:
            grouped_instances.setdefault(model.pk, []).append(model)
        for group in grouped_instances.values():
            model = cls._recursive_add(group)[0]
            merged_rows.append(model)
        return merged_rows

    @classmethod
    def merge_two_instances(
        cls, one: "Model", other: "Model", relation_map: Optional[Dict] = None
    ) -> "Model":
        """
        Merges current (other) Model and previous one (one) and returns the current
        Model instance with data merged from previous one.

        If needed it's calling itself recurrently and merges also children models.

        :param relation_map: map of models relations to follow
        :type relation_map: Dict
        :param one: previous model instance
        :type one: Model
        :param other: current model instance
        :type other: Model
        :return: current Model instance with data merged from previous one.
        :rtype: Model
        """
        # build the relation traversal map only on the first (non-recursive) call
        relation_map = (
            relation_map
            if relation_map is not None
            else translate_list_to_dict(one._iterate_related_models())
        )
        for field_name in relation_map:
            current_field = getattr(one, field_name)
            other_value = getattr(other, field_name, [])
            if isinstance(current_field, list):
                # to-many relation: merge the two lists of children
                value_to_set = cls._merge_items_lists(
                    field_name=field_name,
                    current_field=current_field,
                    other_value=other_value,
                    relation_map=relation_map,
                )
                setattr(other, field_name, value_to_set)
            elif (
                isinstance(current_field, ormar.Model)
                and isinstance(other_value, ormar.Model)
                and current_field.pk == other_value.pk
            ):
                # to-one relation pointing at the same row: merge recursively,
                # following only the sub-map for this field
                setattr(
                    other,
                    field_name,
                    cls.merge_two_instances(
                        current_field,
                        other_value,
                        relation_map=one._skip_ellipsis(  # type: ignore
                            relation_map, field_name, default_return=dict()
                        ),
                    ),
                )
        # merged models come straight from the db, so mark them as saved
        other.set_save_status(True)
        return other

    @classmethod
    def _merge_items_lists(
        cls,
        field_name: str,
        current_field: List,
        other_value: List,
        relation_map: Optional[Dict],
    ) -> List:
        """
        Takes two lists of nested models and processes them going deeper
        according with the relation map.

        If a model from one's list is in other -> they are merged with relations
        to follow passed from the map.

        If one's model is not in other it's simply appended to the list.

        :param field_name: name of the current relation field
        :type field_name: str
        :param current_field: list of nested models from one model
        :type current_field: List[Model]
        :param other_value: list of nested models from other model
        :type other_value: List[Model]
        :param relation_map: map of relations to follow
        :type relation_map: Dict
        :return: merged list of models
        :rtype: List[Model]
        """
        value_to_set = [x for x in other_value]
        for cur_field in current_field:
            if cur_field in other_value:
                # same child present on both sides -> merge the two copies
                old_value = next((x for x in other_value if x == cur_field), None)
                new_val = cls.merge_two_instances(
                    cur_field,
                    cast("Model", old_value),
                    relation_map=cur_field._skip_ellipsis(  # type: ignore
                        relation_map, field_name, default_return=dict()
                    ),
                )
                # replace the old copy with the merged one (appended at the end)
                value_to_set = [x for x in value_to_set if x != cur_field] + [new_val]
            else:
                value_to_set.append(cur_field)
        return value_to_set
collerek-ormar-c09209a/ormar/models/mixins/pydantic_mixin.py 0000664 0000000 0000000 00000015315 15130200524 0024261 0 ustar 00root root 0000000 0000000 import copy
import string
from random import choices
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Set,
Tuple,
Type,
Union,
cast,
)
import pydantic
from pydantic import BaseModel
from pydantic._internal._decorators import DecoratorInfos
from ormar.fields import BaseField, ForeignKeyField, ManyToManyField
from ormar.models.mixins.relation_mixin import RelationMixin # noqa: I100, I202
from ormar.queryset.utils import translate_list_to_dict
class PydanticMixin(RelationMixin):
    """
    Provides conversion of ormar models into plain pydantic models,
    with optional include/exclude of own and nested fields.
    """

    # cache of generated pydantic models keyed by (class name, include, exclude)
    __cache__: Dict[str, Type[pydantic.BaseModel]] = {}

    if TYPE_CHECKING:  # pragma: no cover
        __pydantic_decorators__: DecoratorInfos
        _skip_ellipsis: Callable
        _get_not_excluded_fields: Callable

    @classmethod
    def get_pydantic(
        cls,
        *,
        include: Union[Set, Dict, None] = None,
        exclude: Union[Set, Dict, None] = None,
    ) -> Type[pydantic.BaseModel]:
        """
        Returns a pydantic model out of ormar model.

        Converts also nested ormar models into pydantic models.

        Can be used to fully exclude certain fields in fastapi response and requests.

        :param include: fields of own and nested models to include
        :type include: Union[Set, Dict, None]
        :param exclude: fields of own and nested models to exclude
        :type exclude: Union[Set, Dict, None]
        """
        relation_map = translate_list_to_dict(cls._iterate_related_models())
        return cls._convert_ormar_to_pydantic(
            include=include, exclude=exclude, relation_map=relation_map
        )

    @classmethod
    def _convert_ormar_to_pydantic(
        cls,
        relation_map: Dict[str, Any],
        include: Union[Set, Dict, None] = None,
        exclude: Union[Set, Dict, None] = None,
    ) -> Type[pydantic.BaseModel]:
        """
        Builds (and caches) a pydantic model for the given include/exclude
        combination, recursing into relations listed in relation_map.

        :param relation_map: map of relations to follow (guards against cycles)
        :type relation_map: Dict[str, Any]
        :param include: fields of own and nested models to include
        :type include: Union[Set, Dict, None]
        :param exclude: fields of own and nested models to exclude
        :type exclude: Union[Set, Dict, None]
        :return: generated pydantic model class
        :rtype: Type[pydantic.BaseModel]
        """
        if include and isinstance(include, Set):
            include = translate_list_to_dict(include)
        if exclude and isinstance(exclude, Set):
            exclude = translate_list_to_dict(exclude)
        # check the cache before any field processing - previously the fields
        # were collected and sorted even when the result was already cached
        cache_key = f"{cls.__name__}_{str(include)}_{str(exclude)}"
        if cache_key in cls.__cache__:
            return cls.__cache__[cache_key]
        fields_dict: Dict[str, Any] = dict()
        defaults: Dict[str, Any] = dict()
        fields_to_process = cls._get_not_excluded_fields(
            fields={*cls.ormar_config.model_fields.keys()},
            include=include,
            exclude=exclude,
        )
        # keep the declaration order of the original ormar model
        fields_to_process.sort(
            key=lambda x: list(cls.ormar_config.model_fields.keys()).index(x)
        )
        for name in fields_to_process:
            field = cls._determine_pydantic_field_type(
                name=name,
                defaults=defaults,
                include=include,
                exclude=exclude,
                relation_map=relation_map,
            )
            if field is not None:
                fields_dict[name] = field
        # random suffix avoids name clashes between generated models
        model = type(
            f"{cls.__name__}_{''.join(choices(string.ascii_uppercase, k=3))}",
            (pydantic.BaseModel,),
            {"__annotations__": fields_dict, **defaults},
        )
        model = cast(Type[pydantic.BaseModel], model)
        cls._copy_field_validators(model=model)
        cls.__cache__[cache_key] = model
        return model

    @classmethod
    def _determine_pydantic_field_type(
        cls,
        name: str,
        defaults: Dict,
        include: Union[Set, Dict, None],
        exclude: Union[Set, Dict, None],
        relation_map: Dict[str, Any],
    ) -> Any:
        """
        Returns the annotation type for one field of the generated model,
        populating defaults in place. Returns None for relations not present
        in the relation_map (they are skipped entirely).
        """
        field = cls.ormar_config.model_fields[name]
        target: Any = None
        if field.is_relation and name in relation_map:
            # defaults is mutated in place by the helper, so the returned
            # dict can be ignored here
            target, _ = cls._determined_included_relation_field_type(
                name=name,
                field=field,
                include=include,
                exclude=exclude,
                defaults=defaults,
                relation_map=relation_map,
            )
        elif not field.is_relation:
            defaults[name] = cls.model_fields[name].default  # type: ignore
            target = field.__type__
        if target is not None and field.nullable:
            target = Optional[target]
        return target

    @classmethod
    def _determined_included_relation_field_type(
        cls,
        name: str,
        field: Union[BaseField, ForeignKeyField, ManyToManyField],
        include: Union[Set, Dict, None],
        exclude: Union[Set, Dict, None],
        defaults: Dict,
        relation_map: Dict[str, Any],
    ) -> Tuple[Type[BaseModel], Dict]:
        """
        Converts a related ormar model to a nested pydantic model,
        wrapping it in List for to-many relations and recording a None
        default for nullable relations.
        """
        target = field.to._convert_ormar_to_pydantic(
            include=cls._skip_ellipsis(include, name),
            exclude=cls._skip_ellipsis(exclude, name),
            relation_map=cls._skip_ellipsis(relation_map, name, default_return=dict()),
        )
        if field.is_multi or field.virtual:
            target = List[target]  # type: ignore
        if field.nullable:
            defaults[name] = None
        return target, defaults

    @classmethod
    def _copy_field_validators(cls, model: Type[pydantic.BaseModel]) -> None:
        """
        Copy field validators from ormar model to generated pydantic model.
        """
        field_names = list(model.model_fields.keys())
        cls.copy_selected_validators_type(
            model=model, fields=field_names, validator_type="field_validators"
        )
        cls.copy_selected_validators_type(
            model=model, fields=field_names, validator_type="validators"
        )
        class_validators = cls.__pydantic_decorators__.root_validators
        model.__pydantic_decorators__.root_validators.update(
            copy.deepcopy(class_validators)
        )
        model_validators = cls.__pydantic_decorators__.model_validators
        model.__pydantic_decorators__.model_validators.update(
            copy.deepcopy(model_validators)
        )
        # rebuild so the copied decorators are taken into account
        model.model_rebuild(force=True)

    @classmethod
    def copy_selected_validators_type(
        cls, model: Type[pydantic.BaseModel], fields: List[str], validator_type: str
    ) -> None:
        """
        Copy validators of a given decorator type from ormar model to the
        generated pydantic model, keeping only the ones that target fields
        present on the generated model.
        """
        validators = getattr(cls.__pydantic_decorators__, validator_type)
        for name, decorator in validators.items():
            if any(field_name in decorator.info.fields for field_name in fields):
                # deep copy so trimming the fields list does not mutate
                # the decorator registered on the ormar model
                copied_decorator = copy.deepcopy(decorator)
                copied_decorator.info.fields = [
                    field_name
                    for field_name in decorator.info.fields
                    if field_name in fields
                ]
                getattr(model.__pydantic_decorators__, validator_type)[
                    name
                ] = copied_decorator
collerek-ormar-c09209a/ormar/models/mixins/relation_mixin.py 0000664 0000000 0000000 00000015052 15130200524 0024261 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Set, cast
from ormar import BaseField, ForeignKeyField
from ormar.models.traversible import NodeList
class RelationMixin:
    """
    Used to return relation fields/names etc. from given model
    """

    if TYPE_CHECKING:  # pragma no cover
        from ormar.models.ormar_config import OrmarConfig

        ormar_config: OrmarConfig
        # cached list of relation strings for select_related, built lazily
        # by _iterate_related_models
        __relation_map__: Optional[List[str]]
        # caches populated on first call of the corresponding classmethods
        _related_names: Optional[Set]
        _through_names: Optional[Set]
        _related_fields: Optional[List]
        get_name: Callable

    @classmethod
    def extract_db_own_fields(cls) -> Set:
        """
        Returns only fields that are stored in the own database table, exclude all
        related fields.

        :return: set of model fields with relation fields excluded
        :rtype: Set
        """
        related_names = cls.extract_related_names()
        self_fields = {
            name
            for name in cls.ormar_config.model_fields.keys()
            if name not in related_names
        }
        return self_fields

    @classmethod
    def extract_related_fields(cls) -> List["ForeignKeyField"]:
        """
        Returns List of ormar Fields for all relations declared on a model.
        List is cached in cls._related_fields for quicker access.

        :return: list of related fields
        :rtype: List
        """
        if cls._related_fields is not None:
            return cls._related_fields

        related_fields = []
        # includes both regular relation fields and through shortcut fields
        for name in cls.extract_related_names().union(cls.extract_through_names()):
            related_fields.append(
                cast("ForeignKeyField", cls.ormar_config.model_fields[name])
            )
        cls._related_fields = related_fields

        return related_fields

    @classmethod
    def extract_through_names(cls) -> Set[str]:
        """
        Extracts related fields through names which are shortcuts to through models.
        Result is cached in cls._through_names for quicker access.

        :return: set of related through fields names
        :rtype: Set
        """
        if cls._through_names is not None:
            return cls._through_names

        related_names = set()
        for name, field in cls.ormar_config.model_fields.items():
            if isinstance(field, BaseField) and field.is_through:
                related_names.add(name)

        cls._through_names = related_names

        return related_names

    @classmethod
    def extract_related_names(cls) -> Set[str]:
        """
        Returns List of fields names for all relations declared on a model.
        List is cached in cls._related_names for quicker access.

        :return: set of related fields names
        :rtype: Set
        """
        if cls._related_names is not None:
            return cls._related_names

        related_names = set()
        # through fields and fields flagged with skip_field are excluded
        for name, field in cls.ormar_config.model_fields.items():
            if (
                isinstance(field, BaseField)
                and field.is_relation
                and not field.is_through
                and not field.skip_field
            ):
                related_names.add(name)

        cls._related_names = related_names

        return related_names

    @classmethod
    def _extract_db_related_names(cls) -> Set:
        """
        Returns only fields that are stored in the own database table, exclude
        related fields that are not stored as foreign keys on given model.

        :return: set of model fields with non fk relation fields excluded
        :rtype: Set
        """
        related_names = cls.extract_related_names()
        related_names = {
            name
            for name in related_names
            if cls.ormar_config.model_fields[name].is_valid_uni_relation()
        }
        return related_names

    @classmethod
    def _iterate_related_models(  # noqa: CCR001
        cls,
        node_list: Optional[NodeList] = None,
        parsed_map: Optional[Dict] = None,
        source_relation: Optional[str] = None,
        recurrent: bool = False,
    ) -> List[str]:
        """
        Iterates related models recursively to extract relation strings of
        nested not visited models.

        Uses a NodeList to track already visited models (avoiding infinite
        loops on circular relations) and a parsed_map memo so each
        model/relation pair is expanded only once. The final result for the
        top-level call is cached on cls.__relation_map__.

        :param node_list: list of already visited model nodes
        :type node_list: Optional[NodeList]
        :param parsed_map: memo of already expanded relation strings
        :type parsed_map: Optional[Dict]
        :param source_relation: relation name leading to the current model
        :type source_relation: Optional[str]
        :param recurrent: flag marking nested (non top-level) calls
        :type recurrent: bool
        :return: list of relation strings to be passed to select_related
        :rtype: List[str]
        """
        if not node_list:
            # top-level call - return cached map if already computed
            if cls.__relation_map__:
                return cls.__relation_map__
            node_list = NodeList()
            parsed_map = dict()
            current_node = node_list.add(node_class=cls)
        else:
            current_node = node_list[-1]
        # sorted for deterministic ordering of the resulting relation strings
        relations = sorted(cls.extract_related_names())
        processed_relations: List[str] = []
        for relation in relations:
            if not current_node.visited(relation):
                target_model = cls.ormar_config.model_fields[relation].to
                node_list.add(
                    node_class=target_model,
                    relation_name=relation,
                    parent_node=current_node,
                )
                relation_key = f"{cls.get_name()}_{relation}"
                parsed_map = cast(Dict, parsed_map)
                deep_relations = parsed_map.get(relation_key)
                if not deep_relations:
                    deep_relations = target_model._iterate_related_models(
                        source_relation=relation,
                        node_list=node_list,
                        recurrent=True,
                        parsed_map=parsed_map,
                    )
                    parsed_map[relation_key] = deep_relations
                processed_relations.extend(deep_relations)

        result = cls._get_final_relations(processed_relations, source_relation)
        if not recurrent:
            # cache only the complete, top-level relation map
            cls.__relation_map__ = result
        return result

    @staticmethod
    def _get_final_relations(
        processed_relations: List, source_relation: Optional[str]
    ) -> List[str]:
        """
        Helper method to prefix nested relation strings with current source relation

        :param processed_relations: list of already processed relation str
        :type processed_relations: List[str]
        :param source_relation: name of the current relation
        :type source_relation: str
        :return: list of relation strings to be passed to select_related
        :rtype: List[str]
        """
        if processed_relations:
            final_relations = [
                f"{source_relation + '__' if source_relation else ''}{relation}"
                for relation in processed_relations
            ]
        else:
            final_relations = [source_relation] if source_relation else []
        return final_relations
collerek-ormar-c09209a/ormar/models/mixins/save_mixin.py 0000664 0000000 0000000 00000040733 15130200524 0023406 0 ustar 00root root 0000000 0000000 import base64
import uuid
from enum import Enum
from typing import (
TYPE_CHECKING,
Any,
Callable,
Collection,
Dict,
List,
Optional,
Set,
Union,
cast,
)
from pydantic.plugin._schema_validator import (
PluggableSchemaValidator,
create_schema_validator,
)
from pydantic_core import CoreSchema, SchemaValidator
import ormar # noqa: I100, I202
from ormar.exceptions import ModelPersistenceError
from ormar.fields.parsers import encode_json
from ormar.models.mixins import AliasMixin
from ormar.models.mixins.relation_mixin import RelationMixin
if TYPE_CHECKING: # pragma: no cover
from ormar import ForeignKeyField, Model
class SavePrepareMixin(RelationMixin, AliasMixin):
    """
    Used to prepare models to be saved in database
    """

    if TYPE_CHECKING:  # pragma: nocover
        _skip_ellipsis: Callable
        _json_fields: Set[str]
        _bytes_fields: Set[str]
        __pydantic_core_schema__: CoreSchema
        # lazy cache of per-field schema validators,
        # built in _build_individual_schema_validator
        __ormar_fields_validators__: Optional[
            Dict[str, Union[SchemaValidator, PluggableSchemaValidator]]
        ]

    @classmethod
    def prepare_model_to_save(cls, new_kwargs: dict) -> dict:
        """
        Combines all preparation methods before saving.
        Removes primary key if it's a nullable or autoincrement pk field
        and it's set to None.
        Substitute related models with their primary key values as fk column.
        Populates the default values for field with default set and no value.
        Translate columns into aliases (db names).

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict[str, str]
        :return: dictionary of model that is about to be saved
        :rtype: Dict[str, str]
        """
        new_kwargs = cls._remove_pk_from_kwargs(new_kwargs)
        new_kwargs = cls._remove_not_ormar_fields(new_kwargs)
        new_kwargs = cls.substitute_models_with_pks(new_kwargs)
        new_kwargs = cls.populate_default_values(new_kwargs)
        new_kwargs = cls.reconvert_str_to_bytes(new_kwargs)
        new_kwargs = cls.translate_columns_to_aliases(new_kwargs)
        return new_kwargs

    @classmethod
    def prepare_model_to_update(cls, new_kwargs: dict) -> dict:
        """
        Combines all preparation methods before updating.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict[str, str]
        :return: dictionary of model that is about to be updated
        :rtype: Dict[str, str]
        """
        new_kwargs = cls.parse_non_db_fields(new_kwargs)
        new_kwargs = cls.substitute_models_with_pks(new_kwargs)
        new_kwargs = cls.reconvert_str_to_bytes(new_kwargs)
        new_kwargs = cls.dump_all_json_fields_to_str(new_kwargs)
        new_kwargs = cls.translate_columns_to_aliases(new_kwargs)
        new_kwargs = cls.translate_enum_columns(new_kwargs)
        return new_kwargs

    @classmethod
    def translate_enum_columns(cls, new_kwargs: dict) -> dict:
        """
        Receives dictionary of model that is about to be saved and replaces
        any Enum instances with their ``name`` attribute.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict[str, str]
        :return: dictionary of model that is about to be saved
        :rtype: Dict[str, str]
        """
        for key, value in new_kwargs.items():
            if isinstance(value, Enum):
                new_kwargs[key] = value.name
        return new_kwargs

    @classmethod
    def _remove_not_ormar_fields(cls, new_kwargs: dict) -> dict:
        """
        Removes entries that do not correspond to fields declared on the
        ormar model, so only actual model fields are passed to the query.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict[str, str]
        :return: dictionary of model that is about to be saved
        :rtype: Dict[str, str]
        """
        ormar_fields = {k for k, v in cls.ormar_config.model_fields.items()}
        new_kwargs = {k: v for k, v in new_kwargs.items() if k in ormar_fields}
        return new_kwargs

    @classmethod
    def _remove_pk_from_kwargs(cls, new_kwargs: dict) -> dict:
        """
        Removes primary key if it's a nullable or autoincrement pk field
        and it's set to None.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict[str, str]
        :return: dictionary of model that is about to be saved
        :rtype: Dict[str, str]
        """
        pkname = cls.ormar_config.pkname
        pk = cls.ormar_config.model_fields[pkname]
        if new_kwargs.get(pkname, ormar.Undefined) is None and (
            pk.nullable or pk.autoincrement
        ):
            del new_kwargs[pkname]
        return new_kwargs

    @classmethod
    def parse_non_db_fields(cls, model_dict: Dict) -> Dict:
        """
        Receives dictionary of model that is about to be saved and changes uuid fields
        to strings in bulk_update.

        :param model_dict: dictionary of model that is about to be saved
        :type model_dict: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        for name, field in cls.ormar_config.model_fields.items():
            if field.__type__ == uuid.UUID and name in model_dict:
                # uuid_format of the column decides the string representation
                parsers = {"string": lambda x: str(x), "hex": lambda x: "%.32x" % x.int}
                uuid_format = field.column_type.uuid_format
                parser: Callable[..., Any] = parsers.get(uuid_format, lambda x: x)
                model_dict[name] = parser(model_dict[name])
        return model_dict

    @classmethod
    def substitute_models_with_pks(cls, model_dict: Dict) -> Dict:  # noqa CCR001
        """
        Receives dictionary of model that is about to be saved and changes all related
        models that are stored as foreign keys to their fk value.

        :param model_dict: dictionary of model that is about to be saved
        :type model_dict: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        for field in cls.extract_related_names():
            field_value = model_dict.get(field, None)
            if field_value is not None:
                target_field = cls.ormar_config.model_fields[field]
                target_pkname = target_field.to.ormar_config.pkname
                if isinstance(field_value, ormar.Model):  # pragma: no cover
                    pk_value = getattr(field_value, target_pkname)
                    if not pk_value:
                        raise ModelPersistenceError(
                            f"You cannot save {field_value.get_name()} "
                            f"model without pk set!"
                        )
                    model_dict[field] = pk_value
                elif isinstance(field_value, (list, dict)) and field_value:
                    # dumped related models - extract pk from each dict
                    if isinstance(field_value, list):
                        model_dict[field] = [
                            target.get(target_pkname) for target in field_value
                        ]
                    else:
                        model_dict[field] = field_value.get(target_pkname)
                else:
                    # empty values are dropped entirely from the payload
                    model_dict.pop(field, None)
        return model_dict

    @classmethod
    def reconvert_str_to_bytes(cls, model_dict: Dict) -> Dict:
        """
        Receives dictionary of model that is about to be saved and changes
        all bytes fields that are represented as strings back into bytes.

        :param model_dict: dictionary of model that is about to be saved
        :type model_dict: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        bytes_base64_fields = {
            name
            for name, field in cls.ormar_config.model_fields.items()
            if field.represent_as_base64_str
        }
        for key, value in model_dict.items():
            if key in cls._bytes_fields and isinstance(value, str):
                # base64-represented fields are decoded, plain ones utf-8 encoded
                model_dict[key] = (
                    value.encode("utf-8")
                    if key not in bytes_base64_fields
                    else base64.b64decode(value)
                )
        return model_dict

    @classmethod
    def dump_all_json_fields_to_str(cls, model_dict: Dict) -> Dict:
        """
        Receives dictionary of model that is about to be saved and changes
        all json fields into strings

        :param model_dict: dictionary of model that is about to be saved
        :type model_dict: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        for key, value in model_dict.items():
            if key in cls._json_fields:
                model_dict[key] = encode_json(value)
        return model_dict

    @classmethod
    def populate_default_values(cls, new_kwargs: Dict) -> Dict:
        """
        Receives dictionary of model that is about to be saved and populates the default
        value on the fields that have the default value set, but no actual value was
        passed by the user.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        for field_name, field in cls.ormar_config.model_fields.items():
            if field_name not in new_kwargs and field.has_default(use_server=False):
                new_kwargs[field_name] = field.get_default()
            # clear fields with server_default set as None
            if (
                field.server_default is not None
                and new_kwargs.get(field_name, None) is None
            ):
                new_kwargs.pop(field_name, None)
        return new_kwargs

    @classmethod
    def validate_enums(cls, new_kwargs: Dict) -> Dict:
        """
        Receives dictionary of model that is about to be saved and validates the
        fields with choices set to see if the value is allowed.

        :param new_kwargs: dictionary of model that is about to be saved
        :type new_kwargs: Dict
        :return: dictionary of model that is about to be saved
        :rtype: Dict
        """
        validators = cls._build_individual_schema_validator()
        for key, value in new_kwargs.items():
            if key in validators:
                # raises pydantic ValidationError for disallowed values
                validators[key].validate_python(value)
        return new_kwargs

    @classmethod
    def _build_individual_schema_validator(cls) -> Any:
        """
        Builds (and caches) a separate pydantic schema validator per field,
        extracted from the model's core schema, so single values can be
        validated without validating the whole model.

        :return: mapping of field name to its schema validator
        :rtype: Dict[str, Union[SchemaValidator, PluggableSchemaValidator]]
        """
        if cls.__ormar_fields_validators__ is not None:
            return cls.__ormar_fields_validators__
        field_validators = {}
        for key, field in cls._extract_pydantic_fields().items():
            if cls.__pydantic_core_schema__["type"] == "definitions":
                # keep definitions available so refs in field schemas resolve
                schema = {
                    "type": "definitions",
                    "schema": field["schema"],
                    "definitions": cls.__pydantic_core_schema__["definitions"],
                }
            else:
                schema = field["schema"]
            field_validators[key] = create_schema_validator(
                schema, cls, cls.__module__, cls.__qualname__, "BaseModel"
            )
        cls.__ormar_fields_validators__ = field_validators
        return cls.__ormar_fields_validators__

    @classmethod
    def _extract_pydantic_fields(cls) -> Any:
        """
        Extracts the per-field schemas from the model's pydantic core schema,
        handling both plain "model" schemas and "definitions"-wrapped ones.

        :return: mapping of field name to its pydantic core field schema
        :rtype: Any
        """
        if cls.__pydantic_core_schema__["type"] == "model":
            return cls.__pydantic_core_schema__["schema"]["fields"]
        elif cls.__pydantic_core_schema__["type"] == "definitions":
            main_schema = cls.__pydantic_core_schema__["schema"]
            if "schema_ref" in main_schema:  # pragma: no cover
                # resolve the reference to the actual model schema
                reference_id = main_schema["schema_ref"]
                return next(
                    ref
                    for ref in cls.__pydantic_core_schema__["definitions"]
                    if ref["ref"] == reference_id
                )["schema"]["fields"]
            return main_schema["schema"]["fields"]

    @staticmethod
    async def _upsert_model(
        instance: "Model",
        save_all: bool,
        previous_model: Optional["Model"],
        relation_field: Optional["ForeignKeyField"],
        update_count: int,
    ) -> int:
        """
        Method updates given instance if:

        * instance is not saved or
        * instance have no pk or
        * save_all=True flag is set

        and instance is not __pk_only__.

        If relation leading to instance is a ManyToMany also the through model is saved

        :param instance: current model to upsert
        :type instance: Model
        :param save_all: flag if all models should be saved or only not saved ones
        :type save_all: bool
        :param relation_field: field with relation
        :type relation_field: Optional[ForeignKeyField]
        :param previous_model: previous model from which method came
        :type previous_model: Model
        :param update_count: no of updated models
        :type update_count: int
        :return: no of updated models
        :rtype: int
        """
        if (
            save_all or not instance.pk or not instance.saved
        ) and not instance.__pk_only__:
            await instance.upsert(__force_save__=True)
            if relation_field and relation_field.is_multi:
                await instance._upsert_through_model(
                    instance=instance,
                    relation_field=relation_field,
                    previous_model=cast("Model", previous_model),
                )
            update_count += 1
        return update_count

    @staticmethod
    async def _upsert_through_model(
        instance: "Model", previous_model: "Model", relation_field: "ForeignKeyField"
    ) -> None:
        """
        Upsert through model for m2m relation.

        :param instance: current model to upsert
        :type instance: Model
        :param relation_field: field with relation
        :type relation_field: Optional[ForeignKeyField]
        :param previous_model: previous model from which method came
        :type previous_model: Model
        """
        through_name = previous_model.ormar_config.model_fields[
            relation_field.name
        ].through.get_name()
        through = getattr(instance, through_name)
        if through:
            through_dict = through.model_dump(exclude=through.extract_related_names())
        else:
            through_dict = {}
        await getattr(
            previous_model, relation_field.name
        ).queryset_proxy.upsert_through_instance(instance, **through_dict)

    async def _update_relation_list(
        self,
        fields_list: Collection["ForeignKeyField"],
        follow: bool,
        save_all: bool,
        relation_map: Dict,
        update_count: int,
    ) -> int:
        """
        Internal method used in save_related to follow deeper from
        related models and update numbers of updated related instances.

        :type save_all: flag if all models should be saved
        :type save_all: bool
        :param fields_list: list of ormar fields to follow and save
        :type fields_list: Collection["ForeignKeyField"]
        :param relation_map: map of relations to follow
        :type relation_map: Dict
        :param follow: flag to trigger deep save -
        by default only directly related models are saved
        with follow=True also related models of related models are saved
        :type follow: bool
        :param update_count: internal parameter for recursive calls -
        number of updated instances
        :type update_count: int
        :return: tuple of update count and visited
        :rtype: int
        """
        for field in fields_list:
            values = self._get_field_values(name=field.name)
            for value in values:
                if follow:
                    # recurse into relations of the related model as well
                    update_count = await value.save_related(
                        follow=follow,
                        save_all=save_all,
                        relation_map=self._skip_ellipsis(  # type: ignore
                            relation_map, field.name, default_return={}
                        ),
                        update_count=update_count,
                        previous_model=self,
                        relation_field=field,
                    )
                else:
                    # save only the directly related model
                    update_count = await value._upsert_model(
                        instance=value,
                        save_all=save_all,
                        previous_model=self,
                        relation_field=field,
                        update_count=update_count,
                    )
        return update_count

    def _get_field_values(self, name: str) -> List:
        """
        Extract field values and ensures it is a list.

        :param name: name of the field
        :type name: str
        :return: list of values
        :rtype: List
        """
        values = getattr(self, name) or []
        if not isinstance(values, list):
            values = [values]
        return values
collerek-ormar-c09209a/ormar/models/model.py 0000664 0000000 0000000 00000031720 15130200524 0021031 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, TypeVar, Union
import ormar.queryset # noqa I100
from ormar.exceptions import ModelPersistenceError, NoMatch
from ormar.models import NewBaseModel # noqa I100
from ormar.models.model_row import ModelRow
from ormar.queryset.utils import subtract_dict, translate_list_to_dict
T = TypeVar("T", bound="Model")
if TYPE_CHECKING: # pragma: no cover
from ormar import ForeignKeyField
from ormar.models.ormar_config import OrmarConfig
class Model(ModelRow):
    # concrete (non-abstract) base class for user-defined ormar models
    __abstract__ = False
    if TYPE_CHECKING:  # pragma nocover
        ormar_config: OrmarConfig

    def __repr__(self) -> str:  # pragma nocover
        # show all fields except those flagged with skip_field
        _repr = {
            k: getattr(self, k)
            for k, v in self.ormar_config.model_fields.items()
            if not v.skip_field
        }
        return f"{self.__class__.__name__}({str(_repr)})"

    async def upsert(self: T, **kwargs: Any) -> T:
        """
        Performs either a save or an update depending on the presence of the pk.
        If the pk field is filled it's an update, otherwise the save is performed.
        For save kwargs are ignored, used only in update if provided.

        With __force_save__=True the database is queried for the pk to decide
        between insert and update instead of relying on pk presence alone.

        :param kwargs: list of fields to update
        :type kwargs: Any
        :return: saved Model
        :rtype: Model
        """
        force_save = kwargs.pop("__force_save__", False)
        if force_save:
            expr = self.ormar_config.table.select().where(self.pk_column == self.pk)
            row = await self.ormar_config.database.fetch_one(expr)
            if not row:
                return await self.save()
            return await self.update(**kwargs)
        if not self.pk:
            return await self.save()
        return await self.update(**kwargs)

    async def save(self: T) -> T:
        """
        Performs a save of given Model instance.
        If primary key is already saved, db backend will throw integrity error.

        Related models are saved by pk number, reverse relation and many to many fields
        are not saved - use corresponding relations methods.

        If there are fields with server_default set and those fields
        are not already filled save will trigger also a second query
        to refresh the fields populated server side.

        Does not recognize if model was previously saved.
        If you want to perform update or insert depending on the pk
        fields presence use upsert.

        Sends pre_save and post_save signals.

        Sets model save status to True.

        :return: saved Model
        :rtype: Model
        """
        await self.signals.pre_save.send(sender=self.__class__, instance=self)
        self_fields = self._extract_model_db_fields()

        if (
            not self.pk
            and self.ormar_config.model_fields[self.ormar_config.pkname].autoincrement
        ):
            # let the database generate the autoincrement pk
            self_fields.pop(self.ormar_config.pkname, None)
        self_fields = self.populate_default_values(self_fields)
        self.update_from_dict(
            {
                k: v
                for k, v in self_fields.items()
                if k not in self.extract_related_names()
            }
        )

        self_fields = self.translate_columns_to_aliases(self_fields)
        expr = self.ormar_config.table.insert()
        expr = expr.values(**self_fields)

        pk = await self.ormar_config.database.execute(expr)
        if pk and isinstance(pk, self.pk_type()):
            setattr(self, self.ormar_config.pkname, pk)

        self.set_save_status(True)
        # refresh server side defaults
        if any(
            field.server_default is not None
            for name, field in self.ormar_config.model_fields.items()
            if name not in self_fields
        ):
            await self.load()

        await self.signals.post_save.send(sender=self.__class__, instance=self)
        return self

    async def save_related(  # noqa: CCR001, CFQ002
        self,
        follow: bool = False,
        save_all: bool = False,
        relation_map: Optional[Dict] = None,
        exclude: Union[Set, Dict, None] = None,
        update_count: int = 0,
        previous_model: Optional["Model"] = None,
        relation_field: Optional["ForeignKeyField"] = None,
    ) -> int:
        """
        Triggers an upsert method on all related models
        if the instances are not already saved.
        By default saves only the directly related ones.

        If follow=True is set it saves also related models of related models.

        To not get stuck in an infinite loop as related models also keep a relation
        to parent model visited models set is kept.

        That way already visited models that are nested are saved, but the save do not
        follow them inside. So Model A -> Model B -> Model A -> Model C will save second
        Model A but will never follow into Model C.
        Nested relations of those kind need to be persisted manually.

        :param relation_field: field with relation leading to this model
        :type relation_field: Optional[ForeignKeyField]
        :param previous_model: previous model from which method came
        :type previous_model: Model
        :param exclude: items to exclude during saving of relations
        :type exclude: Union[Set, Dict]
        :param relation_map: map of relations to follow
        :type relation_map: Dict
        :param save_all: flag if all models should be saved or only not saved ones
        :type save_all: bool
        :param follow: flag to trigger deep save -
        by default only directly related models are saved
        with follow=True also related models of related models are saved
        :type follow: bool
        :param update_count: internal parameter for recursive calls -
        number of updated instances
        :type update_count: int
        :return: number of updated/saved models
        :rtype: int
        """
        relation_map = (
            relation_map
            if relation_map is not None
            else translate_list_to_dict(self._iterate_related_models())
        )
        if exclude and isinstance(exclude, Set):
            exclude = translate_list_to_dict(exclude)
        relation_map = subtract_dict(relation_map, exclude or {})

        if relation_map:
            fields_to_visit = {
                field
                for field in self.extract_related_fields()
                if field.name in relation_map
            }
            # fk relations must be saved before self (fk column needs their pk),
            # reverse/m2m relations after self (they need self's pk)
            pre_save = {
                field
                for field in fields_to_visit
                if not field.virtual and not field.is_multi
            }
            update_count = await self._update_relation_list(
                fields_list=pre_save,
                follow=follow,
                save_all=save_all,
                relation_map=relation_map,
                update_count=update_count,
            )

            update_count = await self._upsert_model(
                instance=self,
                save_all=save_all,
                previous_model=previous_model,
                relation_field=relation_field,
                update_count=update_count,
            )

            post_save = fields_to_visit - pre_save

            update_count = await self._update_relation_list(
                fields_list=post_save,
                follow=follow,
                save_all=save_all,
                relation_map=relation_map,
                update_count=update_count,
            )

        else:
            update_count = await self._upsert_model(
                instance=self,
                save_all=save_all,
                previous_model=previous_model,
                relation_field=relation_field,
                update_count=update_count,
            )

        return update_count

    async def update(self: T, _columns: Optional[List[str]] = None, **kwargs: Any) -> T:
        """
        Performs update of Model instance in the database.
        Fields can be updated before or you can pass them as kwargs.

        Sends pre_update and post_update signals.

        Sets model save status to True.

        :param _columns: list of columns to update, if None all are updated
        :type _columns: List
        :raises ModelPersistenceError: If the pk column is not set

        :param kwargs: list of fields to update as field=value pairs
        :type kwargs: Any
        :return: updated Model
        :rtype: Model
        """
        if kwargs:
            self.update_from_dict(kwargs)

        if not self.pk:
            raise ModelPersistenceError(
                "You cannot update not saved model! Use save or upsert method."
            )

        await self.signals.pre_update.send(
            sender=self.__class__, instance=self, passed_args=kwargs
        )
        self_fields = self._extract_model_db_fields()
        # pk itself is never updated, it is used only in the where clause
        self_fields.pop(self.get_column_name_from_alias(self.ormar_config.pkname))
        if _columns:
            self_fields = {k: v for k, v in self_fields.items() if k in _columns}
        if self_fields:
            self_fields = self.translate_columns_to_aliases(self_fields)
            expr = self.ormar_config.table.update().values(**self_fields)
            expr = expr.where(self.pk_column == getattr(self, self.ormar_config.pkname))
            await self.ormar_config.database.execute(expr)
        self.set_save_status(True)
        await self.signals.post_update.send(sender=self.__class__, instance=self)
        return self

    async def delete(self) -> int:
        """
        Removes the Model instance from the database.

        Sends pre_delete and post_delete signals.

        Sets model save status to False.

        Note it does not delete the Model itself (python object).
        So you can delete and later save (since pk is deleted no conflict will arise)
        or update and the Model will be saved in database again.

        :return: number of deleted rows (for some backends)
        :rtype: int
        """
        await self.signals.pre_delete.send(sender=self.__class__, instance=self)
        expr = self.ormar_config.table.delete()
        expr = expr.where(self.pk_column == (getattr(self, self.ormar_config.pkname)))
        result = await self.ormar_config.database.execute(expr)
        self.set_save_status(False)
        await self.signals.post_delete.send(sender=self.__class__, instance=self)
        return result

    async def load(self: T) -> T:
        """
        Allow to refresh existing Models fields from database.
        Be careful as the related models can be overwritten by pk_only models in load.
        Does NOT refresh the related models fields if they were loaded before.

        :raises NoMatch: If given pk is not found in database.
        :return: reloaded Model
        :rtype: Model
        """
        expr = self.ormar_config.table.select().where(self.pk_column == self.pk)
        row = await self.ormar_config.database.fetch_one(expr)
        if not row:  # pragma nocover
            raise NoMatch("Instance was deleted from database and cannot be refreshed")
        kwargs = dict(row)
        kwargs = self.translate_aliases_to_columns(kwargs)
        self.update_from_dict(kwargs)
        self.set_save_status(True)
        return self

    async def load_all(
        self: T,
        follow: bool = False,
        exclude: Union[List, str, Set, Dict, None] = None,
        order_by: Union[List, str, None] = None,
    ) -> T:
        """
        Allow to refresh existing Models fields from database.
        Performs refresh of the related models fields.

        By default, loads only self and the directly related ones.

        If follow=True is set it loads also related models of related models.

        To not get stuck in an infinite loop as related models also keep a relation
        to parent model visited models set is kept.

        That way already visited models that are nested are loaded, but the load do not
        follow them inside. So Model A -> Model B -> Model C -> Model A -> Model X
        will load second Model A but will never follow into Model X.
        Nested relations of those kind need to be loaded manually.

        :param order_by: columns by which models should be sorted
        :type order_by: Union[List, str]
        :raises NoMatch: If given pk is not found in database.
        :param exclude: related models to exclude
        :type exclude: Union[List, str, Set, Dict]
        :param follow: flag to trigger deep save -
        by default only directly related models are saved
        with follow=True also related models of related models are saved
        :type follow: bool
        :return: reloaded Model
        :rtype: Model
        """
        relations = list(self.extract_related_names())
        if follow:
            relations = self._iterate_related_models()
        queryset = self.__class__.objects
        if exclude:
            queryset = queryset.exclude_fields(exclude)
        if order_by:
            queryset = queryset.order_by(order_by)
        instance = await queryset.select_related(relations).get(pk=self.pk)
        # drop stale relation cache before repopulating from the fresh instance
        self._orm.clear()
        self.update_from_dict(instance.model_dump())
        return self
collerek-ormar-c09209a/ormar/models/model_row.py 0000664 0000000 0000000 00000035753 15130200524 0021732 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union, cast
try:
from sqlalchemy.engine.result import ResultProxy # type: ignore
except ImportError: # pragma: no cover
from sqlalchemy.engine.result import Row as ResultProxy # type: ignore
from ormar.models import NewBaseModel # noqa: I202
from ormar.models.excludable import ExcludableItems
from ormar.models.helpers.models import group_related_list
if TYPE_CHECKING: # pragma: no cover
from ormar.fields import ForeignKeyField
from ormar.models import Model
class ModelRow(NewBaseModel):
    @classmethod
    def from_row(  # noqa: CFQ002
        cls,
        row: ResultProxy,
        source_model: Type["Model"],
        select_related: Optional[List] = None,
        related_models: Any = None,
        related_field: Optional["ForeignKeyField"] = None,
        excludable: Optional[ExcludableItems] = None,
        current_relation_str: str = "",
        proxy_source_model: Optional[Type["Model"]] = None,
        used_prefixes: Optional[List[str]] = None,
    ) -> Optional["Model"]:
        """
        Model method to convert raw sql row from database into ormar.Model instance.
        Traverses nested models if they were specified in select_related for query.

        Called recurrently and returns model instance if it's present in the row.
        Note that it's processing one row at a time, so if there are duplicates of
        parent row that needs to be joined/combined
        (like parent row in sql join with 2+ child rows)
        instances populated in this method are later combined in the QuerySet.

        Other method working directly on raw database results is in prefetch_query,
        where rows are populated in a different way as they do not have
        nested models in result.

        :param used_prefixes: list of already extracted prefixes
        :type used_prefixes: List[str]
        :param proxy_source_model: source model from which querysetproxy is constructed
        :type proxy_source_model: Optional[Type["ModelRow"]]
        :param excludable: structure of fields to include and exclude
        :type excludable: ExcludableItems
        :param current_relation_str: name of the relation field
        :type current_relation_str: str
        :param source_model: model on which relation was defined
        :type source_model: Type[Model]
        :param row: raw result row from the database
        :type row: ResultProxy
        :param select_related: list of names of related models fetched from database
        :type select_related: List
        :param related_models: list or dict of related models
        :type related_models: Union[List, Dict]
        :param related_field: field with relation declaration
        :type related_field: ForeignKeyField
        :return: returns model if model is populated from database
        :rtype: Optional[Model]
        """
        item: Dict[str, Any] = {}
        select_related = select_related or []
        related_models = related_models or []
        table_prefix = ""
        used_prefixes = used_prefixes if used_prefixes is not None else []
        excludable = excludable or ExcludableItems()
        if select_related:
            # flat "a__b__c" strings are grouped into a nested structure once,
            # at the top of the recursion
            related_models = group_related_list(select_related)
        if related_field:
            # nested call - resolve the alias under which this model's columns
            # appear in the joined row
            table_prefix = cls._process_table_prefix(
                source_model=source_model,
                current_relation_str=current_relation_str,
                related_field=related_field,
                used_prefixes=used_prefixes,
            )
        # recurse into child relations first, then extract own columns
        item = cls._populate_nested_models_from_row(
            item=item,
            row=row,
            related_models=related_models,
            excludable=excludable,
            current_relation_str=current_relation_str,
            source_model=source_model,  # type: ignore
            proxy_source_model=proxy_source_model,  # type: ignore
            table_prefix=table_prefix,
            used_prefixes=used_prefixes,
        )
        item = cls.extract_prefixed_table_columns(
            item=item, row=row, table_prefix=table_prefix, excludable=excludable
        )
        instance: Optional["Model"] = None
        # a missing pk means the (outer) join produced no row for this model
        if item.get(cls.ormar_config.pkname, None) is not None:
            item["__excluded__"] = cls.get_names_to_exclude(
                excludable=excludable, alias=table_prefix
            )
            instance = cast("Model", cls(**item))
            instance.set_save_status(True)
        return instance
@classmethod
def _process_table_prefix(
cls,
source_model: Type["Model"],
current_relation_str: str,
related_field: "ForeignKeyField",
used_prefixes: List[str],
) -> str:
"""
:param source_model: model on which relation was defined
:type source_model: Type[Model]
:param current_relation_str: current relation string
:type current_relation_str: str
:param related_field: field with relation declaration
:type related_field: "ForeignKeyField"
:param used_prefixes: list of already extracted prefixes
:type used_prefixes: List[str]
:return: table_prefix to use
:rtype: str
"""
if related_field.is_multi:
previous_model = related_field.through
else:
previous_model = related_field.owner
table_prefix = cls.ormar_config.alias_manager.resolve_relation_alias(
from_model=previous_model, relation_name=related_field.name
)
if not table_prefix or table_prefix in used_prefixes:
manager = cls.ormar_config.alias_manager
table_prefix = manager.resolve_relation_alias_after_complex(
source_model=source_model,
relation_str=current_relation_str,
relation_field=related_field,
)
used_prefixes.append(table_prefix)
return table_prefix
    @classmethod
    def _populate_nested_models_from_row(  # noqa: CFQ002
        cls,
        item: dict,
        row: ResultProxy,
        source_model: Type["Model"],
        related_models: Any,
        excludable: ExcludableItems,
        table_prefix: str,
        used_prefixes: List[str],
        current_relation_str: Optional[str] = None,
        proxy_source_model: Optional[Type["Model"]] = None,
    ) -> dict:
        """
        Traverses structure of related models and populates the nested models
        from the database row.
        Related models can be a list if only directly related models are to be
        populated, converted to dict if related models also have their own related
        models to be populated.

        Recurrently calls from_row method on nested instances and create nested
        instances. In the end those instances are added to the final model dictionary.

        :param proxy_source_model: source model from which querysetproxy is constructed
        :type proxy_source_model: Optional[Type["ModelRow"]]
        :param excludable: structure of fields to include and exclude
        :type excludable: ExcludableItems
        :param source_model: source model from which relation started
        :type source_model: Type[Model]
        :param current_relation_str: joined related parts into one string
        :type current_relation_str: str
        :param item: dictionary of already populated nested models, otherwise empty dict
        :type item: Dict
        :param row: raw result row from the database
        :type row: ResultProxy
        :param related_models: list or dict of related models
        :type related_models: Union[Dict, List]
        :param table_prefix: alias of this model's columns in the row
        :type table_prefix: str
        :param used_prefixes: list of already extracted prefixes
        :type used_prefixes: List[str]
        :return: dictionary with keys corresponding to model fields names
        and values are database values
        :rtype: Dict
        """
        for related in related_models:
            field = cls.ormar_config.model_fields[related]
            field = cast("ForeignKeyField", field)
            model_cls = field.to
            model_excludable = excludable.get(
                model_cls=cast(Type["Model"], cls), alias=table_prefix
            )
            # skip relations explicitly excluded from the query
            if model_excludable.is_excluded(related):
                continue
            relation_str, remainder = cls._process_remainder_and_relation_string(
                related_models=related_models,
                current_relation_str=current_relation_str,
                related=related,
            )
            # recurse - build the child model (or None) from the same row
            child = model_cls.from_row(
                row,
                related_models=remainder,
                related_field=field,
                excludable=excludable,
                current_relation_str=relation_str,
                source_model=source_model,
                proxy_source_model=proxy_source_model,
                used_prefixes=used_prefixes,
            )
            item[model_cls.get_column_name_from_alias(related)] = child
            # m2m relations additionally carry the through model instance
            if (
                field.is_multi
                and child
                and not model_excludable.is_excluded(field.through.get_name())
            ):
                cls._populate_through_instance(
                    row=row,
                    item=item,
                    related=related,
                    excludable=excludable,
                    child=child,
                    proxy_source_model=proxy_source_model,
                )
        return item
@staticmethod
def _process_remainder_and_relation_string(
related_models: Union[Dict, List],
current_relation_str: Optional[str],
related: str,
) -> Tuple[str, Optional[Union[Dict, List]]]:
"""
Process remainder models and relation string
:param related_models: list or dict of related models
:type related_models: Union[Dict, List]
:param current_relation_str: current relation string
:type current_relation_str: Optional[str]
:param related: name of the relation
:type related: str
"""
relation_str = (
"__".join([current_relation_str, related])
if current_relation_str
else related
)
remainder = None
if isinstance(related_models, dict) and related_models[related]:
remainder = related_models[related]
return relation_str, remainder
@classmethod
def _populate_through_instance( # noqa: CFQ002
cls,
row: ResultProxy,
item: Dict,
related: str,
excludable: ExcludableItems,
child: "Model",
proxy_source_model: Optional[Type["Model"]],
) -> None:
"""
Populates the through model on reverse side of current query.
Normally it's child class, unless the query is from queryset.
:param row: row from db result
:type row: ResultProxy
:param item: parent item dict
:type item: Dict
:param related: current relation name
:type related: str
:param excludable: structure of fields to include and exclude
:type excludable: ExcludableItems
:param child: child item of parent
:type child: "Model"
:param proxy_source_model: source model from which querysetproxy is constructed
:type proxy_source_model: Type["Model"]
"""
through_name = cls.ormar_config.model_fields[related].through.get_name()
through_child = cls._create_through_instance(
row=row, related=related, through_name=through_name, excludable=excludable
)
if child.__class__ != proxy_source_model:
setattr(child, through_name, through_child)
else:
item[through_name] = through_child
child.set_save_status(True)
@classmethod
def _create_through_instance(
cls,
row: ResultProxy,
through_name: str,
related: str,
excludable: ExcludableItems,
) -> "ModelRow":
"""
Initialize the through model from db row.
Excluded all relation fields and other exclude/include set in excludable.
:param row: loaded row from database
:type row: sqlalchemy.engine.ResultProxy
:param through_name: name of the through field
:type through_name: str
:param related: name of the relation
:type related: str
:param excludable: structure of fields to include and exclude
:type excludable: ExcludableItems
:return: initialized through model without relation
:rtype: "ModelRow"
"""
model_cls = cls.ormar_config.model_fields[through_name].to
table_prefix = cls.ormar_config.alias_manager.resolve_relation_alias(
from_model=cls, relation_name=related
)
# remove relations on through field
model_excludable = excludable.get(model_cls=model_cls, alias=table_prefix)
model_excludable.set_values(
value=model_cls.extract_related_names(), is_exclude=True
)
child_dict = model_cls.extract_prefixed_table_columns(
item={}, row=row, excludable=excludable, table_prefix=table_prefix
)
child_dict["__excluded__"] = model_cls.get_names_to_exclude(
excludable=excludable, alias=table_prefix
)
child = model_cls(**child_dict) # type: ignore
return child
@classmethod
def extract_prefixed_table_columns(
cls,
item: dict,
row: ResultProxy,
table_prefix: str,
excludable: ExcludableItems,
) -> Dict:
"""
Extracts own fields from raw sql result, using a given prefix.
Prefix changes depending on the table's position in a join.
If the table is a main table, there is no prefix.
All joined tables have prefixes to allow duplicate column names,
as well as duplicated joins to the same table from multiple different tables.
Extracted fields populates the related dict later used to construct a Model.
Used in Model.from_row and PrefetchQuery._populate_rows methods.
:param excludable: structure of fields to include and exclude
:type excludable: ExcludableItems
:param item: dictionary of already populated nested models, otherwise empty dict
:type item: Dict
:param row: raw result row from the database
:type row: sqlalchemy.engine.result.ResultProxy
:param table_prefix: prefix of the table from AliasManager
each pair of tables have own prefix (two of them depending on direction) -
used in joins to allow multiple joins to the same table.
:type table_prefix: str
:return: dictionary with keys corresponding to model fields names
and values are database values
:rtype: Dict
"""
selected_columns = cls.own_table_columns(
model=cls, excludable=excludable, alias=table_prefix, use_alias=False
)
column_prefix = table_prefix + "_" if table_prefix else ""
for column in cls.ormar_config.table.columns:
alias = cls.get_column_name_from_alias(column.name)
if alias not in item and alias in selected_columns:
prefixed_name = f"{column_prefix}{column.name}"
item[alias] = row[prefixed_name]
return item
collerek-ormar-c09209a/ormar/models/modelproxy.py 0000664 0000000 0000000 00000000553 15130200524 0022133 0 ustar 00root root 0000000 0000000 from ormar.models.mixins import (
ExcludableMixin,
MergeModelMixin,
PydanticMixin,
SavePrepareMixin,
)
class ModelTableProxy(
    MergeModelMixin,
    SavePrepareMixin,
    ExcludableMixin,
    PydanticMixin,
):
    """
    Combines all functional mixins (merging, save preparation, field
    exclusion and pydantic helpers) into a single base.
    One of the bases of the ormar Model class.
    """
collerek-ormar-c09209a/ormar/models/newbasemodel.py 0000664 0000000 0000000 00000125015 15130200524 0022377 0 ustar 00root root 0000000 0000000 import base64
import sys
import warnings
from typing import (
TYPE_CHECKING,
AbstractSet,
Any,
Dict,
List,
Literal,
Mapping,
MutableSequence,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
import pydantic
import sqlalchemy
import typing_extensions
import ormar # noqa I100
from ormar.exceptions import ModelError, ModelPersistenceError
from ormar.fields.foreign_key import ForeignKeyField
from ormar.fields.parsers import decode_bytes, encode_json
from ormar.models.helpers import register_relation_in_alias_manager
from ormar.models.helpers.relations import expand_reverse_relationship
from ormar.models.helpers.sqlalchemy import (
populate_config_sqlalchemy_table_if_required,
update_column_definition,
)
from ormar.models.metaclass import ModelMetaclass
from ormar.models.modelproxy import ModelTableProxy
from ormar.models.utils import Extra
from ormar.queryset.utils import translate_list_to_dict
from ormar.relations.alias_manager import AliasManager
from ormar.relations.relation import Relation
from ormar.relations.relation_manager import RelationsManager
from ormar.warnings import OrmarDeprecatedSince020
if TYPE_CHECKING: # pragma no cover
from ormar.models import Model, OrmarConfig
from ormar.signals import SignalEmitter
# TypeVar bound to NewBaseModel - used for methods returning instances
# of the concrete subclass.
T = TypeVar("T", bound="NewBaseModel")

# Shorthand aliases for the include/exclude structures accepted by
# the export/dump helpers below.
IntStr = Union[int, str]
DictStrAny = Dict[str, Any]
SetStr = Set[str]
AbstractSetIntStr = AbstractSet[IntStr]
MappingIntStrAny = Mapping[IntStr, Any]
class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass):
    """
    Main base class of ormar Model.
    Inherits from pydantic BaseModel and has all mixins combined in ModelTableProxy.
    Constructed with ModelMetaclass which in turn also inherits pydantic metaclass.

    Abstracts away all internals and helper functions, so final Model class has only
    the logic concerned with database connection and data persistence.
    """

    # Internal bookkeeping attributes; they are set via object.__setattr__
    # in the methods below, bypassing pydantic validation.
    __slots__ = (
        "_orm_id",
        "_orm_saved",
        "_orm",
        "_pk_column",
        "__pk_only__",
        "__cached_hash__",
        "__pydantic_extra__",
        "__pydantic_fields_set__",
    )

    # Declarations for static type checkers only - these names are provided
    # at runtime by the metaclass / initialization, not by class body.
    if TYPE_CHECKING:  # pragma no cover
        pk: Any
        __relation_map__: Optional[List[str]]
        __cached_hash__: Optional[int]
        _orm_relationship_manager: AliasManager
        _orm: RelationsManager
        _orm_id: int
        _orm_saved: bool
        _related_names: Optional[Set]
        _through_names: Optional[Set]
        _related_names_hash: str
        _quick_access_fields: Set
        _json_fields: Set
        _bytes_fields: Set
        ormar_config: OrmarConfig
    # noinspection PyMissingConstructor
    def __init__(self, *args: Any, **kwargs: Any) -> None:  # type: ignore
        """
        Initializer that creates a new ormar Model that is also pydantic Model at the
        same time.

        Passed keyword arguments can be only field names and their corresponding values
        as those will be passed to pydantic validation that will complain if extra
        params are passed.

        If relations are defined each relation is expanded and children models are also
        initialized and validated. Relation from both sides is registered so you can
        access related models from both sides.

        Json fields are automatically loaded/dumped if needed.

        Models marked as abstract=True in internal OrmarConfig cannot be initialized.

        Accepts also special __pk_only__ flag that indicates that Model is constructed
        only with primary key value (so no other fields, it's a child model on other
        Model), that causes skipping the validation, that's the only case when the
        validation can be skipped.

        Accepts also special __excluded__ parameter that contains a set of fields that
        should be explicitly set to None, as otherwise pydantic will try to populate
        them with their default values if default is set.

        :raises ModelError: if abstract model is initialized, model has ForwardRefs
        that has not been updated or unknown field is passed
        :param args: ignored args
        :type args: Any
        :param kwargs: keyword arguments - all fields values and some special params
        :type kwargs: Any
        """
        self._verify_model_can_be_initialized()
        self._initialize_internal_attributes()
        pk_only = kwargs.pop("__pk_only__", False)
        object.__setattr__(self, "__pk_only__", pk_only)
        new_kwargs, through_tmp_dict = self._process_kwargs(kwargs)
        if not pk_only:
            # full init - run pydantic validation in-place on this instance
            self.__pydantic_validator__.validate_python(
                new_kwargs, self_instance=self  # type: ignore
            )
        else:
            # pk-only child model - skip validation and set pydantic internal
            # state directly
            fields_set = {self.ormar_config.pkname}
            values = new_kwargs
            object.__setattr__(self, "__dict__", values)
            object.__setattr__(self, "__pydantic_fields_set__", fields_set)
        # add back through fields
        new_kwargs.update(through_tmp_dict)
        model_fields = object.__getattribute__(self, "ormar_config").model_fields
        # register the columns models after initialization
        for related in self.extract_related_names().union(self.extract_through_names()):
            model_fields[related].expand_relationship(
                new_kwargs.get(related), self, to_register=True
            )
def __setattr__(self, name: str, value: Any) -> None: # noqa CCR001
"""
Overwrites setattr in pydantic parent as otherwise descriptors are not called.
:param name: name of the attribute to set
:type name: str
:param value: value of the attribute to set
:type value: Any
:return: None
:rtype: None
"""
prev_hash = hash(self)
if hasattr(self, name):
object.__setattr__(self, name, value)
else:
# let pydantic handle errors for unknown fields
super().__setattr__(name, value)
# In this case, the hash could have changed, so update it
if name == self.ormar_config.pkname or self.pk is None:
object.__setattr__(self, "__cached_hash__", None)
new_hash = hash(self)
if prev_hash != new_hash:
self._update_relation_cache(prev_hash, new_hash)
def __getattr__(self, item: str) -> Any:
"""
Used for private attributes of pydantic v2.
:param item: name of attribute
:type item: str
:return: Any
:rtype: Any
"""
# TODO: Check __pydantic_extra__
if item == "__pydantic_extra__":
return None
return super().__getattr__(item) # type: ignore
def __getstate__(self) -> Dict[Any, Any]:
state = super().__getstate__()
self_dict = self.model_dump()
state["__dict__"].update(**self_dict)
return state
def __setstate__(self, state: Dict[Any, Any]) -> None:
relations = {
k: v
for k, v in state["__dict__"].items()
if k in self.extract_related_names()
}
basic_state = {
k: v
for k, v in state["__dict__"].items()
if k not in self.extract_related_names()
}
state["__dict__"] = basic_state
super().__setstate__(state)
self._initialize_internal_attributes()
for name, value in relations.items():
setattr(self, name, value)
def _update_relation_cache(self, prev_hash: int, new_hash: int) -> None:
"""
Update all relation proxy caches with different hash if we have changed
:param prev_hash: The previous hash to update
:type prev_hash: int
:param new_hash: The hash to update to
:type new_hash: int
"""
def _update_cache(relations: List[Relation], recurse: bool = True) -> None:
for relation in relations:
relation_proxy = relation.get()
if hasattr(relation_proxy, "update_cache"):
relation_proxy.update_cache(prev_hash, new_hash) # type: ignore
elif recurse and hasattr(relation_proxy, "_orm"):
_update_cache(
relation_proxy._orm._relations.values(), # type: ignore
recurse=False,
)
_update_cache(list(self._orm._relations.values()))
    def _internal_set(self, name: str, value: Any) -> None:
        """
        Delegates call to pydantic, bypassing the overridden __setattr__.

        :param name: name of param
        :type name: str
        :param value: value to set
        :type value: Any
        :return: None
        :rtype: None
        """
        super().__setattr__(name, value)
def _verify_model_can_be_initialized(self) -> None:
"""
Raises exception if model is abstract or has ForwardRefs in relation fields.
:return: None
:rtype: None
"""
if self.ormar_config.abstract:
raise ModelError(f"You cannot initialize abstract model {self.get_name()}")
if self.ormar_config.requires_ref_update:
raise ModelError(
f"Model {self.get_name()} has not updated "
f"ForwardRefs. \nBefore using the model you "
f"need to call update_forward_refs()."
)
    def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]:  # noqa: CCR001
        """
        Initializes nested models.

        Removes property_fields.

        Checks if field is in the model fields or pydantic fields.

        Nullifies fields that should be excluded.

        Extracts through models from kwargs into temporary dict.

        :param kwargs: passed to init keyword arguments
        :type kwargs: Dict
        :return: modified kwargs and extracted through-field values
        :rtype: Tuple[Dict, Dict]
        """
        property_fields = self.ormar_config.property_fields
        model_fields = self.ormar_config.model_fields
        pydantic_fields = set(self.__class__.model_fields.keys())
        # remove property fields
        for prop_filed in property_fields:
            kwargs.pop(prop_filed, None)
        excluded: Set[str] = kwargs.pop("__excluded__", set())
        # "pk" is an alias for the actual primary key field name
        if "pk" in kwargs:
            kwargs[self.ormar_config.pkname] = kwargs.pop("pk")
        # extract through fields
        through_tmp_dict = dict()
        for field_name in self.extract_through_names():
            through_tmp_dict[field_name] = kwargs.pop(field_name, None)
        kwargs = self._remove_extra_parameters_if_they_should_be_ignored(
            kwargs=kwargs, model_fields=model_fields, pydantic_fields=pydantic_fields
        )
        try:
            new_kwargs: Dict[str, Any] = {
                k: self._convert_to_bytes(
                    k,
                    self._convert_json(
                        k,
                        (
                            model_fields[k].expand_relationship(
                                v, self, to_register=False
                            )
                            if k in model_fields
                            # for an unknown key the model_fields[k] lookup
                            # raises KeyError on purpose - converted to
                            # ModelError below
                            else (v if k in pydantic_fields else model_fields[k])
                        ),
                    ),
                )
                for k, v in kwargs.items()
            }
        except KeyError as e:
            raise ModelError(
                f"Unknown field '{e.args[0]}' for model {self.get_name(lower=False)}"
            )
        # explicitly set None to excluded fields
        # as pydantic populates them with default if set
        for field_to_nullify in excluded:
            new_kwargs[field_to_nullify] = None
        return new_kwargs, through_tmp_dict
def _remove_extra_parameters_if_they_should_be_ignored(
self, kwargs: Dict, model_fields: Dict, pydantic_fields: Set
) -> Dict:
"""
Removes the extra fields from kwargs if they should be ignored.
:param kwargs: passed arguments
:type kwargs: Dict
:param model_fields: dictionary of model fields
:type model_fields: Dict
:param pydantic_fields: set of pydantic fields names
:type pydantic_fields: Set
:return: dict without extra fields
:rtype: Dict
"""
if self.ormar_config.extra == Extra.ignore:
kwargs = {
k: v
for k, v in kwargs.items()
if k in model_fields or k in pydantic_fields
}
return kwargs
def _initialize_internal_attributes(self) -> None:
"""
Initializes internal attributes during __init__()
:rtype: None
"""
# object.__setattr__(self, "_orm_id", uuid.uuid4().hex)
object.__setattr__(self, "_orm_saved", False)
object.__setattr__(self, "_pk_column", None)
object.__setattr__(
self,
"_orm",
RelationsManager(
related_fields=self.extract_related_fields(), owner=cast("Model", self)
),
)
def __eq__(self, other: object) -> bool:
"""
Compares other model to this model. when == is called.
:param other: other model to compare
:type other: object
:return: result of comparison
:rtype: bool
"""
if isinstance(other, NewBaseModel):
return self.__same__(other)
return super().__eq__(other) # pragma no cover
def __hash__(self) -> int:
if getattr(self, "__cached_hash__", None) is not None:
return self.__cached_hash__ or 0
if self.pk is not None:
ret = hash(str(self.pk) + self.__class__.__name__)
else:
vals = {
k: v
for k, v in self.__dict__.items()
if k not in self.extract_related_names()
}
ret = hash(str(vals) + self.__class__.__name__)
object.__setattr__(self, "__cached_hash__", ret)
return ret
def __same__(self, other: "NewBaseModel") -> bool:
"""
Used by __eq__, compares other model to this model.
Compares:
* _orm_ids,
* primary key values if it's set
* dictionary of own fields (excluding relations)
:param other: model to compare to
:type other: NewBaseModel
:return: result of comparison
:rtype: bool
"""
if (self.pk is None and other.pk is not None) or (
self.pk is not None and other.pk is None
):
return False
else:
return hash(self) == other.__hash__()
@classmethod
def get_name(cls, lower: bool = True) -> str:
"""
Returns name of the Model class, by default lowercase.
:param lower: flag if name should be set to lowercase
:type lower: bool
:return: name of the model
:rtype: str
"""
name = cls.__name__
if lower:
name = name.lower()
return name
@property
def pk_column(self) -> sqlalchemy.Column:
"""
Retrieves primary key sqlalchemy column from models OrmarConfig.table.
Each model has to have primary key.
Only one primary key column is allowed.
:return: primary key sqlalchemy column
:rtype: sqlalchemy.Column
"""
if object.__getattribute__(self, "_pk_column") is not None:
return object.__getattribute__(self, "_pk_column")
pk_columns = self.ormar_config.table.primary_key.columns.values()
pk_col = pk_columns[0]
object.__setattr__(self, "_pk_column", pk_col)
return pk_col
    @property
    def saved(self) -> bool:
        """Saved status of the model. Changed by setattr and loading from db."""
        return self._orm_saved
    @property
    def signals(self) -> "SignalEmitter":
        """Exposes the signal emitter stored on the model's OrmarConfig."""
        return self.ormar_config.signals
    @classmethod
    def pk_type(cls) -> Any:
        """Shortcut to the python type of the model's primary key field."""
        return cls.ormar_config.model_fields[cls.ormar_config.pkname].__type__
    @classmethod
    def db_backend_name(cls) -> str:
        """Shortcut to database dialect name,
        cause some dialects require different treatment."""
        return cls.ormar_config.database._backend._dialect.name
    def remove(self, parent: "Model", name: str) -> None:
        """Removes child from relation with given name in RelationshipManager.

        :param parent: parent model to detach from
        :param name: name of the relation on the parent
        """
        self._orm.remove_parent(self, parent, name)
    def set_save_status(self, status: bool) -> None:
        """Sets value of the save status (bypasses __setattr__).

        :param status: new saved status value
        """
        object.__setattr__(self, "_orm_saved", status)
    @classmethod
    def update_forward_refs(cls, **localns: Any) -> None:
        """
        Processes fields that are ForwardRef and need to be evaluated into actual
        models.

        Expands relationships, register relation in alias manager and substitutes
        sqlalchemy columns with new ones with proper column type (null before).

        Populates OrmarConfig table of the Model which is left empty before.

        Sets self_reference flag on models that links to themselves.

        Calls the pydantic method to evaluate pydantic fields.

        :param localns: local namespace
        :type localns: Any
        :return: None
        :rtype: None
        """
        # names are resolved against the defining module plus the class itself
        globalns = sys.modules[cls.__module__].__dict__.copy()
        globalns.setdefault(cls.__name__, cls)
        fields_to_check = cls.ormar_config.model_fields.copy()
        for field in fields_to_check.values():
            if field.has_unresolved_forward_refs():
                field = cast(ForeignKeyField, field)
                field.evaluate_forward_ref(globalns=globalns, localns=localns)
                field.set_self_reference_flag()
                if field.is_multi and not field.through:
                    field = cast(ormar.ManyToManyField, field)
                    field.create_default_through_model()
                expand_reverse_relationship(model_field=field)
                register_relation_in_alias_manager(field=field)
                # column type was null before the ref was resolved
                update_column_definition(model=cls, field=field)
        populate_config_sqlalchemy_table_if_required(config=cls.ormar_config)
        # super().update_forward_refs(**localns)
        cls.model_rebuild(
            force=True,
            _types_namespace={
                field.to.__name__: field.to
                for field in fields_to_check.values()
                if field.is_relation
            },
        )
        cls.ormar_config.requires_ref_update = False
@staticmethod
def _get_not_excluded_fields(
fields: Union[List, Set], include: Optional[Dict], exclude: Optional[Dict]
) -> List:
"""
Returns related field names applying on them include and exclude set.
:param include: fields to include
:type include: Union[Set, Dict, None]
:param exclude: fields to exclude
:type exclude: Union[Set, Dict, None]
:return:
:rtype: List of fields with relations that is not excluded
"""
fields = [*fields] if not isinstance(fields, list) else fields
if include:
fields = [field for field in fields if field in include]
if exclude:
fields = [
field
for field in fields
if field not in exclude
or (
exclude.get(field) is not Ellipsis
and exclude.get(field) != {"__all__"}
)
]
return fields
@staticmethod
def _extract_nested_models_from_list(
relation_map: Dict,
models: MutableSequence,
include: Union[Set, Dict, None],
exclude: Union[Set, Dict, None],
exclude_primary_keys: bool,
exclude_through_models: bool,
) -> List:
"""
Converts list of models into list of dictionaries.
:param models: List of models
:type models: List
:param include: fields to include
:type include: Union[Set, Dict, None]
:param exclude: fields to exclude
:type exclude: Union[Set, Dict, None]
:return: list of models converted to dictionaries
:rtype: List[Dict]
"""
result = []
for model in models:
try:
model_dict = model.model_dump(
relation_map=relation_map,
include=include,
exclude=exclude,
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
)
if not exclude_through_models:
model.populate_through_models(
model=model,
model_dict=model_dict,
include=include,
exclude=exclude,
relation_map=relation_map,
)
result.append(model_dict)
except ReferenceError: # pragma no cover
continue
return result
@staticmethod
def populate_through_models(
model: "Model",
model_dict: Dict,
include: Union[Set, Dict],
exclude: Union[Set, Dict],
relation_map: Dict,
) -> None:
"""
Populates through models with values from dict representation.
:param model: model to populate through models
:type model: Model
:param model_dict: dict representation of the model
:type model_dict: Dict
:param include: fields to include
:type include: Dict
:param exclude: fields to exclude
:type exclude: Dict
:param relation_map: map of relations to follow to avoid circular refs
:type relation_map: Dict
:return: None
:rtype: None
"""
include_dict = (
translate_list_to_dict(include)
if (include and isinstance(include, Set))
else include
)
exclude_dict = (
translate_list_to_dict(exclude)
if (exclude and isinstance(exclude, Set))
else exclude
)
models_to_populate = model._get_not_excluded_fields(
fields=model.extract_through_names(),
include=cast(Optional[Dict], include_dict),
exclude=cast(Optional[Dict], exclude_dict),
)
through_fields_to_populate = [
model.ormar_config.model_fields[through_model]
for through_model in models_to_populate
if model.ormar_config.model_fields[through_model].related_name
not in relation_map
]
for through_field in through_fields_to_populate:
through_instance = getattr(model, through_field.name)
if through_instance:
model_dict[through_field.name] = through_instance.model_dump()
@classmethod
def _skip_ellipsis(
cls, items: Union[Set, Dict, None], key: str, default_return: Any = None
) -> Union[Set, Dict, None]:
"""
Helper to traverse the include/exclude dictionaries.
In model_dump() Ellipsis should be skipped as it indicates all fields required
and not the actual set/dict with fields names.
:param items: current include/exclude value
:type items: Union[Set, Dict, None]
:param key: key for nested relations to check
:type key: str
:return: nested value of the items
:rtype: Union[Set, Dict, None]
"""
result = cls.get_child(items, key)
return result if result is not Ellipsis else default_return
@staticmethod
def _convert_all(items: Union[Set, Dict, None]) -> Union[Set, Dict, None]:
"""
Helper to convert __all__ pydantic special index to ormar which does not
support index based exclusions.
:param items: current include/exclude value
:type items: Union[Set, Dict, None]
"""
if isinstance(items, dict) and "__all__" in items:
return items.get("__all__")
return items
def _extract_nested_models(  # noqa: CCR001, CFQ002
    self,
    relation_map: Dict,
    dict_instance: Dict,
    include: Optional[Dict],
    exclude: Optional[Dict],
    exclude_primary_keys: bool,
    exclude_through_models: bool,
    exclude_list: bool,
) -> Dict:
    """
    Traverse nested models and convert them into dictionaries.
    Recurses through children via their own model_dump() calls.

    :param relation_map: map of relations to follow; guards against cycles
    :type relation_map: Dict
    :param dict_instance: current instance dict
    :type dict_instance: Dict
    :param include: fields to include
    :type include: Optional[Dict]
    :param exclude: fields to exclude
    :type exclude: Optional[Dict]
    :param exclude_primary_keys: flag to skip primary keys of children
    :type exclude_primary_keys: bool
    :param exclude_through_models: flag to skip m2m through models
    :type exclude_through_models: bool
    :param exclude_list: flag to skip list (to-many) relations entirely
    :type exclude_list: bool
    :return: current model dict with child models converted to dictionaries
    :rtype: Dict
    """
    fields = self._get_not_excluded_fields(
        fields=self.extract_related_names(), include=include, exclude=exclude
    )
    for field in fields:
        # only descend into relations present in relation_map - this is
        # what prevents infinite recursion on circular relations
        if not relation_map or field not in relation_map:
            continue
        try:
            nested_model = getattr(self, field)
            if isinstance(nested_model, MutableSequence):
                # to-many relation: serialize each child unless lists
                # were excluded wholesale
                if exclude_list:
                    continue
                dict_instance[field] = self._extract_nested_models_from_list(
                    relation_map=self._skip_ellipsis(  # type: ignore
                        relation_map, field, default_return=dict()
                    ),
                    models=nested_model,
                    include=self._convert_all(self._skip_ellipsis(include, field)),
                    exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
                    exclude_primary_keys=exclude_primary_keys,
                    exclude_through_models=exclude_through_models,
                )
            elif nested_model is not None:
                # to-one relation: dump the single child model
                model_dict = nested_model.model_dump(
                    relation_map=self._skip_ellipsis(
                        relation_map, field, default_return=dict()
                    ),
                    include=self._convert_all(self._skip_ellipsis(include, field)),
                    exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
                    exclude_primary_keys=exclude_primary_keys,
                    exclude_through_models=exclude_through_models,
                )
                if not exclude_through_models:
                    # attach m2m through-model payloads onto the child dict
                    nested_model.populate_through_models(
                        model=nested_model,
                        model_dict=model_dict,
                        include=self._convert_all(
                            self._skip_ellipsis(include, field)
                        ),
                        exclude=self._convert_all(
                            self._skip_ellipsis(exclude, field)
                        ),
                        relation_map=self._skip_ellipsis(
                            relation_map, field, default_return=dict()
                        ),
                    )
                dict_instance[field] = model_dict
            else:
                dict_instance[field] = None
        except ReferenceError:  # pragma: no cover
            # weakref proxy to an already collected related model
            dict_instance[field] = None
    return dict_instance
@typing_extensions.deprecated(
    "The `dict` method is deprecated; use `model_dump` instead.",
    category=OrmarDeprecatedSince020,
)
def dict(  # type: ignore # noqa A003
    self,
    *,
    include: Union[Set, Dict, None] = None,
    exclude: Union[Set, Dict, None] = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    exclude_primary_keys: bool = False,
    exclude_through_models: bool = False,
    exclude_list: bool = False,
    relation_map: Optional[Dict] = None,
) -> "DictStrAny":  # noqa: A003 # pragma: no cover
    """
    Deprecated alias kept for pydantic v1 compatibility.

    Emits a DeprecationWarning and forwards every argument unchanged to
    :meth:`model_dump`.
    """
    warnings.warn(
        "The `dict` method is deprecated; use `model_dump` instead.",
        DeprecationWarning,
    )
    forwarded = {
        "include": include,
        "exclude": exclude,
        "by_alias": by_alias,
        "exclude_unset": exclude_unset,
        "exclude_defaults": exclude_defaults,
        "exclude_none": exclude_none,
        "exclude_primary_keys": exclude_primary_keys,
        "exclude_through_models": exclude_through_models,
        "exclude_list": exclude_list,
        "relation_map": relation_map,
    }
    return self.model_dump(**forwarded)
def model_dump(  # type: ignore # noqa A003
    self,
    *,
    mode: Union[Literal["json", "python"], str] = "python",
    include: Union[Set, Dict, None] = None,
    exclude: Union[Set, Dict, None] = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    exclude_primary_keys: bool = False,
    exclude_through_models: bool = False,
    exclude_list: bool = False,
    relation_map: Optional[Dict] = None,
    round_trip: bool = False,
    warnings: bool = True,
) -> "DictStrAny":  # noqa: A003
    """
    Generate a dictionary representation of the model,
    optionally specifying which fields to include or exclude.

    Nested models are also parsed to dictionaries.
    Additionally, fields decorated with @property_field are also added.

    :param exclude_through_models: flag to exclude through models from dict
    :type exclude_through_models: bool
    :param exclude_primary_keys: flag to exclude primary keys from dict
    :type exclude_primary_keys: bool
    :param include: fields to include
    :type include: Union[Set, Dict, None]
    :param exclude: fields to exclude
    :type exclude: Union[Set, Dict, None]
    :param by_alias: flag to get values by alias - passed to pydantic
    :type by_alias: bool
    :param exclude_unset: flag to exclude not set values - passed to pydantic
    :type exclude_unset: bool
    :param exclude_defaults: flag to exclude default values - passed to pydantic
    :type exclude_defaults: bool
    :param exclude_none: flag to exclude None values - passed to pydantic
    :type exclude_none: bool
    :param exclude_list: flag to exclude lists of nested values models from dict
    :type exclude_list: bool
    :param relation_map: map of the relations to follow to avoid circular deps
    :type relation_map: Dict
    :param mode: The mode in which `to_python` should run.
        If mode is 'json', the dictionary will only contain JSON serializable types.
        If mode is 'python', the dictionary may contain any Python objects.
    :type mode: str
    :param round_trip: flag to enable serialization round-trip support
    :type round_trip: bool
    :param warnings: accepted for pydantic API compatibility only - the
        underlying pydantic call below always passes warnings=False
    :type warnings: bool
    :return: dictionary representation of the model
    :rtype: DictStrAny
    """
    # extend the pydantic exclude with relation fields (handled manually
    # below) and optionally pks / m2m through models
    pydantic_exclude = self._update_excluded_with_related(exclude)
    pydantic_exclude = self._update_excluded_with_pks_and_through(
        exclude=pydantic_exclude,
        exclude_primary_keys=exclude_primary_keys,
        exclude_through_models=exclude_through_models,
    )
    dict_instance = super().model_dump(
        mode=mode,
        include=include,
        exclude=pydantic_exclude,
        by_alias=by_alias,
        exclude_defaults=exclude_defaults,
        exclude_unset=exclude_unset,
        exclude_none=exclude_none,
        round_trip=round_trip,
        warnings=False,  # pydantic serializer warnings deliberately silenced
    )
    # bytes columns flagged represent_as_base64_str are emitted as strings
    dict_instance = {
        k: self._convert_bytes_to_str(column_name=k, value=v)
        for k, v in dict_instance.items()
    }
    # normalize set-style include/exclude into nested dicts for traversal
    include_dict = (
        translate_list_to_dict(include) if isinstance(include, Set) else include
    )
    exclude_dict = (
        translate_list_to_dict(exclude) if isinstance(exclude, Set) else exclude
    )
    # default relation map follows all reachable (non-circular) relations
    relation_map = (
        relation_map
        if relation_map is not None
        else translate_list_to_dict(self._iterate_related_models())
    )
    pk_only = getattr(self, "__pk_only__", False)
    if relation_map and not pk_only:
        dict_instance = self._extract_nested_models(
            relation_map=relation_map,
            dict_instance=dict_instance,
            include=include_dict,
            exclude=exclude_dict,
            exclude_primary_keys=exclude_primary_keys,
            exclude_through_models=exclude_through_models,
            exclude_list=exclude_list,
        )
    return dict_instance
@typing_extensions.deprecated(
    "The `json` method is deprecated; use `model_dump_json` instead.",
    category=OrmarDeprecatedSince020,
)
def json(  # type: ignore # noqa A003
    self,
    *,
    include: Union[Set, Dict, None] = None,
    exclude: Union[Set, Dict, None] = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    exclude_primary_keys: bool = False,
    exclude_through_models: bool = False,
    **dumps_kwargs: Any,
) -> str:  # pragma: no cover
    """
    Deprecated alias kept for pydantic v1 compatibility.

    Emits a DeprecationWarning and forwards every argument unchanged to
    :meth:`model_dump_json`.
    """
    warnings.warn(
        "The `json` method is deprecated; use `model_dump_json` instead.",
        DeprecationWarning,
    )
    forwarded = {
        "include": include,
        "exclude": exclude,
        "by_alias": by_alias,
        "exclude_unset": exclude_unset,
        "exclude_defaults": exclude_defaults,
        "exclude_none": exclude_none,
        "exclude_primary_keys": exclude_primary_keys,
        "exclude_through_models": exclude_through_models,
    }
    return self.model_dump_json(**forwarded, **dumps_kwargs)
def model_dump_json(  # type: ignore # noqa A003
    self,
    *,
    include: Union[Set, Dict, None] = None,
    exclude: Union[Set, Dict, None] = None,
    by_alias: bool = False,
    exclude_unset: bool = False,
    exclude_defaults: bool = False,
    exclude_none: bool = False,
    exclude_primary_keys: bool = False,
    exclude_through_models: bool = False,
    **dumps_kwargs: Any,
) -> str:
    """
    Generate a JSON representation of the model; `include` and `exclude`
    arguments behave as in :meth:`model_dump`.

    Note: ``dumps_kwargs`` is accepted for backward compatibility but is
    not forwarded anywhere - serialization goes through pydantic's
    serializer, not json.dumps().
    """
    payload = self.model_dump(
        include=include,
        exclude=exclude,
        by_alias=by_alias,
        exclude_unset=exclude_unset,
        exclude_defaults=exclude_defaults,
        exclude_none=exclude_none,
        exclude_primary_keys=exclude_primary_keys,
        exclude_through_models=exclude_through_models,
    )
    raw = self.__pydantic_serializer__.to_json(payload, warnings=False)
    return raw.decode()
@classmethod
@typing_extensions.deprecated(
    "The `construct` method is deprecated; use `model_construct` instead.",
    category=OrmarDeprecatedSince020,
)
def construct(
    cls: Type["T"], _fields_set: Union[Set[str], None] = None, **values: Any
) -> "T":  # pragma: no cover
    """
    Deprecated alias kept for pydantic v1 compatibility.

    Emits a DeprecationWarning and delegates to :meth:`model_construct`.
    """
    warnings.warn(
        "The `construct` method is deprecated; use `model_construct` instead.",
        DeprecationWarning,
    )
    forwarded_values = dict(values)
    return cls.model_construct(_fields_set=_fields_set, **forwarded_values)
@classmethod
def model_construct(
    cls: Type["T"], _fields_set: Optional["SetStr"] = None, **values: Any
) -> "T":
    """
    Build a model instance without running validation - ormar's counterpart
    of pydantic's model_construct.

    Non-relation values are written directly into __dict__, defaults are
    filled in for missing non-required fields, and relation values are
    attached via _construct_relations. The pydantic-internal attributes
    are finalized in _pydantic_model_construct_finalizer.

    :param _fields_set: explicit set of "provided" field names; defaults
        to the keys actually passed in ``values``
    :type _fields_set: Optional[SetStr]
    :param values: raw field values, relations included
    :type values: Any
    :return: constructed (unvalidated) model instance
    :rtype: T
    """
    # split off relation values - they are handled separately below
    own_values = {
        k: v for k, v in values.items() if k not in cls.extract_related_names()
    }
    model = cls.__new__(cls)
    fields_values: Dict[str, Any] = {}
    for name, field in cls.model_fields.items():
        if name in own_values:
            fields_values[name] = own_values[name]
        elif not field.is_required():
            # fill defaults for declared-but-missing optional fields
            fields_values[name] = field.get_default()
    fields_values.update(own_values)
    if _fields_set is None:
        _fields_set = set(values.keys())
    extra_allowed = cls.model_config.get("extra") == "allow"
    if not extra_allowed:
        # when extras are not allowed, all raw values (relations included)
        # are kept in __dict__ exactly as provided
        fields_values.update(values)
    object.__setattr__(model, "__dict__", fields_values)
    model._initialize_internal_attributes()
    cls._construct_relations(model=model, values=values)
    object.__setattr__(model, "__pydantic_fields_set__", _fields_set)
    return cls._pydantic_model_construct_finalizer(
        model=model, extra_allowed=extra_allowed, values=values
    )
@classmethod
def _pydantic_model_construct_finalizer(
    cls: Type["T"], model: "T", extra_allowed: bool, **values: Any
) -> "T":
    """
    Recreate pydantic model_construct logic here as we do not call super method.

    Sets up the pydantic-internal attributes (__pydantic_extra__,
    __pydantic_private__) and triggers model_post_init when required.

    :param model: freshly constructed, not yet finalized model instance
    :param extra_allowed: result of the model_config extra == "allow" check
    :return: the same model instance, finalized
    :rtype: T
    """
    _extra: Union[Dict[str, Any], None] = None
    if extra_allowed:  # pragma: no cover
        # NOTE(review): the only visible caller (model_construct) passes
        # ``values=values``, so **values here holds {"values": {...}} and
        # this loop would copy that single wrapper key into
        # __pydantic_extra__ rather than the actual extra fields - confirm
        # intent before relying on the extra="allow" path.
        _extra = {}
        for k, v in values.items():
            _extra[k] = v
    if not cls.__pydantic_root_model__:
        object.__setattr__(model, "__pydantic_extra__", _extra)
    if cls.__pydantic_post_init__:  # pragma: no cover
        model.model_post_init(None)
    elif not cls.__pydantic_root_model__:
        # Note: if there are any private attributes,
        # cls.__pydantic_post_init__ would exist
        # Since it doesn't, that means that `__pydantic_private__`
        # should be set to None
        object.__setattr__(model, "__pydantic_private__", None)
    return model
@classmethod
def _construct_relations(cls: Type["T"], model: "T", values: Dict) -> None:
    """
    Attach relation values to a model built via model_construct.

    For every relation name present in ``values`` the raw value(s) are
    expanded through the relation field (with to_register=False) and then
    registered on the freshly built model via the relation manager.

    :param model: model instance under construction
    :type model: T
    :param values: raw keyword values passed to model_construct
    :type values: Dict
    :return: None
    :rtype: None
    """
    present_relations = [
        relation for relation in cls.extract_related_names() if relation in values
    ]
    for relation in present_relations:
        value_to_set = values[relation]
        # normalize to a list so to-one and to-many share one code path
        if not isinstance(value_to_set, list):
            value_to_set = [value_to_set]
        relation_field = cls.ormar_config.model_fields[relation]
        # expand raw values into related model instances; to_register=False
        # presumably defers bookkeeping to the explicit _orm.add below -
        # confirm against expand_relationship's contract
        relation_value = [
            relation_field.expand_relationship(x, model, to_register=False)
            for x in value_to_set
            if x is not None
        ]
        for child in relation_value:
            # NOTE(review): the related instance is passed as ``parent`` and
            # the constructed model as ``child`` - looks inverted; confirm
            # against _orm.add's signature before "fixing".
            model._orm.add(
                parent=cast("Model", child),
                child=cast("Model", model),
                field=cast("ForeignKeyField", relation_field),
            )
def update_from_dict(self, value_dict: Dict) -> "NewBaseModel":
    """
    Update self with values of fields passed in the dictionary.

    :param value_dict: dictionary of fields names and values
    :type value_dict: Dict
    :return: self
    :rtype: NewBaseModel
    """
    for field_name, field_value in value_dict.items():
        setattr(self, field_name, field_value)
    return self
def _convert_to_bytes(self, column_name: str, value: Any) -> Union[str, Dict]:
"""
Converts value to bytes from string
:param column_name: name of the field
:type column_name: str
:param value: value fo the field
:type value: Any
:return: converted value if needed, else original value
:rtype: Any
"""
if column_name not in self._bytes_fields:
return value
field = self.ormar_config.model_fields[column_name]
if value is not None:
value = decode_bytes(
value=value, represent_as_string=field.represent_as_base64_str
)
return value
def _convert_bytes_to_str(self, column_name: str, value: Any) -> Union[str, Dict]:
"""
Converts value to str from bytes for represent_as_base64_str columns.
:param column_name: name of the field
:type column_name: str
:param value: value fo the field
:type value: Any
:return: converted value if needed, else original value
:rtype: Any
"""
if column_name not in self._bytes_fields:
return value
field = self.ormar_config.model_fields[column_name]
if (
value is not None
and not isinstance(value, str)
and field.represent_as_base64_str
):
return base64.b64encode(value).decode()
return value
def _convert_json(self, column_name: str, value: Any) -> Union[str, Dict, None]:
"""
Converts value to/from json if needed (for Json columns).
:param column_name: name of the field
:type column_name: str
:param value: value fo the field
:type value: Any
:return: converted value if needed, else original value
:rtype: Any
"""
if column_name not in self._json_fields:
return value
return encode_json(value)
def _extract_own_model_fields(self) -> Dict:
"""
Returns a dictionary with field names and values for fields that are not
relations fields (ForeignKey, ManyToMany etc.)
:return: dictionary of fields names and values.
:rtype: Dict
"""
related_names = self.extract_related_names()
self_fields = {k: v for k, v in self.__dict__.items() if k not in related_names}
return self_fields
def _extract_model_db_fields(self) -> Dict:
    """
    Returns a dictionary with field names and values for fields that are
    stored in the current model's table.

    That includes own non-relational fields and foreign key fields, where a
    fk column carries the related instance's primary key value.

    :raises ModelPersistenceError: when a non-nullable related model has no
        primary key value set yet
    :return: dictionary of fields names and values.
    :rtype: Dict
    """
    self_fields = self._extract_own_model_fields()
    # keep only the values that map to actual columns of this table
    self_fields = {
        k: v
        for k, v in self_fields.items()
        if self.get_column_alias(k) in self.ormar_config.table.columns
    }
    for field in self._extract_db_related_names():
        relation_field = self.ormar_config.model_fields[field]
        target_pk_name = relation_field.to.ormar_config.pkname
        target_field = getattr(self, field)
        # the fk column value is the related instance's pk (None if unset)
        self_fields[field] = getattr(target_field, target_pk_name, None)
        if not relation_field.nullable and not self_fields[field]:
            raise ModelPersistenceError(
                f"You cannot save {relation_field.to.get_name()} "
                f"model without pk set!"
            )
    return self_fields
collerek-ormar-c09209a/ormar/models/ormar_config.py 0000664 0000000 0000000 00000006006 15130200524 0022375 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict, List, Optional, Set, Type, Union
import databases
import sqlalchemy
from sqlalchemy.sql.schema import ColumnCollectionConstraint
from ormar.fields import BaseField, ForeignKeyField, ManyToManyField
from ormar.models.helpers import alias_manager
from ormar.models.utils import Extra
from ormar.queryset.queryset import QuerySet
from ormar.relations import AliasManager
from ormar.signals import SignalEmitter
class OrmarConfig:
    """
    Per-model configuration container for ormar models.

    Holds everything ormar needs at class-creation time (metadata, database,
    table name, constraints...) together with state that is filled in later
    by the model machinery (pkname, columns, model_fields, table...).
    """

    if TYPE_CHECKING:  # pragma: no cover
        pkname: str
        metadata: sqlalchemy.MetaData
        database: databases.Database
        tablename: str
        order_by: List[str]
        abstract: bool
        exclude_parent_fields: List[str]
        constraints: List[ColumnCollectionConstraint]

    def __init__(
        self,
        metadata: Optional[sqlalchemy.MetaData] = None,
        database: Optional[databases.Database] = None,
        engine: Optional[sqlalchemy.engine.Engine] = None,
        tablename: Optional[str] = None,
        order_by: Optional[List[str]] = None,
        abstract: bool = False,
        queryset_class: Type[QuerySet] = QuerySet,
        extra: Extra = Extra.forbid,
        constraints: Optional[List[ColumnCollectionConstraint]] = None,
    ) -> None:
        """
        :param metadata: sqlalchemy metadata the model table is attached to
        :param database: databases.Database instance used for queries
        :param engine: optional sqlalchemy engine
        :param tablename: explicit table name (derived later when None)
        :param order_by: default ordering for queries on this model
        :param abstract: marks the model as abstract (no table of its own)
        :param queryset_class: QuerySet subclass used for this model
        :param extra: policy for undeclared fields (ignore/forbid)
        :param constraints: additional table level constraints
        """
        # configuration supplied by the user
        self.metadata = metadata  # type: ignore
        self.database = database  # type: ignore
        self.engine = engine  # type: ignore
        self.tablename = tablename  # type: ignore
        self.orders_by = order_by or []
        self.constraints = constraints or []
        self.abstract = abstract
        self.extra = extra
        self.queryset_class = queryset_class
        # state populated later during model class construction
        self.pkname = None  # type: ignore
        self.columns: List[sqlalchemy.Column] = []
        self.model_fields: Dict[
            str, Union[BaseField, ForeignKeyField, ManyToManyField]
        ] = {}
        self.alias_manager: AliasManager = alias_manager
        self.property_fields: Set = set()
        self.signals: SignalEmitter = SignalEmitter()
        self.requires_ref_update: bool = False
        self.table: sqlalchemy.Table = None  # type: ignore

    def copy(
        self,
        metadata: Optional[sqlalchemy.MetaData] = None,
        database: Optional[databases.Database] = None,
        engine: Optional[sqlalchemy.engine.Engine] = None,
        tablename: Optional[str] = None,
        order_by: Optional[List[str]] = None,
        abstract: Optional[bool] = None,
        queryset_class: Optional[Type[QuerySet]] = None,
        extra: Optional[Extra] = None,
        constraints: Optional[List[ColumnCollectionConstraint]] = None,
    ) -> "OrmarConfig":
        """
        Create a fresh OrmarConfig based on this one.

        metadata, database, engine, abstract, queryset_class and extra fall
        back to this config's values when not provided; tablename, order_by
        and constraints are deliberately NOT inherited - each model copy
        must declare its own.
        """
        effective_metadata = metadata or self.metadata
        effective_database = database or self.database
        effective_engine = engine or self.engine
        effective_abstract = abstract or self.abstract
        effective_queryset = queryset_class or self.queryset_class
        effective_extra = extra or self.extra
        return OrmarConfig(
            metadata=effective_metadata,
            database=effective_database,
            engine=effective_engine,
            tablename=tablename,
            order_by=order_by,
            abstract=effective_abstract,
            queryset_class=effective_queryset,
            extra=effective_extra,
            constraints=constraints,
        )
collerek-ormar-c09209a/ormar/models/quick_access_views.py 0000664 0000000 0000000 00000003557 15130200524 0023612 0 ustar 00root root 0000000 0000000 """
Contains set of fields/methods etc names that are used to bypass the checks in
NewBaseModel __getattribute__ calls to speed the calls.
"""
# Names looked up on models so often that NewBaseModel.__getattribute__
# bypasses its custom handling for them. Grouped for readability; the
# duplicate "model_fields" entry present previously has been removed
# (harmless in a set literal, but misleading).
quick_access_set = {
    # pydantic / dunder attributes
    "Config",
    "model_config",
    "model_fields",
    "__cached_hash__",
    "__class__",
    "__config__",
    "__custom_root_type__",
    "__dict__",
    "__fields_set__",
    "__json_encoder__",
    "__pk_only__",
    "__post_root_validators__",
    "__pre_root_validators__",
    "__private_attributes__",
    "__same__",
    # internal helpers
    "_calculate_keys",
    "_convert_json",
    "_extract_db_related_names",
    "_extract_model_db_fields",
    "_extract_nested_models",
    "_extract_nested_models_from_list",
    "_extract_own_model_fields",
    "_extract_related_model_instead_of_field",
    "_get_not_excluded_fields",
    "_get_value",
    "_init_private_attributes",
    "_is_conversion_to_json_needed",
    "_iter",
    "_iterate_related_models",
    "_orm",
    "_orm_id",
    "_orm_saved",
    "_related_names",
    "_skip_ellipsis",
    "_update_and_follow",
    "_update_excluded_with_related_not_required",
    "_verify_model_can_be_initialized",
    # public model API
    "copy",
    "delete",
    "dict",
    "extract_related_names",
    "extract_related_fields",
    "extract_through_names",
    "update_from_dict",
    "get_child",
    "get_column_alias",
    "get_column_name_from_alias",
    "get_filtered_names_to_extract",
    "get_name",
    "get_properties",
    "get_related_field_name",
    "get_relation_model_id",
    "json",
    "keys",
    "load",
    "load_all",
    "pk_column",
    "pk_type",
    "populate_default_values",
    "prepare_model_to_save",
    "remove",
    "resolve_relation_field",
    "resolve_relation_name",
    "save",
    "save_related",
    "saved",
    "set_save_status",
    "signals",
    "translate_aliases_to_columns",
    "translate_columns_to_aliases",
    "update",
    "upsert",
}
collerek-ormar-c09209a/ormar/models/traversible.py 0000664 0000000 0000000 00000007415 15130200524 0022257 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, List, Optional, Type
if TYPE_CHECKING: # pragma no cover
from ormar.models.mixins.relation_mixin import RelationMixin
class NodeList:
    """
    Ordered collection of traversal ``Node`` objects, registering each
    (node_class, relation_name, parent_node) triple only once.
    """

    def __init__(self) -> None:
        self.node_list: List["Node"] = []

    def __getitem__(self, item: Any) -> Any:
        return self.node_list.__getitem__(item)

    def add(
        self,
        node_class: Type["RelationMixin"],
        relation_name: Optional[str] = None,
        parent_node: Optional["Node"] = None,
    ) -> "Node":
        """
        Return the node matching the given parameters, creating and
        registering a fresh one when no match exists yet.

        :param node_class: Model in current node
        :type node_class: ormar.models.metaclass.ModelMetaclass
        :param relation_name: name of the current relation
        :type relation_name: str
        :param parent_node: parent node
        :type parent_node: Optional[Node]
        :return: new or already existing node
        :rtype: Node
        """
        existing_node = self.find(
            relation_name=relation_name, node_class=node_class, parent_node=parent_node
        )
        if existing_node:  # pragma: no cover
            return existing_node
        new_node = Node(
            node_class=node_class,
            relation_name=relation_name,
            parent_node=parent_node,
        )
        self.node_list.append(new_node)
        return new_node

    def find(
        self,
        node_class: Type["RelationMixin"],
        relation_name: Optional[str] = None,
        parent_node: Optional["Node"] = None,
    ) -> Optional["Node"]:
        """
        Return the already registered node with the given parameters or
        None when it was not visited yet.

        :param node_class: Model in current node
        :type node_class: ormar.models.metaclass.ModelMetaclass
        :param relation_name: name of the current relation
        :type relation_name: str
        :param parent_node: parent node
        :type parent_node: Optional[Node]
        :return: already existing node or None
        :rtype: Optional[Node]
        """
        matches = (
            node
            for node in self.node_list
            if node.node_class == node_class
            and node.parent_node == parent_node
            and node.relation_name == relation_name
        )
        return next(matches, None)
class Node:
    """
    A single visited model in the relation traversal tree; links itself
    into the parent's ``visited_children`` on creation.
    """

    def __init__(
        self,
        node_class: Type["RelationMixin"],
        relation_name: Optional[str] = None,
        parent_node: Optional["Node"] = None,
    ) -> None:
        self.relation_name = relation_name
        self.node_class = node_class
        self.parent_node = parent_node
        self.visited_children: List["Node"] = []
        # register this node on its parent so the tree is walkable both ways
        if self.parent_node:
            self.parent_node.visited_children.append(self)

    def __repr__(self) -> str:  # pragma: no cover
        return (
            f"{self.node_class.get_name(lower=False)}, "
            f"relation:{self.relation_name}, parent: {self.parent_node}"
        )

    def visited(self, relation_name: str) -> bool:
        """
        Check if the given relation was already visited: it was when any
        ancestor node carries the same Model class the relation points to.

        :param relation_name: name of relation
        :type relation_name: str
        :return: result of the check
        :rtype: bool
        """
        target_model = self.node_class.ormar_config.model_fields[relation_name].to
        ancestor = self.parent_node
        while ancestor:
            if ancestor.node_class == target_model:
                return True
            ancestor = ancestor.parent_node
        return False
collerek-ormar-c09209a/ormar/models/utils.py 0000664 0000000 0000000 00000000134 15130200524 0021064 0 ustar 00root root 0000000 0000000 from enum import Enum
class Extra(str, Enum):
    """
    Allowed values for OrmarConfig's ``extra`` setting, mirroring pydantic's
    extra-field modes (enforcement happens in the model machinery, not here).
    """

    # presumably: unknown fields are silently dropped - mirrors pydantic
    ignore = "ignore"
    # presumably: unknown fields raise an error - mirrors pydantic
    forbid = "forbid"
collerek-ormar-c09209a/ormar/protocols/ 0000775 0000000 0000000 00000000000 15130200524 0020115 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/protocols/__init__.py 0000664 0000000 0000000 00000000262 15130200524 0022226 0 ustar 00root root 0000000 0000000 from ormar.protocols.queryset_protocol import QuerySetProtocol
from ormar.protocols.relation_protocol import RelationProtocol
__all__ = ["QuerySetProtocol", "RelationProtocol"]
collerek-ormar-c09209a/ormar/protocols/queryset_protocol.py 0000664 0000000 0000000 00000003516 15130200524 0024276 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Set, Tuple, Union
try:
from typing import Protocol
except ImportError: # pragma: nocover
from typing_extensions import Protocol # type: ignore
if TYPE_CHECKING: # noqa: C901; #pragma nocover
from ormar import Model
from ormar.relations.querysetproxy import QuerysetProxy
class QuerySetProtocol(Protocol):  # pragma: nocover
    """
    Structural (typing.Protocol) description of the query API shared by
    QuerySet and QuerysetProxy. All members are stub signatures only -
    implementations live on the concrete classes.
    """

    # -- filtering / relation loading ---------------------------------
    def filter(self, **kwargs: Any) -> "QuerysetProxy":  # noqa: A003, A001
        ...

    def exclude(self, **kwargs: Any) -> "QuerysetProxy":  # noqa: A003, A001
        ...

    def select_related(self, related: Union[List, str]) -> "QuerysetProxy": ...

    def prefetch_related(self, related: Union[List, str]) -> "QuerysetProxy": ...

    # -- aggregates and mutation --------------------------------------
    async def exists(self) -> bool: ...

    async def count(self, distinct: bool = True) -> int: ...

    async def clear(self) -> int: ...

    # -- pagination ---------------------------------------------------
    def limit(self, limit_count: int) -> "QuerysetProxy": ...

    def offset(self, offset: int) -> "QuerysetProxy": ...

    # -- fetching / persisting ----------------------------------------
    async def first(self, **kwargs: Any) -> "Model": ...

    async def get(self, **kwargs: Any) -> "Model": ...

    async def all(  # noqa: A003, A001
        self, **kwargs: Any
    ) -> Sequence[Optional["Model"]]: ...

    async def create(self, **kwargs: Any) -> "Model": ...

    async def update(self, each: bool = False, **kwargs: Any) -> int: ...

    async def get_or_create(
        self,
        _defaults: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> Tuple["Model", bool]: ...

    async def update_or_create(self, **kwargs: Any) -> "Model": ...

    # -- column selection and ordering --------------------------------
    def fields(self, columns: Union[List, str, Set, Dict]) -> "QuerysetProxy": ...

    def exclude_fields(
        self, columns: Union[List, str, Set, Dict]
    ) -> "QuerysetProxy": ...

    def order_by(self, columns: Union[List, str]) -> "QuerysetProxy": ...
collerek-ormar-c09209a/ormar/protocols/relation_protocol.py 0000664 0000000 0000000 00000000647 15130200524 0024234 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Type, Union
try:
from typing import Protocol
except ImportError: # pragma: nocover
from typing_extensions import Protocol # type: ignore
if TYPE_CHECKING: # pragma: nocover
from ormar import Model
class RelationProtocol(Protocol):  # pragma: nocover
    """
    Structural (typing.Protocol) description of a relation container:
    anything that can add and remove related model instances.
    """

    def add(self, child: "Model") -> None: ...

    def remove(self, child: Union["Model", Type["Model"]]) -> None: ...
collerek-ormar-c09209a/ormar/py.typed 0000664 0000000 0000000 00000000000 15130200524 0017556 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/queryset/ 0000775 0000000 0000000 00000000000 15130200524 0017752 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/queryset/__init__.py 0000664 0000000 0000000 00000001141 15130200524 0022060 0 ustar 00root root 0000000 0000000 """
Contains QuerySet and different Query classes to allow for constructing of sql queries.
"""
from ormar.queryset.actions import FilterAction, OrderAction, SelectAction
from ormar.queryset.clause import and_, or_
from ormar.queryset.field_accessor import FieldAccessor
from ormar.queryset.queries import FilterQuery, LimitQuery, OffsetQuery, OrderQuery
from ormar.queryset.queryset import QuerySet
__all__ = [
"QuerySet",
"FilterQuery",
"LimitQuery",
"OffsetQuery",
"OrderQuery",
"FilterAction",
"OrderAction",
"SelectAction",
"and_",
"or_",
"FieldAccessor",
]
collerek-ormar-c09209a/ormar/queryset/actions/ 0000775 0000000 0000000 00000000000 15130200524 0021412 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/queryset/actions/__init__.py 0000664 0000000 0000000 00000000363 15130200524 0023525 0 ustar 00root root 0000000 0000000 from ormar.queryset.actions.filter_action import FilterAction
from ormar.queryset.actions.order_action import OrderAction
from ormar.queryset.actions.select_action import SelectAction
__all__ = ["FilterAction", "OrderAction", "SelectAction"]
collerek-ormar-c09209a/ormar/queryset/actions/filter_action.py 0000664 0000000 0000000 00000012165 15130200524 0024613 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Type
from sqlalchemy import TextClause
import ormar # noqa: I100, I202
from ormar.exceptions import QueryDefinitionError
from ormar.queryset.actions.query_action import QueryAction
if TYPE_CHECKING: # pragma: nocover
from ormar import Model
# Maps ormar filter suffixes (e.g. "name__icontains") to the sqlalchemy
# column operator/method name used to build the clause.
FILTER_OPERATORS = {
    "exact": "__eq__",
    "iexact": "ilike",
    "contains": "like",
    "icontains": "ilike",
    "startswith": "like",
    "istartswith": "ilike",
    "endswith": "like",
    "iendswith": "ilike",
    "isnull": "is_",
    "in": "in_",
    "gt": "__gt__",
    "gte": "__ge__",
    "lt": "__lt__",
    "lte": "__le__",
}
# Reverse-style lookup: python magic-method / accessor name -> ormar filter
# suffix (presumably used by FieldAccessor-style query construction).
METHODS_TO_OPERATORS = {
    "__eq__": "exact",
    "__mod__": "contains",
    "__gt__": "gt",
    "__ge__": "gte",
    "__lt__": "lt",
    "__le__": "lte",
    "iexact": "iexact",
    "contains": "contains",
    "icontains": "icontains",
    "startswith": "startswith",
    "istartswith": "istartswith",
    "endswith": "endswith",
    "iendswith": "iendswith",
    "isnull": "isnull",
    "in": "in",
}
# Characters with special meaning in SQL LIKE patterns; escaped before use.
ESCAPE_CHARACTERS = ["%", "_"]
class FilterAction(QueryAction):
    """
    Filter Actions is populated by queryset when filter() is called.
    All required params are extracted but kept raw until actual filter clause value
    is required -> then the action is converted into text() clause.
    Extracted in order to easily change table prefixes on complex relations.
    """

    def __init__(self, filter_str: str, value: Any, model_cls: Type["Model"]) -> None:
        """
        :param filter_str: raw filter string, e.g. "name__icontains"
        :param value: value the filter compares against
        :param model_cls: model the filter is rooted at
        """
        super().__init__(query_str=filter_str, model_cls=model_cls)
        self.filter_value = value
        self._escape_characters_in_clause()

    def has_escaped_characters(self) -> bool:
        """Check if value is a string that contains characters to escape."""
        # fixed idiom: membership test inside any() - the previous form
        # (any(c for c in ... if c in value)) tested the escape characters'
        # own truthiness rather than the membership expression
        return isinstance(self.filter_value, str) and any(
            character in self.filter_value for character in ESCAPE_CHARACTERS
        )

    def _split_value_into_parts(self, query_str: str) -> None:
        """
        Split a raw filter string on "__" into related model path, field
        name and operator; defaults to "exact" when no operator suffix.
        """
        parts = query_str.split("__")
        if parts[-1] in FILTER_OPERATORS:
            self.operator = parts[-1]
            self.field_name = parts[-2]
            self.related_parts = parts[:-2]
        else:
            self.operator = "exact"
            self.field_name = parts[-1]
            self.related_parts = parts[:-1]

    def _escape_characters_in_clause(self) -> None:
        """
        Escapes the special characters ["%", "_"] if needed.
        Adds `%` for `like` queries.

        :raises QueryDefinitionError: if contains or icontains is used with
            an ormar model instance
        """
        self.has_escaped_character = False
        if self.operator in [
            "contains",
            "icontains",
            "startswith",
            "istartswith",
            "endswith",
            "iendswith",
        ]:
            if isinstance(self.filter_value, ormar.Model):
                raise QueryDefinitionError(
                    "You cannot use contains and icontains with instance of the Model"
                )
            self.has_escaped_character = self.has_escaped_characters()
            if self.has_escaped_character:
                self._escape_chars()
            self._prefix_suffix_quote()

    def _escape_chars(self) -> None:
        """Actually replaces chars to escape in value."""
        for char in ESCAPE_CHARACTERS:
            self.filter_value = self.filter_value.replace(char, f"\\{char}")

    def _prefix_suffix_quote(self) -> None:
        """
        Adds % to the beginning of the value if operator checks for
        containment and not starts with.
        Adds % to the end of the value if operator checks for containment
        and not ends with.
        """
        prefix = "%" if "start" not in self.operator else ""
        suffix = "%" if "end" not in self.operator else ""
        self.filter_value = f"{prefix}{self.filter_value}{suffix}"

    def get_text_clause(self) -> TextClause:
        """
        Escapes characters if it's required.
        Substitutes values of the models if value is a ormar Model with its pk value.
        Compiles the clause.

        :return: compiled and escaped clause
        :rtype: sqlalchemy.sql.elements.TextClause
        """
        if isinstance(self.filter_value, ormar.Model):
            # filtering by a model instance means filtering by its pk
            self.filter_value = self.filter_value.pk
        op_attr = FILTER_OPERATORS[self.operator]
        if self.operator == "isnull":
            # isnull=True -> IS NULL, isnull=False -> IS NOT NULL
            op_attr = "is_" if self.filter_value else "isnot"
            filter_value = None
        else:
            filter_value = self.filter_value
        if self.table_prefix:
            # relation filters address the aliased (prefixed) table
            aliased_table = (
                self.source_model.ormar_config.alias_manager.prefixed_table_name(
                    self.table_prefix, self.column.table
                )
            )
            aliased_column = getattr(aliased_table.c, self.column.name)
        else:
            aliased_column = self.column
        clause = getattr(aliased_column, op_attr)(filter_value)
        if self.has_escaped_character:
            # make sqlalchemy emit ESCAPE '\' for the LIKE clause
            clause.modifiers["escape"] = "\\"
        return clause
collerek-ormar-c09209a/ormar/queryset/actions/order_action.py 0000664 0000000 0000000 00000010536 15130200524 0024441 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Optional, Type
import sqlalchemy
from sqlalchemy import text
from ormar.queryset.actions.query_action import QueryAction # noqa: I100, I202
if TYPE_CHECKING: # pragma: nocover
from ormar import Model
class OrderAction(QueryAction):
"""
Order Actions is populated by queryset when order_by() is called.
All required params are extracted but kept raw until actual filter clause value
is required -> then the action is converted into text() clause.
Extracted in order to easily change table prefixes on complex relations.
"""
def __init__(
    self, order_str: str, model_cls: Type["Model"], alias: Optional[str] = None
) -> None:
    """
    :param order_str: field name or related path to order by; direction
        handling (e.g. a "-" prefix) is presumably parsed by the base
        QueryAction - confirm in QueryAction
    :type order_str: str
    :param model_cls: model the ordering is rooted at
    :type model_cls: Type[Model]
    :param alias: explicit table prefix overriding the resolved one
    :type alias: Optional[str]
    """
    # direction must exist before super().__init__ runs, as base-class
    # parsing of order_str happens during construction
    self.direction: str = ""
    super().__init__(query_str=order_str, model_cls=model_cls)
    self.is_source_model_order = False
    if alias:
        # explicit alias overrides the prefix resolved by the base class
        self.table_prefix = alias
    # flag orders that target a direct field of the queried model itself
    if self.source_model == self.target_model and "__" not in self.related_str:
        self.is_source_model_order = True
@property
def field_alias(self) -> str:
    """Database column alias of the ordered field on the target model."""
    return self.target_model.get_column_alias(self.field_name)
@property
def is_postgres_bool(self) -> bool:
dialect = self.target_model.ormar_config.database._backend._dialect.name
field_type = self.target_model.ormar_config.model_fields[
self.field_name
].__type__
return dialect == "postgresql" and field_type is bool
def get_field_name_text(self) -> str:
"""
Escapes characters if it's required.
Substitutes values of the models if value is a ormar Model with its pk value.
Compiles the clause.
:return: complied and escaped clause
:rtype: sqlalchemy.sql.elements.TextClause
"""
prefix = f"{self.table_prefix}_" if self.table_prefix else ""
return f"{prefix}{self.table}" f".{self.field_alias}"
def get_min_or_max(self) -> sqlalchemy.sql.expression.TextClause:
"""
Used in limit sub queries where you need to use aggregated functions
in order to order by columns not included in group by. For postgres bool
field it's using bool_or function as aggregates does not work with this type
of columns.
:return: min or max function to order
:rtype: sqlalchemy.sql.elements.TextClause
"""
prefix = f"{self.table_prefix}_" if self.table_prefix else ""
if self.direction == "":
function = "min" if not self.is_postgres_bool else "bool_or"
return text(f"{function}({prefix}{self.table}" f".{self.field_alias})")
function = "max" if not self.is_postgres_bool else "bool_or"
return text(f"{function}({prefix}{self.table}" f".{self.field_alias}) desc")
def get_text_clause(self) -> sqlalchemy.sql.expression.TextClause:
"""
Escapes characters if it's required.
Substitutes values of the models if value is a ormar Model with its pk value.
Compiles the clause.
:return: complied and escaped clause
:rtype: sqlalchemy.sql.elements.TextClause
"""
dialect = self.target_model.ormar_config.database._backend._dialect
quoter = dialect.identifier_preparer.quote
prefix = f"{self.table_prefix}_" if self.table_prefix else ""
table_name = self.table.name
field_name = self.field_alias
if not prefix:
table_name = quoter(table_name)
else:
table_name = quoter(f"{prefix}{table_name}")
field_name = quoter(field_name)
return text(f"{table_name}.{field_name} {self.direction}")
def _split_value_into_parts(self, order_str: str) -> None:
if order_str.startswith("-"):
self.direction = "desc"
order_str = order_str[1:]
parts = order_str.split("__")
self.field_name = parts[-1]
self.related_parts = parts[:-1]
def check_if_filter_apply(self, target_model: Type["Model"], alias: str) -> bool:
"""
Checks filter conditions to find if they apply to current join.
:param target_model: model which is now processed
:type target_model: Type["Model"]
:param alias: prefix of the relation
:type alias: str
:return: result of the check
:rtype: bool
"""
return target_model == self.target_model and alias == self.table_prefix
collerek-ormar-c09209a/ormar/queryset/actions/query_action.py 0000664 0000000 0000000 00000006223 15130200524 0024471 0 ustar 00root root 0000000 0000000 import abc
from typing import TYPE_CHECKING, Any, List, Type
import sqlalchemy
from ormar.queryset.utils import get_relationship_alias_model_and_str # noqa: I202
if TYPE_CHECKING: # pragma: nocover
from ormar import Model
class QueryAction(abc.ABC):
    """
    Base QueryAction class with common params for Filter and Order actions.

    Parses the raw ``relation1__relation2__field`` string and resolves the
    target model and table prefix the clause should be built against.
    """

    def __init__(self, query_str: str, model_cls: Type["Model"]) -> None:
        # raw user string, i.e. "author__name__icontains"
        self.query_str = query_str
        self.field_name: str = ""
        self.related_str: str = ""
        self.related_parts: List[str] = []
        self.table_prefix = ""
        self.is_through = False
        # query starts at source_model; target may change while walking relations
        self.source_model = model_cls
        self.target_model = model_cls
        self._split_value_into_parts(query_str)
        self._determine_filter_target_table()

    def __eq__(self, other: object) -> bool:  # pragma: no cover
        if isinstance(other, QueryAction):
            return self.query_str == other.query_str
        return False

    def __hash__(self) -> Any:
        return hash((self.table_prefix, self.query_str))

    @abc.abstractmethod
    def _split_value_into_parts(self, query_str: str) -> None:  # pragma: no cover
        """
        Splits string into related parts and field_name.

        :param query_str: query action string to split (i.e. filter or order by)
        :type query_str: str
        """
        pass

    @abc.abstractmethod
    def get_text_clause(
        self,
    ) -> sqlalchemy.sql.expression.TextClause:  # pragma: no cover
        pass

    @property
    def table(self) -> sqlalchemy.Table:
        """Shortcut to sqlalchemy Table of filtered target model"""
        return self.target_model.ormar_config.table

    @property
    def column(self) -> sqlalchemy.Column:
        """Shortcut to sqlalchemy column of filtered target model"""
        alias = self.target_model.get_column_alias(self.field_name)
        return self.target_model.ormar_config.table.columns[alias]

    def update_select_related(self, select_related: List[str]) -> List[str]:
        """
        Updates list of select related with related part included in the filter
        key. That way if you want to just filter by relation you do not have to
        provide select_related separately.

        :param select_related: list of relation join strings
        :type select_related: List[str]
        :return: list of relation joins with implied joins from filter added
        :rtype: List[str]
        """
        updated = list(select_related)
        needs_join = self.related_str and all(
            not rel.startswith(self.related_str) for rel in updated
        )
        if needs_join:
            updated.append(self.related_str)
        return updated

    def _determine_filter_target_table(self) -> None:
        """
        Walks the relation to retrieve the actual model on which the clause
        should be constructed, extracts alias based on last relation leading
        to target model.
        """
        resolved = get_relationship_alias_model_and_str(
            self.source_model, self.related_parts
        )
        (
            self.table_prefix,
            self.target_model,
            self.related_str,
            self.is_through,
        ) = resolved
collerek-ormar-c09209a/ormar/queryset/actions/select_action.py 0000664 0000000 0000000 00000003466 15130200524 0024611 0 ustar 00root root 0000000 0000000 import decimal
from typing import TYPE_CHECKING, Any, Callable, Optional, Type
import sqlalchemy
from ormar.queryset.actions.query_action import QueryAction # noqa: I202
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
class SelectAction(QueryAction):
    """
    Select actions reference individual (possibly related) columns in queries,
    e.g. when aggregate functions are applied through ``apply_func``.

    All required params are extracted but kept raw until actual clause value
    is required -> then the action is converted into text() clause.

    Extracted in order to easily change table prefixes on complex relations.
    """

    def __init__(
        self, select_str: str, model_cls: Type["Model"], alias: Optional[str] = None
    ) -> None:
        super().__init__(query_str=select_str, model_cls=model_cls)
        if alias:  # pragma: no cover
            self.table_prefix = alias

    def _split_value_into_parts(self, query_str: str) -> None:
        """
        Splits the raw string into the chain of relations and the field name.

        :param query_str: raw select string, i.e. "author__name"
        :type query_str: str
        """
        parts = query_str.split("__")
        self.field_name = parts[-1]
        self.related_parts = parts[:-1]

    @property
    def is_numeric(self) -> bool:
        """True if the target field python type is int, float or Decimal."""
        return self.get_target_field_type() in [int, float, decimal.Decimal]

    def get_target_field_type(self) -> Any:
        """Returns the declared python type of the target model field."""
        return self.target_model.ormar_config.model_fields[self.field_name].__type__

    def get_text_clause(self) -> sqlalchemy.sql.expression.TextClause:
        """
        Returns the (optionally alias-prefixed) column reference as a raw
        text clause.

        :return: compiled column reference
        :rtype: sqlalchemy.sql.elements.TextClause
        """
        alias = f"{self.table_prefix}_" if self.table_prefix else ""
        return sqlalchemy.text(f"{alias}{self.field_name}")

    def apply_func(
        self, func: Callable, use_label: bool = True
    ) -> sqlalchemy.sql.expression.TextClause:
        """
        Wraps the column reference with the given sqlalchemy function and
        optionally labels the result with the full "relation__field" path so
        the value can be read from the result row under that name.

        :param func: sqlalchemy function to apply to the column clause
        :type func: Callable
        :param use_label: if True label result with "relation__field" name
        :type use_label: bool
        :return: function clause, optionally labeled
        :rtype: sqlalchemy.sql.elements.TextClause
        """
        result = func(self.get_text_clause())
        if use_label:
            rel_prefix = f"{self.related_str}__" if self.related_str else ""
            result = result.label(f"{rel_prefix}{self.field_name}")
        return result
collerek-ormar-c09209a/ormar/queryset/clause.py 0000664 0000000 0000000 00000027344 15130200524 0021612 0 ustar 00root root 0000000 0000000 import itertools
from dataclasses import dataclass
from enum import Enum
from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, Type, Union
import sqlalchemy
from sqlalchemy import ColumnElement
import ormar # noqa I100
from ormar.queryset.actions.filter_action import FilterAction
from ormar.queryset.utils import get_relationship_alias_model_and_str
if TYPE_CHECKING: # pragma no cover
from ormar import Model
class FilterType(Enum):
    """Logical operator used to join conditions inside a FilterGroup."""

    AND = 1
    OR = 2
class FilterGroup:
    """
    Filter groups are used in complex queries conditions to group ``and`` and
    ``or`` clauses in the where condition of the query.
    """

    def __init__(
        self,
        *args: Any,
        _filter_type: FilterType = FilterType.AND,
        _exclude: bool = False,
        **kwargs: Any,
    ) -> None:
        # logical operator joining all conditions of this group
        self.filter_type = _filter_type
        # if True the whole group gets wrapped in sql NOT
        self.exclude = _exclude
        # nested FilterGroups passed as positional arguments
        self._nested_groups: List["FilterGroup"] = list(args)
        # flipped to True once resolve() translated kwargs into FilterActions
        self._resolved = False
        self.is_source_model_filter = False
        # raw "field__operator": value pairs, resolved lazily in resolve()
        self._kwargs_dict = kwargs
        self.actions: List[FilterAction] = []

    def __and__(self, other: "FilterGroup") -> "FilterGroup":
        # (a & b) -> new AND group with both groups nested
        return FilterGroup(self, other)

    def __or__(self, other: "FilterGroup") -> "FilterGroup":
        # (a | b) -> new OR group with both groups nested
        return FilterGroup(self, other, _filter_type=FilterType.OR)

    def __invert__(self) -> "FilterGroup":
        # ~a toggles negation in place and returns the same group (no copy)
        self.exclude = not self.exclude
        return self

    def resolve(
        self,
        model_cls: Type["Model"],
        select_related: Optional[List] = None,
        filter_clauses: Optional[List] = None,
    ) -> Tuple[List[FilterAction], List[str]]:
        """
        Resolves the FilterGroups actions to use proper target model, replace
        complex relation prefixes if needed and nested groups also resolved.

        :param model_cls: model from which the query is run
        :type model_cls: Type["Model"]
        :param select_related: list of models to join
        :type select_related: List[str]
        :param filter_clauses: list of filter conditions
        :type filter_clauses: List[FilterAction]
        :return: list of filter conditions and select_related list
        :rtype: Tuple[List[FilterAction], List[str]]
        """
        select_related = select_related if select_related is not None else []
        filter_clauses = filter_clauses if filter_clauses is not None else []
        qryclause = QueryClause(
            model_cls=model_cls,
            select_related=select_related,
            filter_clauses=filter_clauses,
        )
        # resolve only this group's own kwargs into FilterActions
        own_filter_clauses, select_related = qryclause.prepare_filter(
            _own_only=True, **self._kwargs_dict
        )
        self.actions = own_filter_clauses
        filter_clauses = filter_clauses + own_filter_clauses
        self._resolved = True
        if self._nested_groups:
            # recursively resolve nested groups, accumulating clauses/joins
            for group in self._nested_groups:
                (filter_clauses, select_related) = group.resolve(
                    model_cls=model_cls,
                    select_related=select_related,
                    filter_clauses=filter_clauses,
                )
        return filter_clauses, select_related

    def _iter(self) -> Generator:
        """
        Iterates all actions in a tree

        :return: generator yielding from own actions and nested groups
        :rtype: Generator
        """
        for group in self._nested_groups:
            yield from group._iter()
        yield from self.actions

    def _get_text_clauses(
        self,
    ) -> List[Union[sqlalchemy.sql.expression.TextClause, ColumnElement[Any]]]:
        """
        Helper to return list of text queries from actions and nested groups
        (nested groups compiled first, own actions after).

        :return: list of text queries from actions and nested groups
        :rtype: List[sqlalchemy.sql.elements.TextClause]
        """
        return [x.get_text_clause() for x in self._nested_groups] + [
            x.get_text_clause() for x in self.actions
        ]

    def get_text_clause(self) -> ColumnElement[bool]:
        """
        Returns all own actions and nested groups conditions compiled and
        joined inside parentheses.
        Escapes characters if it's required.
        Substitutes values of the models if value is a ormar Model with its pk
        value.
        Compiles the clause.

        :return: compiled and escaped clause
        :rtype: sqlalchemy.sql.elements.TextClause
        """
        if self.filter_type == FilterType.AND:
            clause = sqlalchemy.sql.and_(*self._get_text_clauses()).self_group()
        else:
            clause = sqlalchemy.sql.or_(*self._get_text_clauses()).self_group()
        if self.exclude:
            clause = sqlalchemy.sql.not_(clause)
        return clause
def or_(*args: FilterGroup, **kwargs: Any) -> FilterGroup:
    """
    Construct an ``or`` filter from nested groups and keyword arguments.

    :param args: nested filter groups
    :type args: Tuple[FilterGroup]
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: FilterGroup ready to be resolved
    :rtype: ormar.queryset.clause.FilterGroup
    """
    return FilterGroup(*args, _filter_type=FilterType.OR, **kwargs)
def and_(*args: FilterGroup, **kwargs: Any) -> FilterGroup:
    """
    Construct an ``and`` filter from nested groups and keyword arguments.

    :param args: nested filter groups
    :type args: Tuple[FilterGroup]
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: FilterGroup ready to be resolved
    :rtype: ormar.queryset.clause.FilterGroup
    """
    return FilterGroup(*args, _filter_type=FilterType.AND, **kwargs)
@dataclass
class Prefix:
    """Parsed relation string used to detect duplicated join aliases."""

    source_model: Type["Model"]
    table_prefix: str
    model_cls: Type["Model"]
    relation_str: str
    is_through: bool

    @property
    def alias_key(self) -> str:
        """Key under which the relation alias is registered in the manager."""
        return f"{self.source_model.get_name()}_{self.relation_str}"
class QueryClause:
    """
    Constructs FilterActions from strings passed as arguments
    """

    def __init__(
        self, model_cls: Type["Model"], filter_clauses: List, select_related: List
    ) -> None:
        # copies are taken so the caller's lists are not mutated
        self._select_related = select_related[:]
        self.filter_clauses = filter_clauses[:]
        self.model_cls = model_cls
        self.table = self.model_cls.ormar_config.table

    def prepare_filter(  # noqa: A003
        self, _own_only: bool = False, **kwargs: Any
    ) -> Tuple[List[FilterAction], List[str]]:
        """
        Main external access point that processes the clauses into sqlalchemy
        text clauses and updates select_related list with implicit related
        tables mentioned in select_related strings but not included in
        select_related.

        :param _own_only: if True only clauses built from kwargs are returned,
            without the previously registered filter clauses
        :type _own_only: bool
        :param kwargs: key, value pair with column names and values
        :type kwargs: Any
        :return: Tuple with list of where clauses and updated select_related list
        :rtype: Tuple[List[sqlalchemy.sql.elements.TextClause], List[str]]
        """
        # "pk" is an alias for the primary key - swap it for the real column
        if kwargs.get("pk"):
            pk_name = self.model_cls.get_column_alias(
                self.model_cls.ormar_config.pkname
            )
            kwargs[pk_name] = kwargs.pop("pk")
        filter_clauses, select_related = self._populate_filter_clauses(
            _own_only=_own_only, **kwargs
        )
        return filter_clauses, select_related

    def _populate_filter_clauses(
        self, _own_only: bool, **kwargs: Any
    ) -> Tuple[List[FilterAction], List[str]]:
        """
        Iterates all clauses and extracts used operator and field from related
        models if needed. Based on the chain of related names the target table
        is determined and the final clause is escaped if needed and compiled.

        :param _own_only: if True only clauses built from kwargs are returned
        :type _own_only: bool
        :param kwargs: key, value pair with column names and values
        :type kwargs: Any
        :return: Tuple with list of where clauses and updated select_related list
        :rtype: Tuple[List[sqlalchemy.sql.elements.TextClause], List[str]]
        """
        filter_clauses = self.filter_clauses
        own_filter_clauses = []
        select_related = list(self._select_related)
        for key, value in kwargs.items():
            filter_action = FilterAction(
                filter_str=key, value=value, model_cls=self.model_cls
            )
            # joins implied by the filter key are added to select_related
            select_related = filter_action.update_select_related(
                select_related=select_related
            )
            own_filter_clauses.append(filter_action)
        self._register_complex_duplicates(select_related)
        # note: prefix switching mutates the actions in place, so it affects
        # own_filter_clauses even when only those are returned below
        filter_clauses = self._switch_filter_action_prefixes(
            filter_clauses=filter_clauses + own_filter_clauses
        )
        if _own_only:
            return own_filter_clauses, select_related
        return filter_clauses, select_related

    def _register_complex_duplicates(self, select_related: List[str]) -> None:
        """
        Checks if duplicate aliases are presented which can happen in self
        relation or when two joins end with the same pair of models.

        If there are duplicates, the all duplicated joins are registered as
        source model and whole relation key (not just last relation name).

        :param select_related: list of relation strings
        :type select_related: List[str]
        :return: None
        :rtype: None
        """
        prefixes = self._parse_related_prefixes(select_related=select_related)
        manager = self.model_cls.ormar_config.alias_manager
        # group by table prefix to find relations sharing the same alias
        filtered_prefixes = sorted(prefixes, key=lambda x: x.table_prefix)
        grouped = itertools.groupby(filtered_prefixes, key=lambda x: x.table_prefix)
        for _, group in grouped:
            sorted_group = sorted(
                group, key=lambda x: len(x.relation_str), reverse=True
            )
            # the shortest relation keeps the original alias; the rest get
            # aliases registered under the full relation key
            for prefix in sorted_group[:-1]:
                if prefix.alias_key not in manager:
                    manager.add_alias(alias_key=prefix.alias_key)

    def _parse_related_prefixes(self, select_related: List[str]) -> List[Prefix]:
        """
        Walks all relation strings and parses the target models and prefixes.

        :param select_related: list of relation strings
        :type select_related: List[str]
        :return: list of parsed prefixes
        :rtype: List[Prefix]
        """
        prefixes: List[Prefix] = []
        for related in select_related:
            prefix = Prefix(
                self.model_cls,
                *get_relationship_alias_model_and_str(
                    self.model_cls, related.split("__")
                ),
            )
            prefixes.append(prefix)
        return prefixes

    def _switch_filter_action_prefixes(
        self, filter_clauses: List[FilterAction]
    ) -> List[FilterAction]:
        """
        Substitutes aliases for filter action if the complex key (whole
        relation str) is present in alias_manager.

        :param filter_clauses: raw list of actions
        :type filter_clauses: List[FilterAction]
        :return: list of actions with aliases changed if needed
        :rtype: List[FilterAction]
        """
        for action in filter_clauses:
            # the list may also contain FilterGroups - walk their action trees
            if isinstance(action, FilterGroup):
                for action2 in action._iter():
                    self._verify_prefix_and_switch(action2)
            else:
                self._verify_prefix_and_switch(action)
        return filter_clauses

    def _verify_prefix_and_switch(self, action: "FilterAction") -> None:
        """
        Helper to switch prefix to complex relation one if required

        :param action: action to switch prefix in
        :type action: ormar.queryset.actions.filter_action.FilterAction
        """
        manager = self.model_cls.ormar_config.alias_manager
        new_alias = manager.resolve_relation_alias(self.model_cls, action.related_str)
        if "__" in action.related_str and new_alias:
            action.table_prefix = new_alias
collerek-ormar-c09209a/ormar/queryset/field_accessor.py 0000664 0000000 0000000 00000022236 15130200524 0023276 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Optional, Type, cast
from ormar.queryset.actions import OrderAction
from ormar.queryset.actions.filter_action import METHODS_TO_OPERATORS
from ormar.queryset.clause import FilterGroup
if TYPE_CHECKING: # pragma: no cover
from ormar import BaseField, Model
class FieldAccessor:
    """
    Helper to access ormar fields directly from Model class also for nested
    models attributes.
    """

    def __init__(
        self,
        source_model: Type["Model"],
        field: Optional["BaseField"] = None,
        model: Optional[Type["Model"]] = None,
        access_chain: str = "",
    ) -> None:
        # model from which the whole access chain started
        self._source_model = source_model
        # set when the accessor points at a concrete (non relation) field
        self._field = field
        # set when the accessor points at a related model
        self._model = model
        # "__" joined path of attribute accesses, i.e. "__author__name"
        self._access_chain = access_chain

    def __getattr__(self, item: str) -> Any:
        """
        Accessor returns a new accessor for each field and nested model.
        Thanks to that operator overload is possible to use in filter.

        :param item: attribute name
        :type item: str
        :return: FieldAccessor for field or nested model
        :rtype: ormar.queryset.field_accessor.FieldAccessor
        """
        # object.__getattribute__ is used to read own attributes without
        # re-entering this __getattr__ and causing infinite recursion
        if (
            object.__getattribute__(self, "_field")
            and item == object.__getattribute__(self, "_field").name
        ):
            return self._field
        if (
            object.__getattribute__(self, "_model")
            and item
            in object.__getattribute__(self, "_model").ormar_config.model_fields
        ):
            field = cast("Model", self._model).ormar_config.model_fields[item]
            if field.is_relation:
                # follow the relation - new accessor points at the related model
                return FieldAccessor(
                    source_model=self._source_model,
                    model=field.to,
                    access_chain=self._access_chain + f"__{item}",
                )
            else:
                # plain field - new accessor can be used with operators
                return FieldAccessor(
                    source_model=self._source_model,
                    field=field,
                    access_chain=self._access_chain + f"__{item}",
                )
        return object.__getattribute__(self, item)  # pragma: no cover

    def _check_field(self) -> None:
        # guard: operators are only valid on concrete fields, not on models
        if not self._field:
            raise AttributeError(
                "Cannot filter by Model, you need to provide model name"
            )

    def _select_operator(self, op: str, other: Any) -> FilterGroup:
        # builds a single-condition FilterGroup, i.e. {"author__name__exact": x}
        self._check_field()
        filter_kwg = {self._access_chain + f"__{METHODS_TO_OPERATORS[op]}": other}
        return FilterGroup(**filter_kwg)

    def __eq__(self, other: Any) -> FilterGroup:  # type: ignore
        """
        overloaded to work as sql `column = `

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__eq__", other=other)

    def __ge__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column >= `

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__ge__", other=other)

    def __gt__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column > `

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__gt__", other=other)

    def __le__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column <= `

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__le__", other=other)

    def __lt__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column < `

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__lt__", other=other)

    def __mod__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column LIKE '%<VALUE>%'`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="__mod__", other=other)

    def __lshift__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column IN (values)`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="in", other=other)

    def __rshift__(self, other: Any) -> FilterGroup:
        """
        overloaded to work as sql `column IS NULL`
        (note: the right-hand operand is ignored, True is always used)

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="isnull", other=True)

    def in_(self, other: Any) -> FilterGroup:
        """
        works as sql `column IN (values)`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="in", other=other)

    def iexact(self, other: Any) -> FilterGroup:
        """
        works as sql `column = ` case-insensitive

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="iexact", other=other)

    def contains(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '%<VALUE>%'`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="contains", other=other)

    def icontains(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '%<VALUE>%'` case-insensitive

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="icontains", other=other)

    def startswith(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '<VALUE>%'`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="startswith", other=other)

    def istartswith(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '<VALUE>%'` case-insensitive

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="istartswith", other=other)

    def endswith(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '%<VALUE>'`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="endswith", other=other)

    def iendswith(self, other: Any) -> FilterGroup:
        """
        works as sql `column LIKE '%<VALUE>'` case-insensitive

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="iendswith", other=other)

    def isnull(self, other: Any) -> FilterGroup:
        """
        works as sql `column IS NULL` or `IS NOT NULL`

        :param other: value to check against operator
        :type other: Any
        :return: FilterGroup for operator
        :rtype: ormar.queryset.clause.FilterGroup
        """
        return self._select_operator(op="isnull", other=other)

    def asc(self) -> OrderAction:
        """
        works as sql `column asc`

        :return: OrderAction for sorting
        :rtype: ormar.queryset.actions.OrderAction
        """
        return OrderAction(order_str=self._access_chain, model_cls=self._source_model)

    def desc(self) -> OrderAction:
        """
        works as sql `column desc`

        :return: OrderAction for sorting
        :rtype: ormar.queryset.actions.OrderAction
        """
        return OrderAction(
            order_str="-" + self._access_chain, model_cls=self._source_model
        )
collerek-ormar-c09209a/ormar/queryset/join.py 0000664 0000000 0000000 00000037151 15130200524 0021272 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, cast
import sqlalchemy
from sqlalchemy import TextClause, text
import ormar # noqa I100
from ormar.exceptions import ModelDefinitionError, RelationshipInstanceError
from ormar.relations import AliasManager
if TYPE_CHECKING: # pragma no cover
from ormar import ManyToManyField, Model
from ormar.models.excludable import ExcludableItems
from ormar.queryset import OrderAction
class SqlJoin:
def __init__( # noqa: CFQ002
self,
used_aliases: List,
select_from: sqlalchemy.sql.Select,
columns: List[sqlalchemy.Column],
excludable: "ExcludableItems",
order_columns: Optional[List["OrderAction"]],
sorted_orders: Dict,
main_model: Type["Model"],
relation_name: str,
relation_str: str,
related_models: Any = None,
own_alias: str = "",
source_model: Optional[Type["Model"]] = None,
already_sorted: Optional[Dict] = None,
) -> None:
self.relation_name = relation_name
self.related_models = related_models or []
self.select_from = select_from
self.columns = columns
self.excludable = excludable
self.order_columns = order_columns
self.sorted_orders = sorted_orders
self.already_sorted = already_sorted or dict()
self.main_model = main_model
self.own_alias = own_alias
self.used_aliases = used_aliases
self.target_field = self.main_model.ormar_config.model_fields[
self.relation_name
]
self._next_model: Optional[Type["Model"]] = None
self._next_alias: Optional[str] = None
self.relation_str = relation_str
self.source_model = source_model
@property
def next_model(self) -> Type["Model"]:
if not self._next_model: # pragma: nocover
raise RelationshipInstanceError(
"Cannot link to related table if relation.to model is not set."
)
return self._next_model
@next_model.setter
def next_model(self, value: Type["Model"]) -> None:
self._next_model = value
@property
def next_alias(self) -> str:
if not self._next_alias: # pragma: nocover
raise RelationshipInstanceError("Alias for given relation not found.")
return self._next_alias
@next_alias.setter
def next_alias(self, value: str) -> None:
self._next_alias = value
@property
def alias_manager(self) -> AliasManager:
"""
Shortcut for ormar's model AliasManager stored on OrmarConfig.
:return: alias manager from model's OrmarConfig
:rtype: AliasManager
"""
return self.main_model.ormar_config.alias_manager
@property
def to_table(self) -> sqlalchemy.Table:
"""
Shortcut to table name of the next model
:return: name of the target table
:rtype: str
"""
return self.next_model.ormar_config.table
def _on_clause(
self,
previous_alias: str,
from_table_name: str,
from_column_name: str,
to_table_name: str,
to_column_name: str,
) -> TextClause:
"""
Receives aliases and names of both ends of the join and combines them
into one text clause used in joins.
:param previous_alias: alias of previous table
:type previous_alias: str
:param from_table_name: from table name
:type from_table_name: str
:param from_column_name: from column name
:type from_column_name: str
:param to_table_name: to table name
:type to_table_name: str
:param to_column_name: to column name
:type to_column_name: str
:return: clause combining all strings
:rtype: sqlalchemy.text
"""
dialect = self.main_model.ormar_config.database._backend._dialect
quoter = dialect.identifier_preparer.quote
left_part = (
f"{quoter(f'{self.next_alias}_{to_table_name}')}.{quoter(to_column_name)}"
)
if not previous_alias:
right_part = f"{quoter(from_table_name)}.{quoter(from_column_name)}"
else:
right_part = (
f"{quoter(f'{previous_alias}_{from_table_name}')}.{from_column_name}"
)
return text(f"{left_part}={right_part}")
def build_join(self) -> Tuple[List, sqlalchemy.sql.Select, List, Dict]:
"""
Main external access point for building a join.
Splits the join definition, updates fields and exclude_fields if needed,
handles switching to through models for m2m relations, returns updated lists of
used_aliases and sort_orders.
:return: list of used aliases, select from, list of aliased columns, sort orders
:rtype: Tuple[List[str], Join, List[TextClause], Dict]
"""
if self.target_field.is_multi:
self._process_m2m_through_table()
self.next_model = self.target_field.to
self._forward_join()
self._process_following_joins()
return (self.used_aliases, self.select_from, self.columns, self.sorted_orders)
def _forward_join(self) -> None:
"""
Process actual join.
Registers complex relation join on encountering of the duplicated alias.
"""
self.next_alias = self.alias_manager.resolve_relation_alias(
from_model=self.target_field.owner, relation_name=self.relation_name
)
if self.next_alias not in self.used_aliases:
self._process_join()
else:
if "__" in self.relation_str and self.source_model:
relation_key = f"{self.source_model.get_name()}_{self.relation_str}"
if relation_key not in self.alias_manager:
self.next_alias = self.alias_manager.add_alias(
alias_key=relation_key
)
else:
self.next_alias = self.alias_manager[relation_key]
self._process_join()
def _process_following_joins(self) -> None:
"""
Iterates through nested models to create subsequent joins.
"""
for related_name in self.related_models:
remainder = None
if (
isinstance(self.related_models, dict)
and self.related_models[related_name]
):
remainder = self.related_models[related_name]
self._process_deeper_join(related_name=related_name, remainder=remainder)
def _process_deeper_join(self, related_name: str, remainder: Any) -> None:
"""
Creates nested recurrent instance of SqlJoin for each nested join table,
updating needed return params here as a side effect.
Updated are:
* self.used_aliases,
* self.select_from,
* self.columns,
* self.sorted_orders,
:param related_name: name of the relation to follow
:type related_name: str
:param remainder: deeper tables if there are more nested joins
:type remainder: Any
"""
sql_join = SqlJoin(
used_aliases=self.used_aliases,
select_from=self.select_from,
columns=self.columns,
excludable=self.excludable,
order_columns=self.order_columns,
sorted_orders=self.sorted_orders,
main_model=self.next_model,
relation_name=related_name,
related_models=remainder,
relation_str="__".join([self.relation_str, related_name]),
own_alias=self.next_alias,
source_model=self.source_model or self.main_model,
already_sorted=self.already_sorted,
)
(
self.used_aliases,
self.select_from,
self.columns,
self.sorted_orders,
) = sql_join.build_join()
def _process_m2m_through_table(self) -> None:
"""
Process Through table of the ManyToMany relation so that source table is
linked to the through table (one additional join)
Replaces needed parameters like:
* self.next_model,
* self.next_alias,
* self.relation_name,
* self.own_alias,
* self.target_field
To point to through model
"""
new_part = self._process_m2m_related_name_change()
self.next_model = self.target_field.through
self._forward_join()
self.relation_name = new_part
self.own_alias = self.next_alias
self.target_field = self.next_model.ormar_config.model_fields[
self.relation_name
]
def _process_m2m_related_name_change(self, reverse: bool = False) -> str:
    """
    Resolves the field name used to traverse the Through model declared on
    a ManyToMany relation field.

    Changes the same names in order_by queries if they are present.

    :param reverse: flag if it's on_clause lookup - use reverse fields
    :type reverse: bool
    :return: new relation name switched to through model field
    :rtype: str
    """
    field = self.target_field
    is_primary_self_ref = bool(
        field.self_reference
        and self.relation_name == field.self_reference_primary
    )
    # XOR: a primary self-reference flips which side of the through table
    # is used, and so does the reverse (on_clause) lookup flag
    if is_primary_self_ref != reverse:
        return field.default_source_field_name()  # type: ignore
    return field.default_target_field_name()  # type: ignore
def _process_join(self) -> None:  # noqa: CFQ002
    """
    Resolves to and from column names and table names.

    Produces on_clause.

    Performs actual join updating select_from parameter.

    Adds aliases of required column to list of columns to include in query.

    Updates the used aliases list directly.

    Process order_by clauses for non m2m relations.
    """
    to_key, from_key = self._get_to_and_from_keys()

    # ON clause links the previous (aliased) table to the new target table
    on_clause = self._on_clause(
        previous_alias=self.own_alias,
        from_table_name=self.target_field.owner.ormar_config.tablename,
        from_column_name=from_key,
        to_table_name=self.to_table.name,
        to_column_name=to_key,
    )
    target_table = self.alias_manager.prefixed_table_name(
        self.next_alias, self.to_table
    )
    # LEFT OUTER JOIN so main model rows without related rows are kept
    self.select_from = sqlalchemy.sql.outerjoin(
        self.select_from, target_table, on_clause  # type: ignore
    )

    self._get_order_bys()

    self_related_fields = self.next_model.own_table_columns(
        model=self.next_model,
        excludable=self.excludable,
        alias=self.next_alias,
        use_alias=True,
    )
    # expose the joined table's columns under their alias-prefixed names
    self.columns.extend(
        self.alias_manager.prefixed_columns(  # type: ignore
            self.next_alias, target_table, self_related_fields  # type: ignore
        )
    )
    self.used_aliases.append(self.next_alias)
def _set_default_primary_key_order_by(self) -> None:
    """
    Registers the default order by clauses (taken from the joined model's
    OrmarConfig) for the freshly joined table alias.
    """
    model = self.next_model
    for order_string in model.ormar_config.orders_by:
        action = ormar.OrderAction(
            order_str=order_string, model_cls=model, alias=self.next_alias
        )
        self.sorted_orders[action] = action.get_text_clause()
def _verify_allowed_order_field(self, order_by: str) -> None:
    """
    Checks that an order by on a m2m relation points either at the related
    table or at the link (through) table.

    :param order_by: string with order by definition
    :type order_by: str
    :raises ModelDefinitionError: when the order by string is not allowed
    """
    chunks = order_by.split("__")
    through_name = self.target_field.through.get_name()
    if len(chunks) > 2 or chunks[0] != through_name:
        raise ModelDefinitionError(
            "You can order the relation only by related or link table columns!"
        )
def _get_alias_and_model(self, order_by: str) -> Tuple[str, Type["Model"]]:
    """
    Resolves which model and table alias an order by clause should target.

    :param order_by: string with order by definition
    :type order_by: str
    :return: alias and model to be used in clause
    :rtype: Tuple[str, Type["Model"]]
    """
    field = self.target_field
    if field.is_multi and "__" in order_by:
        # ordering by through-model columns - validate and use current join
        self._verify_allowed_order_field(order_by=order_by)
        return self.next_alias, field.owner
    if field.is_multi:
        alias = self.alias_manager.resolve_relation_alias(
            from_model=field.through,
            relation_name=cast(
                "ManyToManyField", field
            ).default_target_field_name(),
        )
        return alias, field.to
    # plain relation - resolve the alias from the owning model's side
    alias = self.alias_manager.resolve_relation_alias(
        from_model=field.owner, relation_name=field.name
    )
    return alias, field.to
def _get_order_bys(self) -> None:  # noqa: CCR001
    """
    Triggers construction of order bys if they are given.
    Otherwise by default each table is sorted by a primary key column asc.
    """
    alias = self.next_alias
    current_table_sorted = False
    # a table counts as sorted if a previous join already registered it
    if f"{alias}_{self.next_model.get_name()}" in self.already_sorted:
        current_table_sorted = True

    # user-provided order columns take precedence
    if self.order_columns:
        for condition in self.order_columns:
            if condition.check_if_filter_apply(
                target_model=self.next_model, alias=alias
            ):
                current_table_sorted = True
                self.sorted_orders[condition] = condition.get_text_clause()
                self.already_sorted[
                    f"{self.next_alias}_{self.next_model.get_name()}"
                ] = condition

    # fall back to orders_by declared on the relation field itself
    if self.target_field.orders_by and not current_table_sorted:
        current_table_sorted = True
        for order_by in self.target_field.orders_by:
            # NOTE: `alias` is deliberately rebound here - a relation-level
            # order can target a different table (i.e. the through model)
            alias, model = self._get_alias_and_model(order_by=order_by)
            clause = ormar.OrderAction(
                order_str=order_by, model_cls=model, alias=alias
            )
            self.sorted_orders[clause] = clause.get_text_clause()
            self.already_sorted[f"{alias}_{model.get_name()}"] = clause

    # default pk ordering is applied only for non-m2m relations
    if not current_table_sorted and not self.target_field.is_multi:
        self._set_default_primary_key_order_by()
def _get_to_and_from_keys(self) -> Tuple[str, str]:
    """
    Resolves the column aliases used on both sides of the join's on_clause.
    The resolution differs for ManyToMany relations, reverse (virtual)
    relations and plain ForeignKeys.

    :return: to key and from key
    :rtype: Tuple[str, str]
    """
    field = self.target_field
    main_pk_alias = self.main_model.get_column_alias(
        self.main_model.ormar_config.pkname
    )
    if field.is_multi:
        # m2m: join the through table column against the owner's pk
        return self._process_m2m_related_name_change(reverse=True), main_pk_alias
    if field.virtual:
        # reverse relation: the child table carries the fk column
        related_name = field.get_related_name()
        return field.to.get_column_alias(related_name), main_pk_alias
    # plain fk: join target pk against the fk column on the main model
    to_key = field.to.get_column_alias(field.to.ormar_config.pkname)
    return to_key, self.main_model.get_column_alias(self.relation_name)
collerek-ormar-c09209a/ormar/queryset/queries/ 0000775 0000000 0000000 00000000000 15130200524 0021427 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/queryset/queries/__init__.py 0000664 0000000 0000000 00000000726 15130200524 0023545 0 ustar 00root root 0000000 0000000 from ormar.queryset.queries.filter_query import FilterQuery
from ormar.queryset.queries.limit_query import LimitQuery
from ormar.queryset.queries.offset_query import OffsetQuery
from ormar.queryset.queries.order_query import OrderQuery
from ormar.queryset.queries.prefetch_query import PrefetchQuery
from ormar.queryset.queries.query import Query
__all__ = [
"FilterQuery",
"LimitQuery",
"OffsetQuery",
"OrderQuery",
"PrefetchQuery",
"Query",
]
collerek-ormar-c09209a/ormar/queryset/queries/filter_query.py 0000664 0000000 0000000 00000002415 15130200524 0024515 0 ustar 00root root 0000000 0000000 from typing import Any, List, Union
import sqlalchemy
from sqlalchemy import ColumnElement, Select, TextClause
from ormar.queryset.actions.filter_action import FilterAction
class FilterQuery:
    """
    Modifies the select query with given list of where/filter clauses.
    """

    def __init__(
        self, filter_clauses: List[FilterAction], exclude: bool = False
    ) -> None:
        self.exclude = exclude
        self.filter_clauses = filter_clauses

    def apply(
        self,
        expr: Select[Any],
    ) -> Select[Any]:
        """
        Applies all filter clauses if set.

        :param expr: query to modify
        :type expr: sqlalchemy.sql.selectable.Select
        :return: modified query
        :rtype: sqlalchemy.sql.selectable.Select
        """
        if not self.filter_clauses:
            return expr
        text_clauses = [action.get_text_clause() for action in self.filter_clauses]
        if len(text_clauses) == 1:
            combined: Union[TextClause, ColumnElement[Any]] = text_clauses[0]
        else:
            combined = sqlalchemy.sql.and_(*text_clauses)
        if self.exclude:
            # exclude mode negates the whole combined condition
            combined = sqlalchemy.sql.not_(combined)
        return expr.where(combined)
collerek-ormar-c09209a/ormar/queryset/queries/limit_query.py 0000664 0000000 0000000 00000001211 15130200524 0024337 0 ustar 00root root 0000000 0000000 from typing import Optional
import sqlalchemy
class LimitQuery:
    """
    Modifies the select query with limit clause.
    """

    def __init__(self, limit_count: Optional[int]) -> None:
        self.limit_count = limit_count

    def apply(self, expr: sqlalchemy.sql.Select) -> sqlalchemy.sql.Select:
        """
        Attaches a LIMIT clause when a limit count was provided.

        :param expr: query to modify
        :type expr: sqlalchemy.sql.selectable.Select
        :return: modified query
        :rtype: sqlalchemy.sql.selectable.Select
        """
        # an explicit `is None` check so that limit(0) is still applied
        if self.limit_count is None:
            return expr
        return expr.limit(self.limit_count)
collerek-ormar-c09209a/ormar/queryset/queries/offset_query.py 0000664 0000000 0000000 00000001203 15130200524 0024510 0 ustar 00root root 0000000 0000000 from typing import Optional
import sqlalchemy
class OffsetQuery:
    """
    Modifies the select query with offset if set
    """

    def __init__(self, query_offset: Optional[int]) -> None:
        self.query_offset = query_offset

    def apply(self, expr: sqlalchemy.sql.Select) -> sqlalchemy.sql.Select:
        """
        Attaches an OFFSET clause when a non-zero offset was provided.

        :param expr: query to modify
        :type expr: sqlalchemy.sql.selectable.Select
        :return: modified query
        :rtype: sqlalchemy.sql.selectable.Select
        """
        # truthiness check: both None and 0 leave the query untouched
        if not self.query_offset:
            return expr
        return expr.offset(self.query_offset)
collerek-ormar-c09209a/ormar/queryset/queries/order_query.py 0000664 0000000 0000000 00000001365 15130200524 0024346 0 ustar 00root root 0000000 0000000 from typing import Dict
import sqlalchemy
class OrderQuery:
    """
    Modifies the select query with given list of order_by clauses.
    """

    def __init__(self, sorted_orders: Dict) -> None:
        self.sorted_orders = sorted_orders

    def apply(self, expr: sqlalchemy.sql.Select) -> sqlalchemy.sql.Select:
        """
        Applies all order_by clauses if set.

        :param expr: query to modify
        :type expr: sqlalchemy.sql.selectable.Select
        :return: modified query
        :rtype: sqlalchemy.sql.selectable.Select
        """
        if not self.sorted_orders:
            return expr
        # values can be None placeholders - only real clauses are applied
        clauses = [order for order in self.sorted_orders.values() if order is not None]
        for clause in clauses:
            expr = expr.order_by(clause)
        return expr
collerek-ormar-c09209a/ormar/queryset/queries/prefetch_query.py 0000664 0000000 0000000 00000050566 15130200524 0025042 0 ustar 00root root 0000000 0000000 import abc
import logging
from abc import abstractmethod
from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Sequence,
Tuple,
Type,
Union,
cast,
)
import ormar # noqa: I100, I202
from ormar.queryset.clause import QueryClause
from ormar.queryset.queries.query import Query
from ormar.queryset.utils import translate_list_to_dict
if TYPE_CHECKING: # pragma: no cover
from ormar import ForeignKeyField, Model
from ormar.models.excludable import ExcludableItems
from ormar.queryset import FilterAction, OrderAction
logger = logging.getLogger(__name__)
class UniqueList(list):
    """
    List subclass that rejects duplicate appends while keeping insertion
    order.

    A plain ``set`` cannot be used here because the order in which items
    were appended has to be preserved.
    """

    def append(self, item: Any) -> None:
        """Append ``item`` only when it is not already present."""
        already_present = item in self
        if not already_present:
            list.append(self, item)
class Node(abc.ABC):
    """
    Base Node use to build a query tree and divide job into already loaded models
    and the ones that still need to be fetched from database
    """

    def __init__(self, relation_field: "ForeignKeyField", parent: "Node") -> None:
        self.parent = parent
        self.children: List["Node"] = []
        if self.parent:
            # register self in the parent so the tree can be walked downwards
            self.parent.children.append(self)
        self.relation_field = relation_field
        self.table_prefix = ""
        self.rows: List = []
        self.models: List["Model"] = []
        # flag whether column lookups on this node use aliased (prefixed) names
        self.use_alias: bool = False

    @property
    def target_name(self) -> str:
        """
        Return the name of the relation that is used to
        fetch excludes/includes from the excludable mixin
        as well as specifying the target to join in m2m relations

        :return: name of the relation
        :rtype: str
        """
        if (
            self.relation_field.self_reference
            and self.relation_field.self_reference_primary == self.relation_field.name
        ):
            return self.relation_field.default_source_field_name()
        else:
            return self.relation_field.default_target_field_name()

    @abstractmethod
    def extract_related_ids(self, column_name: str) -> List:  # pragma: no cover
        """Extract values of the relation column from own models/rows."""
        pass

    @abstractmethod
    def reload_tree(self) -> None:  # pragma: no cover
        """Rebuild/propagate model instances after data was loaded."""
        pass

    @abstractmethod
    async def load_data(self) -> None:  # pragma: no cover
        """Fetch required rows from the database (if any) for the subtree."""
        pass

    def get_filter_for_prefetch(self) -> List["FilterAction"]:
        """
        Populates where clause with condition to return only models within the
        set of extracted ids.

        If there are no ids for relation the empty list is returned.

        :return: list of filter clauses based on original models
        :rtype: List["FilterAction"]
        """
        column_name = self.relation_field.get_model_relation_fields(
            self.parent.use_alias
        )
        ids = self.parent.extract_related_ids(column_name=column_name)
        if ids:
            return self._prepare_filter_clauses(ids=ids)
        # no ids on the parent side -> nothing to fetch for this relation
        return []

    def _prepare_filter_clauses(self, ids: List) -> List["FilterAction"]:
        """
        Gets the list of ids and construct a list of filter queries on
        extracted appropriate column names

        :param ids: list of ids that should be used to fetch data
        :type ids: List
        :return: list of filter actions to use in query
        :rtype: List["FilterAction"]
        """
        clause_target = self.relation_field.get_filter_clause_target()
        filter_column = self.relation_field.get_related_field_alias()
        qryclause = QueryClause(
            model_cls=clause_target,
            select_related=[],
            filter_clauses=[],
        )
        # builds a `column__in=ids` filter against the related table
        kwargs = {f"{cast(str, filter_column)}__in": ids}
        filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs)
        return filter_clauses
class AlreadyLoadedNode(Node):
    """
    Node that was already loaded in select statement
    """

    def __init__(self, relation_field: "ForeignKeyField", parent: "Node") -> None:
        super().__init__(relation_field=relation_field, parent=parent)
        # models are already in memory - plain attribute access, no aliases
        self.use_alias = False
        self._extract_own_models()

    def _extract_own_models(self) -> None:
        """
        Extract own models that were already fetched and attached to root node
        """
        for model in self.parent.models:
            child_models = getattr(model, self.relation_field.name)
            if isinstance(child_models, list):
                # to-many relation - every child belongs to this node
                self.models.extend(child_models)
            elif child_models:
                self.models.append(child_models)

    async def load_data(self) -> None:
        """
        Triggers a data load in the child nodes
        """
        for child in self.children:
            await child.load_data()

    def reload_tree(self) -> None:
        """
        After data was loaded we reload whole tree from the bottom
        to include freshly loaded nodes
        """
        for child in self.children:
            child.reload_tree()

    def extract_related_ids(self, column_name: str) -> List:
        """
        Extracts the selected column values from own models.
        Those values are used to construct filter clauses and populate child models.

        :param column_name: name of the column that holds the relation info
        :type column_name: str
        :return: List of extracted values of relation columns
        :rtype: List
        """
        list_of_ids = UniqueList()
        for model in self.models:
            child = getattr(model, column_name)
            if isinstance(child, ormar.Model):
                # related model instance is populated - use its pk value
                list_of_ids.append(child.pk)
            elif child is not None:
                list_of_ids.append(child)
        return list_of_ids
class RootNode(AlreadyLoadedNode):
    """
    Tree root holding the models returned by the main query; both the main
    and the prefetch query originate from this node.

    ``Node.__init__`` is deliberately not invoked - the root node has no
    relation field and no parent.
    """

    def __init__(self, models: List["Model"]) -> None:
        self.children = []
        self.use_alias = False
        self.models = models

    def reload_tree(self) -> None:
        """Reload every child subtree to pick up freshly loaded models."""
        for subtree in self.children:
            subtree.reload_tree()
class LoadNode(Node):
    """
    Nodes that actually need to be fetched from database in the prefetch query
    """

    def __init__(
        self,
        relation_field: "ForeignKeyField",
        excludable: "ExcludableItems",
        orders_by: List["OrderAction"],
        parent: "Node",
        source_model: Type["Model"],
    ) -> None:
        super().__init__(relation_field=relation_field, parent=parent)
        self.excludable = excludable
        # alias prefix used to look up include/exclude settings
        self.exclude_prefix: str = ""
        self.orders_by = orders_by
        # rows come straight from the database, so columns are alias-prefixed
        self.use_alias = True
        # own models grouped by relation key value for quick per-parent lookup
        self.grouped_models: Dict[Any, List["Model"]] = dict()
        self.source_model = source_model

    async def load_data(self) -> None:
        """
        Ensures that at least primary key columns from current model are included in
        the query.

        Gets the filter values from the parent model and runs the query.

        Triggers a data load in child tasks.
        """
        self._update_excludable_with_related_pks()
        if self.relation_field.is_multi:
            # m2m - query the through table and select-relate the target
            query_target = self.relation_field.through
            select_related = [self.target_name]
        else:
            query_target = self.relation_field.to
            select_related = []

        filter_clauses = self.get_filter_for_prefetch()
        # empty filters mean the parent has no related ids - skip the query
        if filter_clauses:
            qry = Query(
                model_cls=query_target,
                select_related=select_related,
                filter_clauses=filter_clauses,
                exclude_clauses=[],
                offset=None,
                limit_count=None,
                excludable=self.excludable,
                order_bys=self._extract_own_order_bys(),
                limit_raw_sql=False,
            )
            expr = qry.build_select_expression()
            logger.debug(
                expr.compile(
                    dialect=self.source_model.ormar_config.database._backend._dialect,
                    compile_kwargs={"literal_binds": True},
                )
            )
            self.rows = await query_target.ormar_config.database.fetch_all(expr)
        for child in self.children:
            await child.load_data()

    def _update_excludable_with_related_pks(self) -> None:
        """
        Makes sure that excludable is populated with own model primary keys values
        if the excludable has the exclude/include clauses
        """
        related_field_names = self.relation_field.get_related_field_name()
        alias_manager = self.relation_field.to.ormar_config.alias_manager
        relation_key = self._build_relation_key()
        self.exclude_prefix = alias_manager.resolve_relation_alias_after_complex(
            source_model=self.source_model,
            relation_str=relation_key,
            relation_field=self.relation_field,
        )
        if self.relation_field.is_multi:
            # for m2m the rows are read with the same prefix used for excludes
            self.table_prefix = self.exclude_prefix
        target_model = self.relation_field.to
        model_excludable = self.excludable.get(
            model_cls=target_model, alias=self.exclude_prefix
        )
        # includes nested pks if not included already
        for related_name in related_field_names:
            if model_excludable.include and not model_excludable.is_included(
                related_name
            ):
                model_excludable.set_values({related_name}, is_exclude=False)

    def _build_relation_string(self) -> str:
        """Build the "__"-joined relation path from the root to this node."""
        node: Union[LoadNode, Node] = self
        relation = node.relation_field.name
        while not isinstance(node.parent, RootNode):
            relation = node.parent.relation_field.name + "__" + relation
            node = node.parent
        return relation

    def _build_relation_key(self) -> str:
        """Return the relation path used to resolve this node's alias."""
        relation_key = self._build_relation_string()
        return relation_key

    def _extract_own_order_bys(self) -> List["OrderAction"]:
        """
        Extracts list of order actions related to current model.

        Since same model can happen multiple times in a tree we check not only the
        match on given model but also that path from relation tree matches the
        path in order action.

        :return: list of order actions related to current model
        :rtype: List[OrderAction]
        """
        own_order_bys = []
        own_path = self._get_full_tree_path()
        for order_by in self.orders_by:
            if (
                order_by.target_model == self.relation_field.to
                and order_by.related_str.endswith(f"{own_path}")
            ):
                # mark as a source-model order so the query applies it directly
                order_by.is_source_model_order = True
                order_by.table_prefix = self.table_prefix
                own_order_bys.append(order_by)
        return own_order_bys

    def _get_full_tree_path(self) -> str:
        """
        Iterates the nodes to extract path from root node.

        :return: path from root node
        :rtype: str
        """
        node: Node = self
        relation_str = node.relation_field.name
        while not isinstance(node.parent, RootNode):
            node = node.parent
            relation_str = f"{node.relation_field.name}__{relation_str}"
        return relation_str

    def extract_related_ids(self, column_name: str) -> List:
        """
        Extracts the selected column values from own rows.
        Those values are used to construct filter clauses and populate child models.

        :param column_name: name of the column that holds the relation info
        :type column_name: str
        :return: List of extracted values of relation columns
        :rtype: List
        """
        column_name = self._prefix_column_names_with_table_prefix(
            column_name=column_name
        )
        return self._extract_simple_relation_keys(column_name=column_name)

    def _prefix_column_names_with_table_prefix(self, column_name: str) -> str:
        """Prepend the table alias prefix (when set) to a column name."""
        return (f"{self.table_prefix}_" if self.table_prefix else "") + column_name

    def _extract_simple_relation_keys(self, column_name: str) -> List:
        """
        Extracts simple relation keys values.

        :param column_name: name of the column that holds the relation info
        :type column_name: str
        :return: List of extracted values of relation columns
        :rtype: List
        """
        list_of_ids = UniqueList()
        for row in self.rows:
            if row[column_name]:
                list_of_ids.append(row[column_name])
        return list_of_ids

    def reload_tree(self) -> None:
        """
        Instantiates models from loaded database rows.

        Groups those instances by relation key for easy extract per parent.

        Triggers same for child nodes and then populates
        the parent node with own related models
        """
        if self.rows:
            self._instantiate_models()
            self._group_models_by_relation_key()
        for child in self.children:
            child.reload_tree()
        self._populate_parent_models()

    def _instantiate_models(self) -> None:
        """
        Iterates the rows and initializes instances of ormar.Models.

        Each model is instantiated only once (they can be duplicates for m2m relation
        when multiple parent models refer to same child model since the query have to
        also include the through model - hence full rows are unique, but related
        models without through models can be not unique).
        """
        fields_to_exclude = self.relation_field.to.get_names_to_exclude(
            excludable=self.excludable, alias=self.exclude_prefix
        )
        parsed_rows: Dict[Tuple, "Model"] = {}
        for row in self.rows:
            item = self.relation_field.to.extract_prefixed_table_columns(
                item={},
                row=row,
                table_prefix=self.table_prefix,
                excludable=self.excludable,
            )
            # setdefault deduplicates: the same extracted dict maps to one
            # shared model instance
            hashable_item = self._hash_item(item)
            instance = parsed_rows.setdefault(
                hashable_item,
                self.relation_field.to(**item, **{"__excluded__": fields_to_exclude}),
            )
            self.models.append(instance)

    def _hash_item(self, item: Dict) -> Tuple:
        """
        Converts model dictionary into tuple to make it hashable and allow to use it
        as a dictionary key - used to ensure unique instances of related models.

        :param item: instance dictionary
        :type item: Dict
        :return: tuple out of model dictionary
        :rtype: Tuple
        """
        result = []
        for key, value in sorted(item.items()):
            # nested dicts are recursively converted to tuples as well
            result.append(
                (key, self._hash_item(value) if isinstance(value, dict) else value)
            )
        return tuple(result)

    def _group_models_by_relation_key(self) -> None:
        """
        Groups own models by relation keys so it's easy later to extract those models
        when iterating parent models. Note that order is important as it reflects
        order by issued by the user.
        """
        relation_key = self.relation_field.get_related_field_alias()
        for index, row in enumerate(self.rows):
            key = row[relation_key]
            current_group = self.grouped_models.setdefault(key, [])
            current_group.append(self.models[index])

    def _populate_parent_models(self) -> None:
        """
        Populate parent node models with own child models from grouped dictionary
        """
        relation_key = self._get_relation_key_linking_models()
        for model in self.parent.models:
            children = self._get_own_models_related_to_parent(
                model=model, relation_key=relation_key
            )
            for child in children:
                # setattr goes through the relation descriptor, which appends
                # for to-many relations and assigns for to-one relations
                setattr(model, self.relation_field.name, child)

    def _get_relation_key_linking_models(self) -> Tuple[str, str]:
        """
        Extract name and alias of relation columns to use
        in linking between own models and parent models

        :return: tuple of name and alias of relation columns
        :rtype: Tuple[str, str]
        """
        column_name = self.relation_field.get_model_relation_fields(False)
        column_alias = self.relation_field.get_model_relation_fields(True)
        return column_name, column_alias

    def _get_own_models_related_to_parent(
        self, model: "Model", relation_key: Tuple[str, str]
    ) -> List["Model"]:
        """
        Extracts related column values from parent and based on this key gets the
        own grouped models.

        :param model: parent model from parent node
        :type model: Model
        :param relation_key: name and alias linking relations
        :type relation_key: Tuple[str, str]
        :return: list of own models to set on parent
        :rtype: List[Model]
        """
        column_name, column_alias = relation_key
        model_value = getattr(model, column_name)
        if isinstance(model_value, ormar.Model):
            # relation points at a model instance - compare by its pk
            model_value = model_value.pk
        return self.grouped_models.get(model_value, [])
class PrefetchQuery:
    """
    Query used to fetch related models in subsequent queries.
    Each model is fetched only once by the name of the relation.
    That means that for each prefetch_related entry next query is issued to database.
    """

    def __init__(  # noqa: CFQ002
        self,
        model_cls: Type["Model"],
        excludable: "ExcludableItems",
        prefetch_related: List,
        select_related: List,
        orders_by: List["OrderAction"],
    ) -> None:
        self.model = model_cls
        self.excludable = excludable
        # both lists converted to nested dicts to mirror the relation tree
        self.select_dict = translate_list_to_dict(select_related, default={})
        self.prefetch_dict = translate_list_to_dict(prefetch_related, default={})
        self.orders_by = orders_by
        self.load_tasks: List[Node] = []

    async def prefetch_related(self, models: Sequence["Model"]) -> Sequence["Model"]:
        """
        Main entry point for prefetch_query.

        Receives list of already initialized parent models with all children from
        select_related already populated.

        Returns list with related models already prefetched and set.

        :param models: list of already instantiated models from main query
        :type models: Sequence[Model]
        :return: list of models with children prefetched
        :rtype: Sequence[Model]
        """
        parent_task = RootNode(models=cast(List["Model"], models))
        self._build_load_tree(
            prefetch_dict=self.prefetch_dict,
            select_dict=self.select_dict,
            parent=parent_task,
            model=self.model,
        )
        await parent_task.load_data()
        parent_task.reload_tree()
        return parent_task.models

    def _build_load_tree(
        self,
        select_dict: Dict,
        prefetch_dict: Dict,
        parent: Node,
        model: Type["Model"],
    ) -> None:
        """
        Build a tree of already loaded nodes and nodes that need
        to be loaded through the prefetch query.

        :param select_dict: dictionary wth select query structure
        :type select_dict: Dict
        :param prefetch_dict: dictionary with prefetch query structure
        :type prefetch_dict: Dict
        :param parent: parent Node
        :type parent: Node
        :param model: currently processed model
        :type model: Model
        """
        for related in prefetch_dict.keys():
            relation_field = cast(
                "ForeignKeyField", model.ormar_config.model_fields[related]
            )
            # relations already fetched by select_related become
            # AlreadyLoadedNode, everything else needs a LoadNode query
            if related in select_dict:
                task: Node = AlreadyLoadedNode(
                    relation_field=relation_field, parent=parent
                )
            else:
                task = LoadNode(
                    relation_field=relation_field,
                    excludable=self.excludable,
                    orders_by=self.orders_by,
                    parent=parent,
                    source_model=self.model,
                )
            if prefetch_dict:
                self._build_load_tree(
                    select_dict=select_dict.get(related, {}),
                    prefetch_dict=prefetch_dict.get(related, {}),
                    parent=task,
                    model=model.ormar_config.model_fields[related].to,
                )
collerek-ormar-c09209a/ormar/queryset/queries/query.py 0000664 0000000 0000000 00000023041 15130200524 0023146 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union
import sqlalchemy
from sqlalchemy import Column, Select, Table, TextClause
from sqlalchemy.sql import Join
import ormar # noqa I100
from ormar.models.helpers.models import group_related_list
from ormar.queryset.actions.filter_action import FilterAction
from ormar.queryset.join import SqlJoin
from ormar.queryset.queries import FilterQuery, LimitQuery, OffsetQuery, OrderQuery
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.models.excludable import ExcludableItems
from ormar.queryset import OrderAction
class Query:
    # Builds the final SELECT expression for a queryset call: columns,
    # joins for select_related, filters, ordering and pagination.
    def __init__(  # noqa CFQ002
        self,
        model_cls: Type["Model"],
        filter_clauses: List[FilterAction],
        exclude_clauses: List[FilterAction],
        select_related: List,
        limit_count: Optional[int],
        offset: Optional[int],
        excludable: "ExcludableItems",
        order_bys: Optional[List["OrderAction"]],
        limit_raw_sql: bool,
    ) -> None:
        self.query_offset = offset
        self.limit_count = limit_count
        # defensive copies so the caller's lists are never mutated
        self._select_related = select_related[:]
        self.filter_clauses = filter_clauses[:]
        self.exclude_clauses = exclude_clauses[:]
        self.excludable = excludable

        self.model_cls = model_cls
        self.table = self.model_cls.ormar_config.table

        self.used_aliases: List[str] = []

        self.select_from: Union[Join, Table, List[str]] = []
        self.columns: List[Column] = []
        self.order_columns = order_bys
        self.sorted_orders: Dict[OrderAction, TextClause] = {}
        self._init_sorted_orders()

        # when True, limit/offset go on the whole raw sql instead of the
        # main-model pagination subquery
        self.limit_raw_sql = limit_raw_sql

    def _init_sorted_orders(self) -> None:
        """
        Initialize empty order_by dict to be populated later during the query call
        """
        if self.order_columns:
            for clause in self.order_columns:
                # None placeholders keep the user-provided ordering of keys
                self.sorted_orders[clause] = None  # type: ignore

    def apply_order_bys_for_primary_model(self) -> None:  # noqa: CCR001
        """
        Applies order_by queries on main model when it's used as a subquery.
        That way the subquery with limit and offset only on main model has proper
        sorting applied and correct models are fetched.
        """
        current_table_sorted = False
        if self.order_columns:
            for clause in self.order_columns:
                if clause.is_source_model_order:
                    current_table_sorted = True
                    self.sorted_orders[clause] = clause.get_text_clause()

        if not current_table_sorted:
            self._apply_default_model_sorting()

    def _apply_default_model_sorting(self) -> None:
        """
        Applies orders_by from model OrmarConfig (if provided), if it was not provided
        it was filled by metaclass, so it's always there and falls back to pk column
        """
        for order_by in self.model_cls.ormar_config.orders_by:
            clause = ormar.OrderAction(order_str=order_by, model_cls=self.model_cls)
            self.sorted_orders[clause] = clause.get_text_clause()

    def _pagination_query_required(self) -> bool:
        """
        Checks if limit or offset are set, the flag limit_sql_raw is not set
        and query has select_related applied. Otherwise we can limit/offset normally
        at the end of whole query.

        :return: result of the check
        :rtype: bool
        """
        return bool(
            (self.limit_count or self.query_offset)
            and not self.limit_raw_sql
            and self._select_related
        )

    def build_select_expression(self) -> sqlalchemy.sql.Select:
        """
        Main entry point from outside (after proper initialization).

        Extracts columns list to fetch,
        construct all required joins for select related,
        then applies all conditional and sort clauses.

        Returns ready to run query with all joins and clauses.

        :return: ready to run query with all joins and clauses.
        :rtype: sqlalchemy.sql.selectable.Select
        """
        self_related_fields = self.model_cls.own_table_columns(
            model=self.model_cls, excludable=self.excludable, use_alias=True
        )
        self.columns = self.model_cls.ormar_config.alias_manager.prefixed_columns(  # type: ignore
            "", self.table, self_related_fields
        )
        self.apply_order_bys_for_primary_model()
        self.select_from = self.table

        # groups flat "a__b__c" strings into a nested relation structure
        related_models = group_related_list(self._select_related)

        for related in related_models:
            remainder = None
            if isinstance(related_models, dict) and related_models[related]:
                remainder = related_models[related]
            sql_join = SqlJoin(
                used_aliases=self.used_aliases,
                select_from=self.select_from,  # type: ignore
                columns=self.columns,
                excludable=self.excludable,
                order_columns=self.order_columns,
                sorted_orders=self.sorted_orders,
                main_model=self.model_cls,
                relation_name=related,
                relation_str=related,
                related_models=remainder,
            )
            # each join updates the shared query-building state
            (
                self.used_aliases,
                self.select_from,
                self.columns,
                self.sorted_orders,
            ) = sql_join.build_join()  # type: ignore

        if self._pagination_query_required():
            # limit/offset must be applied to the main model only, so an
            # inner "limit_query" subquery selects the allowed pk values
            limit_qry, on_clause = self._build_pagination_condition()
            self.select_from = sqlalchemy.sql.join(
                self.select_from, limit_qry, on_clause
            )

        expr = sqlalchemy.sql.select(*self.columns)
        expr = expr.select_from(self.select_from)

        expr = self._apply_expression_modifiers(expr)

        # print("\n", expr.compile(compile_kwargs={"literal_binds": True}))
        self._reset_query_parameters()

        return expr

    def _build_pagination_condition(
        self,
    ) -> Tuple[
        sqlalchemy.sql.expression.TextClause, sqlalchemy.sql.expression.TextClause
    ]:
        """
        In order to apply limit and offset on main table in join only
        (otherwise you can get only partially constructed main model
        if number of children exceeds the applied limit and select_related is used)

        Used also to change first and get() without argument behaviour.
        Needed only if limit or offset are set, the flag limit_sql_raw is not set
        and query has select_related applied. Otherwise we can limit/offset normally
        at the end of whole query.

        The condition is added to filters to filter out desired number of main model
        primary key values. Whole query is used to determine the values.
        """
        pk_alias = self.model_cls.get_column_alias(self.model_cls.ormar_config.pkname)
        pk_aliased_name = f"{self.table.name}.{pk_alias}"
        qry_text = sqlalchemy.text(f"{pk_aliased_name}")
        # collect one aggregate/order expression per sorted column so the
        # grouped-by-pk subquery keeps the requested ordering
        maxes = {}
        for order in list(self.sorted_orders.keys()):
            # NOTE(review): the `order is not None` guard looks redundant -
            # keys appear to always be OrderAction objects (values may be
            # None); the elif would fail on a None key anyway
            if order is not None and order.get_field_name_text() != pk_aliased_name:
                aliased_col = order.get_field_name_text()
                # maxes[aliased_col] = order.get_text_clause()
                maxes[aliased_col] = order.get_min_or_max()
            elif order.get_field_name_text() == pk_aliased_name:
                maxes[pk_aliased_name] = order.get_text_clause()
        limit_qry: Select[Any] = sqlalchemy.sql.select(qry_text)
        limit_qry = limit_qry.select_from(self.select_from)  # type: ignore
        limit_qry = FilterQuery(filter_clauses=self.filter_clauses).apply(limit_qry)
        limit_qry = FilterQuery(
            filter_clauses=self.exclude_clauses, exclude=True
        ).apply(limit_qry)
        # group by pk so each main model row is counted once despite joins
        limit_qry = limit_qry.group_by(qry_text)
        for order_by in maxes.values():
            limit_qry = limit_qry.order_by(order_by)
        limit_qry = LimitQuery(limit_count=self.limit_count).apply(limit_qry)
        limit_qry = OffsetQuery(query_offset=self.query_offset).apply(limit_qry)
        limit_qry = limit_qry.alias("limit_query")  # type: ignore
        on_clause = sqlalchemy.text(
            f"limit_query.{pk_alias}={self.table.name}.{pk_alias}"
        )
        return limit_qry, on_clause  # type: ignore

    def _apply_expression_modifiers(
        self, expr: sqlalchemy.sql.Select
    ) -> sqlalchemy.sql.Select:
        """
        Receives the select query (might be join) and applies:
        * Filter clauses
        * Exclude filter clauses
        * Limit clauses
        * Offset clauses
        * Order by clauses

        Returns complete ready to run query.

        :param expr: select expression before clauses
        :type expr: sqlalchemy.sql.selectable.Select
        :return: expression with all present clauses applied
        :rtype: sqlalchemy.sql.selectable.Select
        """
        expr = FilterQuery(filter_clauses=self.filter_clauses).apply(expr)
        expr = FilterQuery(filter_clauses=self.exclude_clauses, exclude=True).apply(
            expr
        )
        # limit/offset already handled by the pagination subquery if required
        if not self._pagination_query_required():
            expr = LimitQuery(limit_count=self.limit_count).apply(expr)
            expr = OffsetQuery(query_offset=self.query_offset).apply(expr)
        expr = OrderQuery(sorted_orders=self.sorted_orders).apply(expr)
        return expr

    def _reset_query_parameters(self) -> None:
        """
        Although it should be created each time before the call we reset the key params
        anyway.
        """
        self.select_from = []
        self.columns = []
        self.used_aliases = []
collerek-ormar-c09209a/ormar/queryset/queryset.py 0000664 0000000 0000000 00000134700 15130200524 0022212 0 ustar 00root root 0000000 0000000 import asyncio
from typing import (
TYPE_CHECKING,
Any,
AsyncGenerator,
Dict,
Generic,
List,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
import databases
import sqlalchemy
from sqlalchemy import bindparam
import ormar # noqa I100
from ormar import MultipleMatches, NoMatch
from ormar.exceptions import (
ModelListEmptyError,
ModelPersistenceError,
QueryDefinitionError,
)
from ormar.queryset import FieldAccessor, FilterQuery, SelectAction
from ormar.queryset.actions.order_action import OrderAction
from ormar.queryset.clause import FilterGroup, QueryClause
from ormar.queryset.queries.prefetch_query import PrefetchQuery
from ormar.queryset.queries.query import Query
from ormar.queryset.reverse_alias_resolver import ReverseAliasResolver
if TYPE_CHECKING: # pragma no cover
from ormar import Model
from ormar.models import T
from ormar.models.excludable import ExcludableItems
from ormar.models.ormar_config import OrmarConfig
else:
T = TypeVar("T", bound="Model")
class QuerySet(Generic[T]):
"""
Main class to perform database queries, exposed on each model as objects attribute.
"""
def __init__( # noqa CFQ002
self,
model_cls: Optional[Type["T"]] = None,
filter_clauses: Optional[List] = None,
exclude_clauses: Optional[List] = None,
select_related: Optional[List] = None,
limit_count: Optional[int] = None,
offset: Optional[int] = None,
excludable: Optional["ExcludableItems"] = None,
order_bys: Optional[List] = None,
prefetch_related: Optional[List] = None,
limit_raw_sql: bool = False,
proxy_source_model: Optional[Type["Model"]] = None,
) -> None:
self.proxy_source_model = proxy_source_model
self.model_cls = model_cls
self.filter_clauses = [] if filter_clauses is None else filter_clauses
self.exclude_clauses = [] if exclude_clauses is None else exclude_clauses
self._select_related = [] if select_related is None else select_related
self._prefetch_related = [] if prefetch_related is None else prefetch_related
self.limit_count = limit_count
self.query_offset = offset
self._excludable = excludable or ormar.ExcludableItems()
self.order_bys = order_bys or []
self.limit_sql_raw = limit_raw_sql
@property
def model_config(self) -> "OrmarConfig":
"""
Shortcut to model class OrmarConfig set on QuerySet model.
:return: OrmarConfig of the model
:rtype: model's OrmarConfig
"""
if not self.model_cls: # pragma nocover
raise ValueError("Model class of QuerySet is not initialized")
return self.model_cls.ormar_config
@property
def model(self) -> Type["T"]:
"""
Shortcut to model class set on QuerySet.
:return: model class
:rtype: Type[Model]
"""
if not self.model_cls: # pragma nocover
raise ValueError("Model class of QuerySet is not initialized")
return self.model_cls
    def rebuild_self(  # noqa: CFQ002
        self,
        filter_clauses: Optional[List] = None,
        exclude_clauses: Optional[List] = None,
        select_related: Optional[List] = None,
        limit_count: Optional[int] = None,
        offset: Optional[int] = None,
        excludable: Optional["ExcludableItems"] = None,
        order_bys: Optional[List] = None,
        prefetch_related: Optional[List] = None,
        limit_raw_sql: Optional[bool] = None,
        proxy_source_model: Optional[Type["Model"]] = None,
    ) -> "QuerySet":
        """
        Method that returns new instance of queryset based on passed params,
        all not passed params are taken from current values.
        """
        # Maps parameter names to the instance attribute they default to,
        # for the cases where the attribute name differs from the parameter.
        overwrites = {
            "select_related": "_select_related",
            "offset": "query_offset",
            "excludable": "_excludable",
            "prefetch_related": "_prefetch_related",
            "limit_raw_sql": "limit_sql_raw",
        }
        # Snapshot of the call arguments by name. It also contains `self` and
        # `overwrites`, which is harmless since only parameter names are ever
        # looked up below.
        passed_args = locals()

        def replace_if_none(arg_name: str) -> Any:
            # None means "not passed" - fall back to the current instance
            # value (falsy-but-not-None values like 0 or [] are kept as-is).
            if passed_args.get(arg_name) is None:
                return getattr(self, overwrites.get(arg_name, arg_name))
            return passed_args.get(arg_name)

        return self.__class__(
            model_cls=self.model_cls,
            filter_clauses=replace_if_none("filter_clauses"),
            exclude_clauses=replace_if_none("exclude_clauses"),
            select_related=replace_if_none("select_related"),
            limit_count=replace_if_none("limit_count"),
            offset=replace_if_none("offset"),
            excludable=replace_if_none("excludable"),
            order_bys=replace_if_none("order_bys"),
            prefetch_related=replace_if_none("prefetch_related"),
            limit_raw_sql=replace_if_none("limit_raw_sql"),
            proxy_source_model=replace_if_none("proxy_source_model"),
        )
async def _prefetch_related_models(
self, models: List["T"], rows: List
) -> List["T"]:
"""
Performs prefetch query for selected models names.
:param models: list of already parsed main Models from main query
:type models: List[Model]
:param rows: database rows from main query
:type rows: List[sqlalchemy.engine.result.RowProxy]
:return: list of models with prefetch models populated
:rtype: List[Model]
"""
query = PrefetchQuery(
model_cls=self.model,
excludable=self._excludable,
prefetch_related=self._prefetch_related,
select_related=self._select_related,
orders_by=self.order_bys,
)
return await query.prefetch_related(models=models) # type: ignore
async def _process_query_result_rows(self, rows: List) -> List["T"]:
"""
Process database rows and initialize ormar Model from each of the rows.
:param rows: list of database rows from query result
:type rows: List[sqlalchemy.engine.result.RowProxy]
:return: list of models
:rtype: List[Model]
"""
result_rows = []
for row in rows:
result_rows.append(
self.model.from_row(
row=row,
select_related=self._select_related,
excludable=self._excludable,
source_model=self.model,
proxy_source_model=self.proxy_source_model,
)
)
await asyncio.sleep(0)
if result_rows:
return self.model.merge_instances_list(result_rows) # type: ignore
return cast(List["T"], result_rows)
def _resolve_filter_groups(
self, groups: Any
) -> Tuple[List[FilterGroup], List[str]]:
"""
Resolves filter groups to populate FilterAction params in group tree.
:param groups: tuple of FilterGroups
:type groups: Any
:return: list of resolver groups
:rtype: Tuple[List[FilterGroup], List[str]]
"""
filter_groups = []
select_related = self._select_related
if groups:
for group in groups:
if not isinstance(group, FilterGroup):
raise QueryDefinitionError(
"Only ormar.and_ and ormar.or_ "
"can be passed as filter positional"
" arguments,"
"other values need to be passed by"
"keyword arguments"
)
_, select_related = group.resolve(
model_cls=self.model,
select_related=self._select_related,
filter_clauses=self.filter_clauses,
)
filter_groups.append(group)
return filter_groups, select_related
@staticmethod
def check_single_result_rows_count(rows: Sequence[Optional["T"]]) -> None:
"""
Verifies if the result has one and only one row.
:param rows: one element list of Models
:type rows: List[Model]
"""
if not rows or rows[0] is None:
raise NoMatch()
if len(rows) > 1:
raise MultipleMatches()
@property
def database(self) -> databases.Database:
"""
Shortcut to models database from OrmarConfig class.
:return: database
:rtype: databases.Database
"""
return self.model_config.database
@property
def table(self) -> sqlalchemy.Table:
"""
Shortcut to models table from OrmarConfig.
:return: database table
:rtype: sqlalchemy.Table
"""
return self.model_config.table
def build_select_expression(
self,
limit: Optional[int] = None,
offset: Optional[int] = None,
order_bys: Optional[List] = None,
) -> sqlalchemy.sql.Select:
"""
Constructs the actual database query used in the QuerySet.
If any of the params is not passed the QuerySet own value is used.
:param limit: number to limit the query
:type limit: int
:param offset: number to offset by
:type offset: int
:param order_bys: list of order-by fields names
:type order_bys: List
:return: built sqlalchemy select expression
:rtype: sqlalchemy.sql.selectable.Select
"""
qry = Query(
model_cls=self.model,
select_related=self._select_related,
filter_clauses=self.filter_clauses,
exclude_clauses=self.exclude_clauses,
offset=offset or self.query_offset,
excludable=self._excludable,
order_bys=order_bys or self.order_bys,
limit_raw_sql=self.limit_sql_raw,
limit_count=limit if limit is not None else self.limit_count,
)
exp = qry.build_select_expression()
# print("\n", exp.compile(compile_kwargs={"literal_binds": True}))
return exp
def filter( # noqa: A003
self, *args: Any, _exclude: bool = False, **kwargs: Any
) -> "QuerySet[T]":
"""
Allows you to filter by any `Model` attribute/field
as well as to fetch instances, with a filter across an FK relationship.
You can use special filter suffix to change the filter operands:
* exact - like `album__name__exact='Malibu'` (exact match)
* iexact - like `album__name__iexact='malibu'` (exact match case insensitive)
* contains - like `album__name__contains='Mal'` (sql like)
* icontains - like `album__name__icontains='mal'` (sql like case insensitive)
* in - like `album__name__in=['Malibu', 'Barclay']` (sql in)
* isnull - like `album__name__isnull=True` (sql is null)
(isnotnull `album__name__isnull=False` (sql is not null))
* gt - like `position__gt=3` (sql >)
* gte - like `position__gte=3` (sql >=)
* lt - like `position__lt=3` (sql <)
* lte - like `position__lte=3` (sql <=)
* startswith - like `album__name__startswith='Mal'` (exact start match)
* istartswith - like `album__name__istartswith='mal'` (case insensitive)
* endswith - like `album__name__endswith='ibu'` (exact end match)
* iendswith - like `album__name__iendswith='IBU'` (case insensitive)
Note that you can also use python style filters - check the docs!
:param _exclude: flag if it should be exclude or filter
:type _exclude: bool
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: filtered QuerySet
:rtype: QuerySet
"""
filter_groups, select_related = self._resolve_filter_groups(groups=args)
qryclause = QueryClause(
model_cls=self.model,
select_related=select_related,
filter_clauses=self.filter_clauses,
)
filter_clauses, select_related = qryclause.prepare_filter(**kwargs)
filter_clauses = filter_clauses + filter_groups # type: ignore
if _exclude:
exclude_clauses = filter_clauses
filter_clauses = self.filter_clauses
else:
exclude_clauses = self.exclude_clauses
filter_clauses = filter_clauses
return self.rebuild_self(
filter_clauses=filter_clauses,
exclude_clauses=exclude_clauses,
select_related=select_related,
)
def exclude(self, *args: Any, **kwargs: Any) -> "QuerySet[T]": # noqa: A003
"""
Works exactly the same as filter and all modifiers (suffixes) are the same,
but returns a *not* condition.
So if you use `filter(name='John')` which is `where name = 'John'` in SQL,
the `exclude(name='John')` equals to `where name <> 'John'`
Note that all conditions are joined so if you pass multiple values it
becomes a union of conditions.
`exclude(name='John', age>=35)` will become
`where not (name='John' and age>=35)`
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: filtered QuerySet
:rtype: QuerySet
"""
return self.filter(_exclude=True, *args, **kwargs)
def select_related(self, related: Union[List, str, FieldAccessor]) -> "QuerySet[T]":
"""
Allows to prefetch related models during the same query.
**With `select_related` always only one query is run against the database**,
meaning that one (sometimes complicated) join is generated and later nested
models are processed in python.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
:param related: list of relation field names, can be linked by '__' to nest
:type related: Union[List, str]
:return: QuerySet
:rtype: QuerySet
"""
if not isinstance(related, list):
related = [related]
related = [
rel._access_chain if isinstance(rel, FieldAccessor) else rel
for rel in related
]
related = sorted(list(set(list(self._select_related) + related)))
return self.rebuild_self(select_related=related)
def select_all(self, follow: bool = False) -> "QuerySet[T]":
"""
By default adds only directly related models.
If follow=True is set it adds also related models of related models.
To not get stuck in an infinite loop as related models also keep a relation
to parent model visited models set is kept.
That way already visited models that are nested are loaded, but the load do not
follow them inside. So Model A -> Model B -> Model C -> Model A -> Model X
will load second Model A but will never follow into Model X.
Nested relations of those kind need to be loaded manually.
:param follow: flag to trigger deep save -
by default only directly related models are saved
with follow=True also related models of related models are saved
:type follow: bool
:return: reloaded Model
:rtype: Model
"""
relations = list(self.model.extract_related_names())
if follow:
relations = self.model._iterate_related_models()
return self.rebuild_self(select_related=relations)
def prefetch_related(
self, related: Union[List, str, FieldAccessor]
) -> "QuerySet[T]":
"""
Allows to prefetch related models during query - but opposite to
`select_related` each subsequent model is fetched in a separate database query.
**With `prefetch_related` always one query per Model is run against the
database**, meaning that you will have multiple queries executed one
after another.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
:param related: list of relation field names, can be linked by '__' to nest
:type related: Union[List, str]
:return: QuerySet
:rtype: QuerySet
"""
if not isinstance(related, list):
related = [related]
related = [
rel._access_chain if isinstance(rel, FieldAccessor) else rel
for rel in related
]
related = list(set(list(self._prefetch_related) + related))
return self.rebuild_self(prefetch_related=related)
def fields(
self, columns: Union[List, str, Set, Dict], _is_exclude: bool = False
) -> "QuerySet[T]":
"""
With `fields()` you can select subset of model columns to limit the data load.
Note that `fields()` and `exclude_fields()` works both for main models
(on normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related`
models (with nested notation).
You can select specified fields by passing a `str, List[str], Set[str] or
dict` with nested definition.
To include related models use notation
`{related_name}__{column}[__{optional_next} etc.]`.
`fields()` can be called several times, building up the columns to select.
If you include related models into `select_related()` call but you won't specify
columns for those models in fields - implies a list of all fields for
those nested models.
Mandatory fields cannot be excluded as it will raise `ValidationError`,
to exclude a field it has to be nullable.
Pk column cannot be excluded - it's always auto added even if
not explicitly included.
You can also pass fields to include as dictionary or set.
To mark a field as included in a dictionary use it's name as key
and ellipsis as value.
To traverse nested models use nested dictionaries.
To include fields at last level instead of nested dictionary a set can be used.
To include whole nested model specify model related field name and ellipsis.
:param _is_exclude: flag if it's exclude or include operation
:type _is_exclude: bool
:param columns: columns to include
:type columns: Union[List, str, Set, Dict]
:return: QuerySet
:rtype: QuerySet
"""
excludable = ormar.ExcludableItems.from_excludable(self._excludable)
excludable.build(
items=columns,
model_cls=self.model_cls, # type: ignore
is_exclude=_is_exclude,
)
return self.rebuild_self(excludable=excludable)
def exclude_fields(self, columns: Union[List, str, Set, Dict]) -> "QuerySet[T]":
"""
With `exclude_fields()` you can select subset of model columns that will
be excluded to limit the data load.
It's the opposite of `fields()` method so check documentation above
to see what options are available.
Especially check above how you can pass also nested dictionaries
and sets as a mask to exclude fields from whole hierarchy.
Note that `fields()` and `exclude_fields()` works both for main models
(on normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related` models
(with nested notation).
Mandatory fields cannot be excluded as it will raise `ValidationError`,
to exclude a field it has to be nullable.
Pk column cannot be excluded - it's always auto added even
if explicitly excluded.
:param columns: columns to exclude
:type columns: Union[List, str, Set, Dict]
:return: QuerySet
:rtype: QuerySet
"""
return self.fields(columns=columns, _is_exclude=True)
def order_by(self, columns: Union[List, str, OrderAction]) -> "QuerySet[T]":
"""
With `order_by()` you can order the results from database based on your
choice of fields.
You can provide a string with field name or list of strings with fields names.
Ordering in sql will be applied in order of names you provide in order_by.
By default if you do not provide ordering `ormar` explicitly orders by
all primary keys
If you are sorting by nested models that causes that the result rows are
unsorted by the main model `ormar` will combine those children rows into
one main model.
The main model will never duplicate in the result
To order by main model field just provide a field name
To sort on nested models separate field names with dunder '__'.
You can sort this way across all relation types -> `ForeignKey`,
reverse virtual FK and `ManyToMany` fields.
To sort in descending order provide a hyphen in front of the field name
:param columns: columns by which models should be sorted
:type columns: Union[List, str]
:return: QuerySet
:rtype: QuerySet
"""
if not isinstance(columns, list):
columns = [columns]
orders_by = [
(
OrderAction(order_str=x, model_cls=self.model_cls) # type: ignore
if not isinstance(x, OrderAction)
else x
)
for x in columns
]
order_bys = self.order_bys + [x for x in orders_by if x not in self.order_bys]
return self.rebuild_self(order_bys=order_bys)
async def values(
self,
fields: Union[List, str, Set, Dict, None] = None,
exclude_through: bool = False,
_as_dict: bool = True,
_flatten: bool = False,
) -> List:
"""
Return a list of dictionaries with column values in order of the fields
passed or all fields from queried models.
To filter for given row use filter/exclude methods before values,
to limit number of rows use limit/offset or paginate before values.
Note that it always return a list even for one row from database.
:param exclude_through: flag if through models should be excluded
:type exclude_through: bool
:param _flatten: internal parameter to flatten one element tuples
:type _flatten: bool
:param _as_dict: internal parameter if return dict or tuples
:type _as_dict: bool
:param fields: field name or list of field names to extract from db
:type fields: Union[List, str, Set, Dict]
"""
if fields:
return await self.fields(columns=fields).values(
_as_dict=_as_dict, _flatten=_flatten, exclude_through=exclude_through
)
expr = self.build_select_expression()
rows = await self.database.fetch_all(expr)
if not rows:
return []
alias_resolver = ReverseAliasResolver(
select_related=self._select_related,
excludable=self._excludable,
model_cls=self.model_cls, # type: ignore
exclude_through=exclude_through,
)
column_map = alias_resolver.resolve_columns(columns_names=list(rows[0].keys())) # type: ignore
result = [
{column_map.get(k): v for k, v in dict(x).items() if k in column_map}
for x in rows
]
if _as_dict:
return result
if _flatten and self._excludable.include_entry_count() != 1:
raise QueryDefinitionError(
"You cannot flatten values_list if more than one field is selected!"
)
tuple_result = [tuple(x.values()) for x in result]
return tuple_result if not _flatten else [x[0] for x in tuple_result]
async def values_list(
self,
fields: Union[List, str, Set, Dict, None] = None,
flatten: bool = False,
exclude_through: bool = False,
) -> List:
"""
Return a list of tuples with column values in order of the fields passed or
all fields from queried models.
When one field is passed you can flatten the list of tuples into list of values
of that single field.
To filter for given row use filter/exclude methods before values,
to limit number of rows use limit/offset or paginate before values.
Note that it always return a list even for one row from database.
:param exclude_through: flag if through models should be excluded
:type exclude_through: bool
:param fields: field name or list of field names to extract from db
:type fields: Union[str, List[str]]
:param flatten: when one field is passed you can flatten the list of tuples
:type flatten: bool
"""
return await self.values(
fields=fields,
exclude_through=exclude_through,
_as_dict=False,
_flatten=flatten,
)
async def exists(self) -> bool:
"""
Returns a bool value to confirm if there are rows matching the given criteria
(applied with `filter` and `exclude` if set).
:return: result of the check
:rtype: bool
"""
expr = self.build_select_expression()
expr = sqlalchemy.exists(expr).select()
return await self.database.fetch_val(expr)
async def count(self, distinct: bool = True) -> int:
"""
Returns number of rows matching the given criteria
(applied with `filter` and `exclude` if set before).
If `distinct` is `True` (the default), this will return
the number of primary rows selected. If `False`,
the count will be the total number of rows returned
(including extra rows for `one-to-many` or `many-to-many`
left `select_related` table joins).
`False` is the legacy (buggy) behavior for workflows that depend on it.
:param distinct: flag if the primary table rows should be distinct or not
:return: number of rows
:rtype: int
"""
expr = self.build_select_expression().alias("subquery_for_count")
expr = sqlalchemy.func.count().select().select_from(expr) # type: ignore
if distinct:
pk_column_name = self.model.get_column_alias(self.model_config.pkname)
expr_distinct = expr.group_by(pk_column_name).alias("subquery_for_group") # type: ignore
expr = sqlalchemy.func.count().select().select_from(expr_distinct) # type: ignore
return await self.database.fetch_val(expr)
async def _query_aggr_function(self, func_name: str, columns: List) -> Any:
func = getattr(sqlalchemy.func, func_name)
select_actions = [
SelectAction(select_str=column, model_cls=self.model) for column in columns
]
if func_name in ["sum", "avg"]:
if any(not x.is_numeric for x in select_actions):
raise QueryDefinitionError(
"You can use sum and svg only with" "numeric types of columns"
)
select_columns = [x.apply_func(func, use_label=True) for x in select_actions]
expr = self.build_select_expression().alias(f"subquery_for_{func_name}")
expr = sqlalchemy.select(*select_columns).select_from(expr) # type: ignore
# print("\n", expr.compile(compile_kwargs={"literal_binds": True}))
result = await self.database.fetch_one(expr)
return dict(result) if len(result) > 1 else result[0] # type: ignore
async def max(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns max value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: max value of column(s)
:rtype: Any
"""
if not isinstance(columns, list):
columns = [columns]
return await self._query_aggr_function(func_name="max", columns=columns)
async def min(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns min value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: min value of column(s)
:rtype: Any
"""
if not isinstance(columns, list):
columns = [columns]
return await self._query_aggr_function(func_name="min", columns=columns)
async def sum(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns sum value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: sum value of columns
:rtype: int
"""
if not isinstance(columns, list):
columns = [columns]
return await self._query_aggr_function(func_name="sum", columns=columns)
async def avg(self, columns: Union[str, List[str]]) -> Any:
"""
Returns avg value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: avg value of columns
:rtype: Union[int, float, List]
"""
if not isinstance(columns, list):
columns = [columns]
return await self._query_aggr_function(func_name="avg", columns=columns)
async def update(self, each: bool = False, **kwargs: Any) -> int:
"""
Updates the model table after applying the filters from kwargs.
You have to either pass a filter to narrow down a query or explicitly pass
each=True flag to affect whole table.
:param each: flag if whole table should be affected if no filter is passed
:type each: bool
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: number of updated rows
:rtype: int
"""
if not each and not (self.filter_clauses or self.exclude_clauses):
raise QueryDefinitionError(
"You cannot update without filtering the queryset first. "
"If you want to update all rows use update(each=True, **kwargs)"
)
self_fields = self.model.extract_db_own_fields().union(
self.model.extract_related_names()
)
updates = {k: v for k, v in kwargs.items() if k in self_fields}
updates = self.model.validate_enums(updates)
updates = self.model.translate_columns_to_aliases(updates)
expr = FilterQuery(filter_clauses=self.filter_clauses).apply(
self.table.update().values(**updates) # type: ignore
)
expr = FilterQuery(filter_clauses=self.exclude_clauses, exclude=True).apply(
expr
)
return await self.database.execute(expr)
async def delete(self, *args: Any, each: bool = False, **kwargs: Any) -> int:
"""
Deletes from the model table after applying the filters from kwargs.
You have to either pass a filter to narrow down a query or explicitly pass
each=True flag to affect whole table.
:param each: flag if whole table should be affected if no filter is passed
:type each: bool
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: number of deleted rows
:rtype:int
"""
if kwargs or args:
return await self.filter(*args, **kwargs).delete()
if not each and not (self.filter_clauses or self.exclude_clauses):
raise QueryDefinitionError(
"You cannot delete without filtering the queryset first. "
"If you want to delete all rows use delete(each=True)"
)
expr = FilterQuery(filter_clauses=self.filter_clauses).apply(
self.table.delete() # type: ignore
)
expr = FilterQuery(filter_clauses=self.exclude_clauses, exclude=True).apply(
expr
)
return await self.database.execute(expr)
def paginate(self, page: int, page_size: int = 20) -> "QuerySet[T]":
"""
You can paginate the result which is a combination of offset and limit clauses.
Limit is set to page size and offset is set to (page-1) * page_size.
:param page_size: numbers of items per page
:type page_size: int
:param page: page number
:type page: int
:return: QuerySet
:rtype: QuerySet
"""
if page < 1 or page_size < 1:
raise QueryDefinitionError("Page size and page have to be greater than 0.")
limit_count = page_size
query_offset = (page - 1) * page_size
return self.rebuild_self(limit_count=limit_count, offset=query_offset)
def limit(
self, limit_count: int, limit_raw_sql: Optional[bool] = None
) -> "QuerySet[T]":
"""
You can limit the results to desired number of parent models.
To limit the actual number of database query rows instead of number of main
models use the `limit_raw_sql` parameter flag, and set it to `True`.
:param limit_raw_sql: flag if raw sql should be limited
:type limit_raw_sql: bool
:param limit_count: number of models to limit
:type limit_count: int
:return: QuerySet
:rtype: QuerySet
"""
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(limit_count=limit_count, limit_raw_sql=limit_raw_sql)
def offset(
self, offset: int, limit_raw_sql: Optional[bool] = None
) -> "QuerySet[T]":
"""
You can also offset the results by desired number of main models.
To offset the actual number of database query rows instead of number of main
models use the `limit_raw_sql` parameter flag, and set it to `True`.
:param limit_raw_sql: flag if raw sql should be offset
:type limit_raw_sql: bool
:param offset: numbers of models to offset
:type offset: int
:return: QuerySet
:rtype: QuerySet
"""
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(offset=offset, limit_raw_sql=limit_raw_sql)
async def first(self, *args: Any, **kwargs: Any) -> "T":
"""
Gets the first row from the db ordered by primary key column ascending.
:raises NoMatch: if no rows are returned
:raises MultipleMatches: if more than 1 row is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
if kwargs or args:
return await self.filter(*args, **kwargs).first()
expr = self.build_select_expression(
limit=1,
order_bys=(
[
OrderAction(
order_str=f"{self.model.ormar_config.pkname}",
model_cls=self.model_cls, # type: ignore
)
]
if not any([x.is_source_model_order for x in self.order_bys])
else []
)
+ self.order_bys,
)
rows = await self.database.fetch_all(expr)
processed_rows = await self._process_query_result_rows(rows)
if self._prefetch_related and processed_rows:
processed_rows = await self._prefetch_related_models(processed_rows, rows)
self.check_single_result_rows_count(processed_rows)
return processed_rows[0] # type: ignore
async def first_or_none(self, *args: Any, **kwargs: Any) -> Optional["T"]:
"""
Gets the first row from the db ordered by primary key column ascending.
If no match is found None will be returned.
:raises MultipleMatches: if more than 1 row is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
try:
return await self.first(*args, **kwargs)
except ormar.NoMatch:
return None
async def get_or_none(self, *args: Any, **kwargs: Any) -> Optional["T"]:
"""
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria set it will return the last row in db sorted by pk.
Passing a criteria is actually calling filter(*args, **kwargs) method described
below.
If no match is found None will be returned.
:raises MultipleMatches: if more than 1 row is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
try:
return await self.get(*args, **kwargs)
except ormar.NoMatch:
return None
async def get(self, *args: Any, **kwargs: Any) -> "T": # noqa: CCR001
"""
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria set it will return the last row in db sorted by pk.
Passing a criteria is actually calling filter(*args, **kwargs) method described
below.
:raises NoMatch: if no rows are returned
:raises MultipleMatches: if more than 1 row is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
if kwargs or args:
return await self.filter(*args, **kwargs).get()
if not self.filter_clauses:
expr = self.build_select_expression(
limit=1,
order_bys=(
[
OrderAction(
order_str=f"-{self.model.ormar_config.pkname}",
model_cls=self.model_cls, # type: ignore
)
]
if not any([x.is_source_model_order for x in self.order_bys])
else []
)
+ self.order_bys,
)
else:
expr = self.build_select_expression()
rows = await self.database.fetch_all(expr)
processed_rows = await self._process_query_result_rows(rows)
if self._prefetch_related and processed_rows:
processed_rows = await self._prefetch_related_models(processed_rows, rows)
self.check_single_result_rows_count(processed_rows)
return processed_rows[0] # type: ignore
async def get_or_create(
    self,
    _defaults: Optional[Dict[str, Any]] = None,
    *args: Any,
    **kwargs: Any,
) -> Tuple["T", bool]:
    """
    Combination of create and get methods.
    Tries to get a row meeting the criteria for kwargs
    and if `NoMatch` exception is raised
    it creates a new one with given kwargs and _defaults.
    Passing a criteria is actually calling filter(*args, **kwargs) method described
    below.
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :param _defaults: default values for creating object
    :type _defaults: Optional[Dict[str, Any]]
    :return: model instance and a boolean flag (True if the row was created)
    :rtype: Tuple("T", bool)
    """
    try:
        existing = await self.get(*args, **kwargs)
    except NoMatch:
        # not found - build creation kwargs; _defaults take precedence
        creation_kwargs = dict(kwargs)
        creation_kwargs.update(_defaults or {})
        created = await self.create(**creation_kwargs)
        return created, True
    return existing, False
async def update_or_create(self, **kwargs: Any) -> "T":
    """
    Updates the model, or in case there is no match in database creates a new one.
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: updated or created model
    :rtype: Model
    """
    pk_name = self.model_config.pkname
    # allow the generic "pk" alias for the real primary key column
    if "pk" in kwargs:
        kwargs[pk_name] = kwargs.pop("pk")
    pk_value = kwargs.get(pk_name)
    if pk_value is None:
        # no pk provided -> this must be a brand new row
        return await self.create(**kwargs)
    existing = await self.get(pk=pk_value)
    return await existing.update(**kwargs)
async def all(self, *args: Any, **kwargs: Any) -> List["T"]:  # noqa: A003
    """
    Returns all rows from a database for given model for set filter options.
    Passing args and/or kwargs is a shortcut and equals to calling
    `filter(*args, **kwargs).all()`.
    If there are no rows meeting the criteria an empty list is returned.
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: list of returned models
    :rtype: List[Model]
    """
    if args or kwargs:
        # shortcut - apply the criteria through filter() and recurse
        return await self.filter(*args, **kwargs).all()
    expr = self.build_select_expression()
    raw_rows = await self.database.fetch_all(expr)
    models = await self._process_query_result_rows(raw_rows)
    if self._prefetch_related and models:
        # load prefetch_related models with separate queries and link them
        models = await self._prefetch_related_models(models, raw_rows)
    return models
async def iterate(  # noqa: A003
    self,
    *args: Any,
    **kwargs: Any,
) -> AsyncGenerator["T", None]:
    """
    Return async iterable generator for all rows from a database for given model.
    Passing args and/or kwargs is a shortcut and equals to calling
    `filter(*args, **kwargs).iterate()`.
    If there are no rows meeting the criteria an empty async generator is returned.
    :raises QueryDefinitionError: if prefetch_related was used on this queryset
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: asynchronous iterable generator of returned models
    :rtype: AsyncGenerator[Model]
    """
    if self._prefetch_related:
        # prefetch needs the whole result set at once - incompatible with streaming
        raise QueryDefinitionError(
            "Prefetch related queries are not supported in iterators"
        )
    if kwargs or args:
        # shortcut - apply criteria through filter() and delegate
        async for result in self.filter(*args, **kwargs).iterate():
            yield result
        return
    expr = self.build_select_expression()
    # joins can multiply rows per model instance, so buffer consecutive rows
    # sharing the same pk and build one model per group
    rows: list = []
    last_primary_key = None
    pk_alias = self.model.get_column_alias(self.model_config.pkname)
    async for row in self.database.iterate(query=expr):
        current_primary_key = row[pk_alias]
        if last_primary_key == current_primary_key or last_primary_key is None:
            # same model as before (or very first row) - keep buffering
            last_primary_key = current_primary_key
            rows.append(row)
            continue
        # pk changed - emit the buffered group as one model and start a new one
        yield (await self._process_query_result_rows(rows))[0]
        last_primary_key = current_primary_key
        rows = [row]
    # flush the final buffered group, if any rows were seen
    if rows:
        yield (await self._process_query_result_rows(rows))[0]
async def create(self, **kwargs: Any) -> "T":
    """
    Creates the model instance, saves it in a database and returns the updates model
    (with pk populated if not passed and autoincrement is set).
    The allowed kwargs are `Model` fields names and proper value types.
    :param kwargs: fields names and proper value types
    :type kwargs: Any
    :return: created model
    :rtype: Model
    """
    new_instance = self.model(**kwargs)
    return await new_instance.save()
async def bulk_create(self, objects: List["T"]) -> None:
    """
    Performs a bulk create in one database session to speed up the process.
    Allows you to create multiple objects at once.
    A valid list of `Model` objects needs to be passed.
    Bulk operations do not send signals.
    :raises ModelListEmptyError: if the objects list is empty
    :param objects: list of ormar models already initialized and ready to save.
    :type objects: List[Model]
    """
    if not objects:
        raise ModelListEmptyError("Bulk create objects are empty!")
    ready_objects = []
    for obj in objects:
        # serialize each model into plain column values for the insert
        ready_objects.append(obj.prepare_model_to_save(obj.model_dump()))
        await asyncio.sleep(0)  # Allow context switching to prevent blocking
    # don't use execute_many, as in databases it's executed in a loop
    # instead of using execute_many from drivers
    expr = self.table.insert().values(ready_objects)
    await self.database.execute(expr)
    for obj in objects:
        # mark every instance as persisted
        obj.set_save_status(True)
async def bulk_update(  # noqa: CCR001
    self, objects: List["T"], columns: Optional[List[str]] = None
) -> None:
    """
    Performs bulk update in one database session to speed up the process.
    Allows you to update multiple instance at once.
    All `Models` passed need to have primary key column populated.
    You can also select which fields to update by passing `columns` list
    as a list of string names.
    Bulk operations do not send per-model signals, but a `post_bulk_update`
    signal is sent once after the update completes (see the end of this method).
    :raises ModelListEmptyError: if the objects list is empty
    :raises ModelPersistenceError: if any object has no primary key value set
    :param objects: list of ormar models
    :type objects: List[Model]
    :param columns: list of columns to update
    :type columns: List[str]
    """
    if not objects:
        raise ModelListEmptyError("Bulk update objects are empty!")
    ready_objects = []
    pk_name = self.model_config.pkname
    if not columns:
        # default to all own db fields plus foreign key columns
        columns = list(
            self.model.extract_db_own_fields().union(
                self.model.extract_related_names()
            )
        )
    if pk_name not in columns:
        columns.append(pk_name)
    # translate ormar field names into db column aliases
    columns = [self.model.get_column_alias(k) for k in columns]
    for obj in objects:
        new_kwargs = obj.model_dump()
        if new_kwargs.get(pk_name) is None:
            raise ModelPersistenceError(
                "You cannot update unsaved objects. "
                f"{self.model.__name__} has to have {pk_name} filled."
            )
        new_kwargs = obj.prepare_model_to_update(new_kwargs)
        # prefix keys with "new_" so values don't clash with column names
        # in the bindparam-based update statement built below
        ready_objects.append(
            {"new_" + k: v for k, v in new_kwargs.items() if k in columns}
        )
        await asyncio.sleep(0)
    pk_column = self.model_config.table.c.get(self.model.get_column_alias(pk_name))
    pk_column_name = self.model.get_column_alias(pk_name)
    table_columns = [c.name for c in self.model_config.table.c]
    expr = self.table.update().where(
        pk_column == bindparam("new_" + pk_column_name)
    )
    expr = expr.values(
        **{
            k: bindparam("new_" + k)
            for k in columns
            if k != pk_column_name and k in table_columns
        }
    )
    # databases bind params only where query is passed as string
    # otherwise it just passes all data to values and results in unconsumed columns
    expr = str(expr)  # type: ignore
    await self.database.execute_many(expr, ready_objects)
    for obj in objects:
        obj.set_save_status(True)
    # bulk updates skip per-model signals but emit one post_bulk_update signal
    await cast(
        Type["Model"], self.model_cls
    ).ormar_config.signals.post_bulk_update.send(
        sender=self.model_cls, instances=objects  # type: ignore
    )
collerek-ormar-c09209a/ormar/queryset/reverse_alias_resolver.py 0000664 0000000 0000000 00000021665 15130200524 0025103 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict, List, Type, cast
if TYPE_CHECKING: # pragma: no cover
from ormar import ForeignKeyField, Model
from ormar.models.excludable import Excludable, ExcludableItems
class ReverseAliasResolver:
    """
    Class is used to reverse resolve table aliases into relation strings
    to parse raw data columns and replace table prefixes with full relation string
    """

    def __init__(
        self,
        model_cls: Type["Model"],
        excludable: "ExcludableItems",
        select_related: List[str],
        exclude_through: bool = False,
    ) -> None:
        self.select_related = select_related
        self.model_cls = model_cls
        # global alias -> relation-key map taken from the shared alias manager
        self.reversed_aliases = (
            self.model_cls.ormar_config.alias_manager.reversed_aliases
        )
        self.excludable = excludable
        self.exclude_through = exclude_through
        # populated lazily by _create_prefixes_map():
        # relation key -> relation field / relation string
        self._fields: Dict[str, "ForeignKeyField"] = dict()
        self._prefixes: Dict[str, str] = dict()
        # visited prefixes in traversal order; "" stands for the root model
        self._previous_prefixes: List[str] = [""]
        self._resolved_names: Dict[str, str] = dict()

    def resolve_columns(self, columns_names: List[str]) -> Dict:
        """
        Takes raw query prefixed column and resolves the prefixes to
        relation strings (relation names connected with dunders).
        :param columns_names: list of column names with prefixes from query
        :type columns_names: List[str]
        :return: dictionary of prefix: resolved names
        :rtype: Union[None, Dict[str, str]]
        """
        self._create_prefixes_map()
        for column_name in columns_names:
            # aliases never contain underscores, so the first "_"-separated
            # token is the candidate table prefix
            column_parts = column_name.split("_")
            potential_prefix = column_parts[0]
            if potential_prefix in self.reversed_aliases:
                self._resolve_column_with_prefix(
                    column_name=column_name, prefix=potential_prefix
                )
            else:
                # un-prefixed column - belongs to the root model; keep it
                # only if it's not excluded there
                allowed_columns = self.model_cls.own_table_columns(
                    model=self.model_cls,
                    excludable=self.excludable,
                    add_pk_columns=False,
                )
                if column_name in allowed_columns:
                    self._resolved_names[column_name] = column_name
        return self._resolved_names

    def _resolve_column_with_prefix(self, column_name: str, prefix: str) -> None:
        """
        Takes the prefixed column, checks if field should be excluded, and if not
        it proceeds to replace prefix of a table with full relation string.
        Sample: translates: "xsd12df_name" -> into: "posts__user__name"
        :param column_name: prefixed name of the column
        :type column_name: str
        :param prefix: extracted prefix
        :type prefix: str
        """
        relation = self.reversed_aliases.get(prefix, None)
        relation_str = self._prefixes.get(relation, None)
        field = self._fields.get(relation, None)
        # prefix not registered for this query's select_related chain - skip
        if relation_str is None or field is None:
            return
        is_through = field.is_multi and field.through.get_name() in relation_str
        if self._check_if_field_is_excluded(
            prefix=prefix, field=field, is_through=is_through
        ):
            return
        # for m2m through-table columns the target is the through model
        target_model = field.through if is_through else field.to
        allowed_columns = target_model.own_table_columns(
            model=target_model,
            excludable=self.excludable,
            alias=prefix,
            add_pk_columns=False,
        )
        new_column_name = column_name.replace(f"{prefix}_", "")
        if new_column_name in allowed_columns:
            # swap "prefix_" for the dunder-joined relation path
            self._resolved_names[column_name] = column_name.replace(
                f"{prefix}_", f"{relation_str}__"
            )

    def _check_if_field_is_excluded(
        self, prefix: str, field: "ForeignKeyField", is_through: bool
    ) -> bool:
        """
        Checks if given relation is excluded in current query.
        Note that in contrary to other queryset methods here you can exclude the
        in-between models but keep the end columns, which does not make sense
        when parsing the raw data into models.
        So in relation category -> category_x_post -> post -> user you can exclude
        category_x_post and post models but can keep the user one. (in ormar model
        context that is not possible as if you would exclude through and post model
        there would be no way to reach user model).
        Exclusions happen on a model before the current one, so we need to move back
        in chain of model by one or by two (m2m relations have through model in between)
        :param prefix: table alias
        :type prefix: str
        :param field: field with relation
        :type field: ForeignKeyField
        :param is_through: flag if current table is a through table
        :type is_through: bool
        :return: result of the check
        :rtype: bool
        """
        shift, field_name = 1, field.name
        if is_through:
            # through tables are excluded under the through model's name
            field_name = field.through.get_name()
        elif field.is_multi:
            # m2m target - skip both target and through model to reach the owner
            shift = 2
        previous_excludable = self._get_previous_excludable(
            prefix=prefix, field=field, shift=shift
        )
        return previous_excludable.is_excluded(field_name)

    def _get_previous_excludable(
        self, prefix: str, field: "ForeignKeyField", shift: int = 1
    ) -> "Excludable":
        """
        Returns excludable related to model previous in chain of models.
        Used to check if current model should be excluded.
        :param prefix: prefix of a current table
        :type prefix: str
        :param field: field with relation
        :type field: ForeignKeyField
        :param shift: how many model back to go - for m2m it's 2 due to through models
        :type shift: int
        :return: excludable for previous model
        :rtype: Excludable
        """
        # record visit order so we can index backwards through the chain
        if prefix not in self._previous_prefixes:
            self._previous_prefixes.append(prefix)
        previous_prefix_ind = self._previous_prefixes.index(prefix)
        # fall back to "" (root model, no alias) when there is no model far
        # enough back in the chain
        previous_prefix = (
            self._previous_prefixes[previous_prefix_ind - shift]
            if previous_prefix_ind > (shift - 1)
            else ""
        )
        return self.excludable.get(field.owner, alias=previous_prefix)

    def _create_prefixes_map(self) -> None:
        """
        Creates a map of alias manager aliases keys to relation strings.
        I.e in alias manager you can have alias user_roles: xas12ad
        This method will create entry user_roles: roles, where roles is a name of
        relation on user model.
        Will also keep the relation field in separate dictionary so we can later
        extract field names and owner models.
        """
        for related in self.select_related:
            # walk each select_related path part by part, keeping the
            # growing dunder-joined relation string
            model_cls = self.model_cls
            related_split = related.split("__")
            related_str = ""
            for relation in related_split:
                previous_related_str = f"{related_str}__" if related_str else ""
                new_related_str = previous_related_str + relation
                field = model_cls.ormar_config.model_fields[relation]
                field = cast("ForeignKeyField", field)
                prefix_name = self._handle_through_fields_and_prefix(
                    model_cls=model_cls,
                    field=field,
                    previous_related_str=previous_related_str,
                    relation=relation,
                )
                self._prefixes[prefix_name] = new_related_str
                self._fields[prefix_name] = field
                # descend into the target model for the next path part
                model_cls = field.to
                related_str = new_related_str

    def _handle_through_fields_and_prefix(
        self,
        model_cls: Type["Model"],
        field: "ForeignKeyField",
        previous_related_str: str,
        relation: str,
    ) -> str:
        """
        Registers through models for m2m relations and switches prefix for
        the one linking from through model to target model.
        For other relations returns current model name + relation name as prefix.
        Nested relations are a chain of relation names with __ in between.
        :param model_cls: model of current relation
        :type model_cls: Type["Model"]
        :param field: field with relation
        :type field: ForeignKeyField
        :param previous_related_str: concatenated chain linked with "__"
        :type previous_related_str: str
        :param relation: name of the current relation in chain
        :type relation: str
        :return: name of prefix to populate
        :rtype: str
        """
        prefix_name = f"{model_cls.get_name()}_{relation}"
        if field.is_multi:
            through_name = field.through.get_name()
            if not self.exclude_through:
                # also register the through model so its columns can be resolved
                self._fields[prefix_name] = field
                new_through_str = previous_related_str + through_name
                self._prefixes[prefix_name] = new_through_str
            # for m2m the actual join to the target uses the through->target link
            prefix_name = f"{through_name}_{field.default_target_field_name()}"
        return prefix_name
collerek-ormar-c09209a/ormar/queryset/utils.py 0000664 0000000 0000000 00000022023 15130200524 0021463 0 ustar 00root root 0000000 0000000 import collections.abc
import copy
from typing import (
TYPE_CHECKING,
Any,
Dict,
List,
Optional,
Set,
Tuple,
Type,
Union,
)
if TYPE_CHECKING: # pragma no cover
from ormar import BaseField, Model
def check_node_not_dict_or_not_last_node(
    part: str, is_last: bool, current_level: Any
) -> bool:
    """
    Decides whether a new nested dict has to be created for `part`
    during traversal of the translated structure.

    True is returned when deeper traversal is needed, i.e. when the name is
    missing from the current level but more parts follow, or when the name
    exists but does not hold a dictionary.

    :param part: node name to check
    :type part: str
    :param is_last: flag to check if last element
    :type is_last: bool
    :param current_level: current level of the traversed structure
    :type current_level: Any
    :return: result of the check
    :rtype: bool
    """
    if part not in current_level:
        # missing node needs a dict only when more parts follow
        return not is_last
    # existing node needs replacing only when it's not already a dict
    return not isinstance(current_level[part], dict)
def translate_list_to_dict(  # noqa: CCR001
    list_to_trans: Union[List, Set], default: Any = ...
) -> Dict:
    """
    Splits the list of strings by '__' and converts them to dictionary with nested
    models grouped by parent model. That way each model appears only once in the whole
    dictionary and children are grouped under parent name.
    Default required key is Ellipsis like in pydantic.
    :param list_to_trans: input list
    :type list_to_trans: Union[List, Set]
    :param default: value to use as a default (leaf) value
    :type default: Any
    :return: converted to dictionary input list
    :rtype: Dict
    """
    new_dict: Dict = dict()
    for path in list_to_trans:
        current_level = new_dict
        parts = path.split("__")
        # deep-copy the default per path so mutable defaults are never shared
        # between different leaves of the result
        def_val: Any = copy.deepcopy(default)
        for ind, part in enumerate(parts):
            is_last = ind == len(parts) - 1
            if check_node_not_dict_or_not_last_node(
                part=part, is_last=is_last, current_level=current_level
            ):
                # intermediate node (or non-dict leaf being extended) -> new dict
                current_level[part] = dict()
            elif part not in current_level:
                # new leaf node -> assign the default value
                current_level[part] = def_val
            current_level = current_level[part]
    return new_dict
def convert_set_to_required_dict(set_to_convert: set) -> Dict:
    """
    Converts a set to a dictionary that maps every element to Ellipsis,
    the marker of a required key (as in pydantic).

    :param set_to_convert: set to convert to dict
    :type set_to_convert: set
    :return: set converted to dict of ellipsis
    :rtype: Dict
    """
    return {key: Ellipsis for key in set_to_convert}
def update(current_dict: Any, updating_dict: Any) -> Dict:  # noqa: CCR001
    """
    Merges updating_dict into current_dict with regard for nested keys.
    Nested sets are unionised, nested mappings are merged recursively and
    all other values are simply overwritten.

    The merge happens in place (current_dict is mutated), unless current_dict
    is Ellipsis in which case a fresh dict is built.

    :param current_dict: dict to update
    :type current_dict: Dict[str, ellipsis]
    :param updating_dict: dict with values to update
    :type updating_dict: Dict
    :return: combination of both dicts
    :rtype: Dict
    """
    if current_dict is Ellipsis:
        current_dict = dict()
    for key, new_value in updating_dict.items():
        existing = current_dict.get(key)
        if isinstance(new_value, collections.abc.Mapping):
            # mapping -> merge recursively; sets are first promoted to dicts
            previous = current_dict.get(key, {})
            if isinstance(previous, set):
                previous = convert_set_to_required_dict(previous)
            current_dict[key] = update(previous, new_value)
        elif isinstance(new_value, set) and isinstance(existing, set):
            # two sets -> union them instead of overwriting
            current_dict[key] = existing.union(new_value)
        else:
            current_dict[key] = new_value
    return current_dict
def subtract_dict(current_dict: Any, updating_dict: Any) -> Dict:  # noqa: CCR001
    """
    Subtracts one dict from another with regard for nested keys.
    Nested sets are differenced, nested mappings are subtracted recursively
    (sets are promoted to required-dicts first so they can be compared), and
    keys whose remaining value ends up empty - or whose values are neither
    sets nor mappings - are removed from current_dict entirely.
    Keys present only in updating_dict are left untouched.
    :param current_dict: dict to subtract from (modified in place)
    :type current_dict: Dict[str, ellipsis]
    :param updating_dict: dict with values to subtract
    :type updating_dict: Dict
    :return: current_dict with common parts removed
    :rtype: Dict
    """
    for key, value in updating_dict.items():
        old_key = current_dict.get(key, {})
        new_value: Optional[Union[Dict, Set]] = None
        # nothing (or an empty value) to subtract from -> leave key as is
        if not old_key:
            continue
        if isinstance(value, set) and isinstance(old_key, set):
            new_value = old_key.difference(value)
        elif isinstance(value, (set, collections.abc.Mapping)) and isinstance(
            old_key, (set, collections.abc.Mapping)
        ):
            # normalize both sides to mappings for a recursive subtraction
            value = (
                convert_set_to_required_dict(value)
                if not isinstance(value, collections.abc.Mapping)
                else value
            )
            old_key = (
                convert_set_to_required_dict(old_key)
                if not isinstance(old_key, collections.abc.Mapping)
                else old_key
            )
            new_value = subtract_dict(old_key, value)
        if new_value:
            current_dict[key] = new_value
        else:
            # empty remainder (or non-collection match) -> drop the key
            current_dict.pop(key, None)
    return current_dict
def update_dict_from_list(curr_dict: Dict, list_to_update: Union[List, Set]) -> Dict:
    """
    Converts the list into dictionary and later performs special update, where
    nested keys that are sets or dicts are combined and not overwritten.

    :param curr_dict: dict to update
    :type curr_dict: Dict
    :param list_to_update: list with values to update the dict
    :type list_to_update: List[str]
    :return: updated dict
    :rtype: Dict
    """
    # shallow-copy so the caller's dict top level is not mutated
    merged = copy.copy(curr_dict)
    update(merged, translate_list_to_dict(list_to_update))
    return merged
def get_relationship_alias_model_and_str(
    source_model: Type["Model"], related_parts: List
) -> Tuple[str, Type["Model"], str, bool]:
    """
    Walks the relation to retrieve the actual model on which the clause should be
    constructed, extracts alias based on last relation leading to target model.
    :param related_parts: list of related names extracted from string
    :type related_parts: Union[List, List[str]]
    :param source_model: model from which relation starts
    :type source_model: Type[Model]
    :return: table prefix, target model, relation string and a flag telling
        whether the final hop is a through model
    :rtype: Tuple[str, Type["Model"], str, bool]
    """
    table_prefix = ""
    is_through = False
    target_model = source_model
    previous_model = target_model
    previous_models = [target_model]
    manager = target_model.ormar_config.alias_manager
    # iterate over a copy: _process_through_field may remove parts in place
    for relation in related_parts[:]:
        related_field = target_model.ormar_config.model_fields[relation]
        if related_field.is_through:
            (previous_model, relation, is_through) = _process_through_field(
                related_parts=related_parts,
                relation=relation,
                related_field=related_field,
                previous_model=previous_model,
                previous_models=previous_models,
            )
        if related_field.is_multi:
            # m2m: the join to the target goes via the through model
            previous_model = related_field.through
            relation = related_field.default_target_field_name()  # type: ignore
        table_prefix = manager.resolve_relation_alias(
            from_model=previous_model, relation_name=relation
        )
        # descend one hop in the chain
        target_model = related_field.to
        previous_model = target_model
        if not is_through:
            previous_models.append(previous_model)
    relation_str = "__".join(related_parts)
    return table_prefix, target_model, relation_str, is_through
def _process_through_field(
    related_parts: List,
    relation: Optional[str],
    related_field: "BaseField",
    previous_model: Type["Model"],
    previous_models: List[Type["Model"]],
) -> Tuple[Type["Model"], Optional[str], bool]:
    """
    Helper processing through models as they need to be treated differently.
    Note that the through part is removed from related_parts in place, so the
    through model's name never appears in the final relation string.
    :param related_parts: split relation string (mutated - relation is removed)
    :type related_parts: List[str]
    :param relation: relation name
    :type relation: Optional[str]
    :param related_field: field with relation declaration
    :type related_field: "ForeignKeyField"
    :param previous_model: model from which relation is coming
    :type previous_model: Type["Model"]
    :param previous_models: list of already visited models in relation chain
    :type previous_models: List[Type["Model"]]
    :return: previous_model, relation, is_through
    :rtype: Tuple[Type["Model"], Optional[str], bool]
    """
    is_through = True
    related_parts.remove(relation)
    through_field = related_field.owner.ormar_config.model_fields[
        related_field.related_name or ""
    ]
    # pick the relation direction depending on which side of the m2m
    # the chain arrived from (two models back equals the through target)
    if len(previous_models) > 1 and previous_models[-2] == through_field.to:
        previous_model = through_field.to
        relation = through_field.related_name
    else:
        relation = related_field.related_name
    return previous_model, relation, is_through
collerek-ormar-c09209a/ormar/relations/ 0000775 0000000 0000000 00000000000 15130200524 0020071 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/relations/__init__.py 0000664 0000000 0000000 00000000777 15130200524 0022215 0 ustar 00root root 0000000 0000000 """
Package handles relations on models, returning related models on calls and exposing
QuerySetProxy for m2m and reverse relations.
"""
from ormar.relations.alias_manager import AliasManager
from ormar.relations.relation import Relation, RelationType
from ormar.relations.relation_manager import RelationsManager
from ormar.relations.utils import get_relations_sides_and_names
__all__ = [
"AliasManager",
"Relation",
"RelationsManager",
"RelationType",
"get_relations_sides_and_names",
]
collerek-ormar-c09209a/ormar/relations/alias_manager.py 0000664 0000000 0000000 00000016432 15130200524 0023234 0 ustar 00root root 0000000 0000000 import string
import uuid
from random import choices
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, Union
import sqlalchemy
from sqlalchemy import Label
from sqlalchemy.sql.selectable import NamedFromClause
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
from ormar.fields import ForeignKeyField
from ormar.models import ModelRow
def get_table_alias() -> str:
    """
    Creates a random string that is used to alias tables in joins.
    It's necessary that each relation has it's own aliases cause you can link
    to the same target tables from multiple fields on one model as well as from
    multiple different models in one join.
    :return: randomly generated alias
    :rtype: str
    """
    # two random uppercase letters + four hex chars, lowercased (6 chars total)
    prefix = "".join(choices(string.ascii_uppercase, k=2))
    suffix = uuid.uuid4().hex[:4]
    return (prefix + suffix).lower()
class AliasManager:
    """
    Keep all aliases of relations between different tables.
    One global instance is shared between all models.
    """

    def __init__(self) -> None:
        # relation key ("modelname_relationname") -> generated alias
        self._aliases_new: Dict[str, str] = dict()
        # lazily built inverse of _aliases_new (alias -> relation key)
        self._reversed_aliases: Dict[str, str] = dict()
        # cache of aliased sqlalchemy tables keyed by full alias + table id
        self._prefixed_tables: Dict[str, NamedFromClause] = dict()

    def __contains__(self, item: str) -> bool:
        return self._aliases_new.__contains__(item)

    def __getitem__(self, key: str) -> Any:
        return self._aliases_new.__getitem__(key)

    @property
    def reversed_aliases(self) -> Dict:
        """
        Returns swapped key-value pairs from aliases where alias is the key.
        The result is cached; the cache is invalidated by add_alias whenever
        a new alias is registered.
        :return: dictionary of prefix to relation
        :rtype: Dict
        """
        if self._reversed_aliases:
            return self._reversed_aliases
        reversed_aliases = {v: k for k, v in self._aliases_new.items()}
        self._reversed_aliases = reversed_aliases
        return self._reversed_aliases

    @staticmethod
    def prefixed_columns(
        alias: str, table: sqlalchemy.Table, fields: Optional[List] = None
    ) -> List[Label[Any]]:
        """
        Creates a list of aliases sqlalchemy text clauses from
        string alias and sqlalchemy.Table.
        Optional list of fields to include can be passed to extract only those columns.
        List has to have sqlalchemy names of columns (ormar aliases) not the ormar ones.
        :param alias: alias of given table
        :type alias: str
        :param table: table from which fields should be aliased
        :type table: sqlalchemy.Table
        :param fields: fields to include
        :type fields: Optional[List[str]]
        :return: list of sqlalchemy text clauses with "column name as aliased name"
        :rtype: List[text]
        """
        alias = f"{alias}_" if alias else ""
        # accept both plain and already-prefixed field names in `fields`
        aliased_fields = [f"{alias}{x}" for x in fields] if fields else []
        all_columns = (
            table.columns
            if not fields
            else [
                col
                for col in table.columns
                if col.name in fields or col.name in aliased_fields
            ]
        )
        return [column.label(f"{alias}{column.name}") for column in all_columns]

    def prefixed_table_name(
        self, alias: str, table: sqlalchemy.Table
    ) -> NamedFromClause:
        """
        Creates text clause with table name with aliased name.
        Aliased tables are cached per (alias, table identity) pair.
        :param alias: alias of given table
        :type alias: str
        :param table: table
        :type table: sqlalchemy.Table
        :return: sqlalchemy text clause as "table_name aliased_name"
        :rtype: sqlalchemy text clause
        """
        full_alias = f"{alias}_{table.name}"
        # include id(table) in key - same-named tables can be distinct objects
        key = f"{full_alias}_{id(table)}"
        return self._prefixed_tables.setdefault(key, table.alias(full_alias))

    def add_relation_type(
        self,
        source_model: Type["Model"],
        relation_name: str,
        reverse_name: Optional[str] = None,
    ) -> None:
        """
        Registers the relations defined in ormar models.
        Given the relation it registers also the reverse side of this relation.
        Used by both ForeignKey and ManyToMany relations.
        Each relation is registered as Model name and relation name.
        Each alias registered has to be unique.
        Aliases are used to construct joins to assure proper links between tables.
        That way you can link to the same target tables from multiple fields
        on one model as well as from multiple different models in one join.
        :param source_model: model with relation defined
        :type source_model: source Model
        :param relation_name: name of the relation to define
        :type relation_name: str
        :param reverse_name: name of related_name fo given relation for m2m relations
        :type reverse_name: Optional[str]
        :return: none
        :rtype: None
        """
        parent_key = f"{source_model.get_name()}_{relation_name}"
        if parent_key not in self._aliases_new:
            self.add_alias(parent_key)
        to_field = source_model.ormar_config.model_fields[relation_name]
        child_model = to_field.to
        child_key = f"{child_model.get_name()}_{reverse_name}"
        if child_key not in self._aliases_new:
            self.add_alias(child_key)

    def add_alias(self, alias_key: str) -> str:
        """
        Adds alias to the dictionary of aliases under given key.
        Also invalidates the cached reversed_aliases map so it is rebuilt
        on next access and includes the new entry.
        :param alias_key: key of relation to generate alias for
        :type alias_key: str
        :return: generated alias
        :rtype: str
        """
        alias = get_table_alias()
        self._aliases_new[alias_key] = alias
        # fix: drop the stale cache - reversed_aliases computed before this
        # registration would otherwise never contain the new alias
        self._reversed_aliases = dict()
        return alias

    def resolve_relation_alias(
        self, from_model: Union[Type["Model"], Type["ModelRow"]], relation_name: str
    ) -> str:
        """
        Given model and relation name returns the alias for this relation.
        Returns an empty string when the relation is not registered.
        :param from_model: model with relation defined
        :type from_model: source Model
        :param relation_name: name of the relation field
        :type relation_name: str
        :return: alias of the relation
        :rtype: str
        """
        alias = self._aliases_new.get(f"{from_model.get_name()}_{relation_name}", "")
        return alias

    def resolve_relation_alias_after_complex(
        self,
        source_model: Union[Type["Model"], Type["ModelRow"]],
        relation_str: str,
        relation_field: "ForeignKeyField",
    ) -> str:
        """
        Given source model and relation string returns the alias for this complex
        relation if it exists, otherwise fallback to normal relation from a relation
        field definition.
        :param relation_field: field with direct relation definition
        :type relation_field: "ForeignKeyField"
        :param source_model: model with query starts
        :type source_model: source Model
        :param relation_str: string with relation joins defined
        :type relation_str: str
        :return: alias of the relation
        :rtype: str
        """
        alias = ""
        # nested (dunder-joined) relations may have their own dedicated alias
        if relation_str and "__" in relation_str:
            alias = self.resolve_relation_alias(
                from_model=source_model, relation_name=relation_str
            )
        if not alias:
            # fall back to the direct relation defined on the field itself
            alias = self.resolve_relation_alias(
                from_model=relation_field.get_source_model(),
                relation_name=relation_field.get_relation_name(),
            )
        return alias
collerek-ormar-c09209a/ormar/relations/querysetproxy.py 0000664 0000000 0000000 00000077677 15130200524 0023456 0 ustar 00root root 0000000 0000000 from _weakref import CallableProxyType
from typing import ( # noqa: I100, I201
TYPE_CHECKING,
Any,
AsyncGenerator,
Dict,
Generic,
List,
MutableSequence,
Optional,
Sequence,
Set,
Tuple,
Type,
TypeVar,
Union,
cast,
)
import ormar # noqa: I100, I202
from ormar.exceptions import ModelPersistenceError, NoMatch, QueryDefinitionError
if TYPE_CHECKING: # pragma no cover
from ormar import OrderAction, RelationType
from ormar.models import Model, T
from ormar.queryset import QuerySet
from ormar.relations import Relation
else:
T = TypeVar("T", bound="Model")
class QuerysetProxy(Generic[T]):
"""
Exposes QuerySet methods on relations, but also handles creating and removing
of through Models for m2m relations.
"""
if TYPE_CHECKING: # pragma no cover
relation: "Relation"
def __init__(
    self,
    relation: "Relation",
    to: Type["T"],
    type_: "RelationType",
    qryset: Optional["QuerySet[T]"] = None,
) -> None:
    """
    :param relation: relation that this proxy wraps
    :type relation: Relation
    :param to: target model class of the relation
    :type to: Type[Model]
    :param type_: type of the relation (e.g. MULTIPLE for m2m)
    :type type_: RelationType
    :param qryset: optional queryset to expose through the proxy
    :type qryset: Optional[QuerySet]
    """
    self.relation: "Relation" = relation
    self._queryset: Optional["QuerySet[T]"] = qryset
    self.type_: "RelationType" = type_
    # owner may be a weakref proxy to avoid reference cycles
    self._owner: Union[CallableProxyType, "Model"] = self.relation.manager.owner
    # name of the reverse relation on the target model
    self.related_field_name = self._owner.ormar_config.model_fields[
        self.relation.field_name
    ].get_related_name()
    self.to: Type[T] = to
    self.related_field = to.ormar_config.model_fields[self.related_field_name]
    self.owner_pk_value = self._owner.pk
    # through model name is only relevant for m2m relations
    self.through_model_name = (
        self.related_field.through.get_name()
        if self.type_ == ormar.RelationType.MULTIPLE
        else ""
    )
@property
def queryset(self) -> "QuerySet[T]":
    """
    Returns the wrapped queryset when one was set,
    raises AttributeError otherwise.
    :return: QuerySet
    :rtype: QuerySet
    """
    current = self._queryset
    if not current:
        raise AttributeError
    return current
@queryset.setter
def queryset(self, value: "QuerySet") -> None:
    """
    Set's the queryset. Initialized in RelationProxy.
    :param value: QuerySet
    :type value: QuerySet
    """
    self._queryset = value
def _assign_child_to_parent(self, child: Optional["T"]) -> None:
    """
    Registers child in parents RelationManager by setting the relation
    attribute on the owner model.
    :param child: child to register on parent side.
    :type child: Model
    """
    if not child:
        return
    setattr(self._owner, self.relation.field_name, child)
def _register_related(self, child: Union["T", Sequence[Optional["T"]]]) -> None:
    """
    Registers child/ children in parents RelationManager.
    Accepts either a single model or a list of (possibly None) models.
    :param child: child or list of children models to register.
    :type child: Union[Model,List[Model]]
    """
    if isinstance(child, list):
        for item in child:
            self._assign_child_to_parent(item)
        return
    assert isinstance(child, ormar.Model)
    self._assign_child_to_parent(cast("T", child))
def _clean_items_on_load(self) -> None:
    """
    Cleans the current list of the related models by removing each one
    from the relation.
    """
    related = self.relation.related_models
    if not isinstance(related, MutableSequence):
        return
    # iterate over a copy since remove() mutates the underlying sequence
    for stale_item in list(related):
        self.relation.remove(stale_item)
async def create_through_instance(self, child: "T", **kwargs: Any) -> None:
"""
Crete a through model instance in the database for m2m relations.
:param kwargs: dict of additional keyword arguments for through instance
:type kwargs: Any
:param child: child model instance
:type child: Model
"""
model_cls = self.relation.through
owner_column = self.related_field.default_target_field_name() # type: ignore
child_column = self.related_field.default_source_field_name() # type: ignore
rel_kwargs = {owner_column: self._owner.pk, child_column: child.pk}
final_kwargs = {**rel_kwargs, **kwargs}
if child.pk is None:
raise ModelPersistenceError(
f"You cannot save {child.get_name()} "
f"model without primary key set! \n"
f"Save the child model first."
)
await model_cls(**final_kwargs).save()
async def update_through_instance(self, child: "T", **kwargs: Any) -> None:
"""
Updates a through model instance in the database for m2m relations.
:param kwargs: dict of additional keyword arguments for through instance
:type kwargs: Any
:param child: child model instance
:type child: Model
"""
model_cls = self.relation.through
owner_column = self.related_field.default_target_field_name() # type: ignore
child_column = self.related_field.default_source_field_name() # type: ignore
rel_kwargs = {owner_column: self._owner.pk, child_column: child.pk}
through_model = await model_cls.objects.get(**rel_kwargs)
await through_model.update(**kwargs)
async def upsert_through_instance(self, child: "T", **kwargs: Any) -> None:
"""
Updates a through model instance in the database for m2m relations if
it already exists, else creates one.
:param kwargs: dict of additional keyword arguments for through instance
:type kwargs: Any
:param child: child model instance
:type child: Model
"""
try:
await self.update_through_instance(child=child, **kwargs)
except NoMatch:
await self.create_through_instance(child=child, **kwargs)
async def delete_through_instance(self, child: "T") -> None:
"""
Removes through model instance from the database for m2m relations.
:param child: child model instance
:type child: Model
"""
queryset = ormar.QuerySet(model_cls=self.relation.through) # type: ignore
owner_column = self.related_field.default_target_field_name() # type: ignore
child_column = self.related_field.default_source_field_name() # type: ignore
kwargs = {owner_column: self._owner, child_column: child}
link_instance = await queryset.filter(**kwargs).get() # type: ignore
await link_instance.delete()
async def exists(self) -> bool:
"""
Returns a bool value to confirm if there are rows matching the given criteria
(applied with `filter` and `exclude` if set).
Actual call delegated to QuerySet.
:return: result of the check
:rtype: bool
"""
return await self.queryset.exists()
async def count(self, distinct: bool = True) -> int:
"""
Returns number of rows matching the given criteria
(applied with `filter` and `exclude` if set before).
If `distinct` is `True` (the default), this will return
the number of primary rows selected. If `False`,
the count will be the total number of rows returned
(including extra rows for `one-to-many` or `many-to-many`
left `select_related` table joins).
`False` is the legacy (buggy) behavior for workflows that depend on it.
Actual call delegated to QuerySet.
:param distinct: flag if the primary table rows should be distinct or not
:return: number of rows
:rtype: int
"""
return await self.queryset.count(distinct=distinct)
async def max(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns max value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: max value of column(s)
:rtype: Any
"""
return await self.queryset.max(columns=columns)
async def min(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns min value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: min value of column(s)
:rtype: Any
"""
return await self.queryset.min(columns=columns)
async def sum(self, columns: Union[str, List[str]]) -> Any: # noqa: A003
"""
Returns sum value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: sum value of columns
:rtype: int
"""
return await self.queryset.sum(columns=columns)
async def avg(self, columns: Union[str, List[str]]) -> Any:
"""
Returns avg value of columns for rows matching the given criteria
(applied with `filter` and `exclude` if set before).
:return: avg value of columns
:rtype: Union[int, float, List]
"""
return await self.queryset.avg(columns=columns)
async def clear(self, keep_reversed: bool = True) -> int:
"""
Removes all related models from given relation.
Removes all through models for m2m relation.
For reverse FK relations keep_reversed flag marks if the reversed models
should be kept or deleted from the database too (False means that models
will be deleted, and not only removed from relation).
:param keep_reversed: flag if reverse models in reverse FK should be deleted
or not, keep_reversed=False deletes them from database.
:type keep_reversed: bool
:return: number of deleted models
:rtype: int
"""
if self.type_ == ormar.RelationType.MULTIPLE:
queryset = ormar.QuerySet(model_cls=self.relation.through) # type: ignore
owner_column = self._owner.get_name()
else:
queryset = ormar.QuerySet(model_cls=self.relation.to) # type: ignore
owner_column = self.related_field_name
kwargs = {owner_column: self._owner}
self._clean_items_on_load()
if keep_reversed and self.type_ == ormar.RelationType.REVERSE:
update_kwrgs = {f"{owner_column}": None}
return await queryset.filter(_exclude=False, **kwargs).update(
each=False, **update_kwrgs
)
return await queryset.delete(**kwargs) # type: ignore
async def values(
self,
fields: Union[List, str, Set, Dict, None] = None,
exclude_through: bool = False,
) -> List:
"""
Return a list of dictionaries with column values in order of the fields
passed or all fields from queried models.
To filter for given row use filter/exclude methods before values,
to limit number of rows use limit/offset or paginate before values.
Note that it always return a list even for one row from database.
:param exclude_through: flag if through models should be excluded
:type exclude_through: bool
:param fields: field name or list of field names to extract from db
:type fields: Union[List, str, Set, Dict]
"""
return await self.queryset.values(
fields=fields, exclude_through=exclude_through
)
async def values_list(
self,
fields: Union[List, str, Set, Dict, None] = None,
flatten: bool = False,
exclude_through: bool = False,
) -> List:
"""
Return a list of tuples with column values in order of the fields passed or
all fields from queried models.
When one field is passed you can flatten the list of tuples into list of values
of that single field.
To filter for given row use filter/exclude methods before values,
to limit number of rows use limit/offset or paginate before values.
Note that it always return a list even for one row from database.
:param exclude_through: flag if through models should be excluded
:type exclude_through: bool
:param fields: field name or list of field names to extract from db
:type fields: Union[str, List[str]]
:param flatten: when one field is passed you can flatten the list of tuples
:type flatten: bool
"""
return await self.queryset.values(
fields=fields,
exclude_through=exclude_through,
_as_dict=False,
_flatten=flatten,
)
async def first(self, *args: Any, **kwargs: Any) -> "T":
"""
Gets the first row from the db ordered by primary key column ascending.
Actual call delegated to QuerySet.
Passing args and/or kwargs is a shortcut and equals to calling
`filter(*args, **kwargs).first()`.
List of related models is cleared before the call.
:param kwargs:
:type kwargs:
:return:
:rtype: _asyncio.Future
"""
first = await self.queryset.first(*args, **kwargs)
self._clean_items_on_load()
self._register_related(first)
return first
async def get_or_none(self, *args: Any, **kwargs: Any) -> Optional["T"]:
"""
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria set it will return the last row in db sorted by pk.
Passing args and/or kwargs is a shortcut and equals to calling
`filter(*args, **kwargs).get_or_none()`.
If not match is found None will be returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
try:
get = await self.queryset.get(*args, **kwargs)
except ormar.NoMatch:
return None
self._clean_items_on_load()
self._register_related(get)
return get
async def get(self, *args: Any, **kwargs: Any) -> "T":
"""
Gets the first row from the db meeting the criteria set by kwargs.
If no criteria set it will return the last row in db sorted by pk.
Passing args and/or kwargs is a shortcut and equals to calling
`filter(*args, **kwargs).get()`.
Actual call delegated to QuerySet.
List of related models is cleared before the call.
:raises NoMatch: if no rows are returned
:raises MultipleMatches: if more than 1 row is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: returned model
:rtype: Model
"""
get = await self.queryset.get(*args, **kwargs)
self._clean_items_on_load()
self._register_related(get)
return get
async def all(self, *args: Any, **kwargs: Any) -> List["T"]: # noqa: A003
"""
Returns all rows from a database for given model for set filter options.
Passing args and/or kwargs is a shortcut and equals to calling
`filter(*args, **kwargs).all()`.
If there are no rows meeting the criteria an empty list is returned.
Actual call delegated to QuerySet.
List of related models is cleared before the call.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: list of returned models
:rtype: List[Model]
"""
all_items = await self.queryset.all(*args, **kwargs)
self._clean_items_on_load()
self._register_related(all_items)
return all_items
async def iterate( # noqa: A003
self,
*args: Any,
**kwargs: Any,
) -> AsyncGenerator["T", None]:
"""
Return async iterable generator for all rows from a database for given model.
Passing args and/or kwargs is a shortcut and equals to calling
`filter(*args, **kwargs).iterate()`.
If there are no rows meeting the criteria an empty async generator is returned.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: asynchronous iterable generator of returned models
:rtype: AsyncGenerator[Model]
"""
async for item in self.queryset.iterate(*args, **kwargs):
yield item
async def create(self, **kwargs: Any) -> "T":
"""
Creates the model instance, saves it in a database and returns the updates model
(with pk populated if not passed and autoincrement is set).
The allowed kwargs are `Model` fields names and proper value types.
For m2m relation the through model is created automatically.
Actual call delegated to QuerySet.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: created model
:rtype: Model
"""
through_kwargs = kwargs.pop(self.through_model_name, {})
if self.type_ == ormar.RelationType.REVERSE:
kwargs[self.related_field_name] = self._owner
created = await self.queryset.create(**kwargs)
self._register_related(created)
if self.type_ == ormar.RelationType.MULTIPLE:
await self.create_through_instance(created, **through_kwargs)
return created
async def update(self, each: bool = False, **kwargs: Any) -> int:
"""
Updates the model table after applying the filters from kwargs.
You have to either pass a filter to narrow down a query or explicitly pass
each=True flag to affect whole table.
:param each: flag if whole table should be affected if no filter is passed
:type each: bool
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: number of updated rows
:rtype: int
"""
# queryset proxy always have one filter for pk of parent model
if (
not each
and (len(self.queryset.filter_clauses) + len(self.queryset.exclude_clauses))
== 1
):
raise QueryDefinitionError(
"You cannot update without filtering the queryset first. "
"If you want to update all rows use update(each=True, **kwargs)"
)
through_kwargs = kwargs.pop(self.through_model_name, {})
children = await self.queryset.all()
for child in children:
await child.update(**kwargs) # type: ignore
if self.type_ == ormar.RelationType.MULTIPLE and through_kwargs:
await self.update_through_instance(
child=child, **through_kwargs # type: ignore
)
return len(children)
async def get_or_create(
self,
_defaults: Optional[Dict[str, Any]] = None,
*args: Any,
**kwargs: Any,
) -> Tuple["T", bool]:
"""
Combination of create and get methods.
Tries to get a row meeting the criteria for kwargs
and if `NoMatch` exception is raised
it creates a new one with given kwargs and _defaults.
:param kwargs: fields names and proper value types
:type kwargs: Any
:param _defaults: default values for creating object
:type _defaults: Optional[Dict[str, Any]]
:return: model instance and a boolean
:rtype: Tuple("T", bool)
"""
try:
return await self.get(*args, **kwargs), False
except NoMatch:
_defaults = _defaults or {}
return await self.create(**{**kwargs, **_defaults}), True
async def update_or_create(self, **kwargs: Any) -> "T":
"""
Updates the model, or in case there is no match in database creates a new one.
Actual call delegated to QuerySet.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: updated or created model
:rtype: Model
"""
pk_name = self.queryset.model_config.pkname
if "pk" in kwargs:
kwargs[pk_name] = kwargs.pop("pk")
if pk_name not in kwargs or kwargs.get(pk_name) is None:
return await self.create(**kwargs)
model = await self.queryset.get(pk=kwargs[pk_name])
return await model.update(**kwargs)
def filter( # noqa: A003, A001
self, *args: Any, **kwargs: Any
) -> "QuerysetProxy[T]":
"""
Allows you to filter by any `Model` attribute/field
as well as to fetch instances, with a filter across an FK relationship.
You can use special filter suffix to change the filter operands:
* exact - like `album__name__exact='Malibu'` (exact match)
* iexact - like `album__name__iexact='malibu'` (exact match case insensitive)
* contains - like `album__name__contains='Mal'` (sql like)
* icontains - like `album__name__icontains='mal'` (sql like case insensitive)
* in - like `album__name__in=['Malibu', 'Barclay']` (sql in)
* isnull - like `album__name__isnull=True` (sql is null)
(isnotnull `album__name__isnull=False` (sql is not null))
* gt - like `position__gt=3` (sql >)
* gte - like `position__gte=3` (sql >=)
* lt - like `position__lt=3` (sql <)
* lte - like `position__lte=3` (sql <=)
* startswith - like `album__name__startswith='Mal'` (exact start match)
* istartswith - like `album__name__istartswith='mal'` (case insensitive)
* endswith - like `album__name__endswith='ibu'` (exact end match)
* iendswith - like `album__name__iendswith='IBU'` (case insensitive)
Actual call delegated to QuerySet.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: filtered QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.filter(*args, **kwargs)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def exclude(
self, *args: Any, **kwargs: Any
) -> "QuerysetProxy[T]": # noqa: A003, A001
"""
Works exactly the same as filter and all modifiers (suffixes) are the same,
but returns a *not* condition.
So if you use `filter(name='John')` which is `where name = 'John'` in SQL,
the `exclude(name='John')` equals to `where name <> 'John'`
Note that all conditions are joined so if you pass multiple values it
becomes a union of conditions.
`exclude(name='John', age>=35)` will become
`where not (name='John' and age>=35)`
Actual call delegated to QuerySet.
:param kwargs: fields names and proper value types
:type kwargs: Any
:return: filtered QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.exclude(*args, **kwargs)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def select_all(self, follow: bool = False) -> "QuerysetProxy[T]":
"""
By default adds only directly related models.
If follow=True is set it adds also related models of related models.
To not get stuck in an infinite loop as related models also keep a relation
to parent model visited models set is kept.
That way already visited models that are nested are loaded, but the load do not
follow them inside. So Model A -> Model B -> Model C -> Model A -> Model X
will load second Model A but will never follow into Model X.
Nested relations of those kind need to be loaded manually.
:param follow: flag to trigger deep save -
by default only directly related models are saved
with follow=True also related models of related models are saved
:type follow: bool
:return: reloaded Model
:rtype: Model
"""
queryset = self.queryset.select_all(follow=follow)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def select_related(self, related: Union[List, str]) -> "QuerysetProxy[T]":
"""
Allows to prefetch related models during the same query.
**With `select_related` always only one query is run against the database**,
meaning that one (sometimes complicated) join is generated and later nested
models are processed in python.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
Actual call delegated to QuerySet.
:param related: list of relation field names, can be linked by '__' to nest
:type related: Union[List, str]
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.select_related(related)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def prefetch_related(self, related: Union[List, str]) -> "QuerysetProxy[T]":
"""
Allows to prefetch related models during query - but opposite to
`select_related` each subsequent model is fetched in a separate database query.
**With `prefetch_related` always one query per Model is run against the
database**, meaning that you will have multiple queries executed one
after another.
To fetch related model use `ForeignKey` names.
To chain related `Models` relation use double underscores between names.
Actual call delegated to QuerySet.
:param related: list of relation field names, can be linked by '__' to nest
:type related: Union[List, str]
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.prefetch_related(related)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def paginate(self, page: int, page_size: int = 20) -> "QuerysetProxy[T]":
"""
You can paginate the result which is a combination of offset and limit clauses.
Limit is set to page size and offset is set to (page-1) * page_size.
Actual call delegated to QuerySet.
:param page_size: numbers of items per page
:type page_size: int
:param page: page number
:type page: int
:return: QuerySet
:rtype: QuerySet
"""
queryset = self.queryset.paginate(page=page, page_size=page_size)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def limit(self, limit_count: int) -> "QuerysetProxy[T]":
"""
You can limit the results to desired number of parent models.
Actual call delegated to QuerySet.
:param limit_count: number of models to limit
:type limit_count: int
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.limit(limit_count)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def offset(self, offset: int) -> "QuerysetProxy[T]":
"""
You can also offset the results by desired number of main models.
Actual call delegated to QuerySet.
:param offset: numbers of models to offset
:type offset: int
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.offset(offset)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def fields(self, columns: Union[List, str, Set, Dict]) -> "QuerysetProxy[T]":
"""
With `fields()` you can select subset of model columns to limit the data load.
Note that `fields()` and `exclude_fields()` works both for main models
(on normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related`
models (with nested notation).
You can select specified fields by passing a `str, List[str], Set[str] or
dict` with nested definition.
To include related models use notation
`{related_name}__{column}[__{optional_next} etc.]`.
`fields()` can be called several times, building up the columns to select.
If you include related models into `select_related()` call but you won't specify
columns for those models in fields - implies a list of all fields for
those nested models.
Mandatory fields cannot be excluded as it will raise `ValidationError`,
to exclude a field it has to be nullable.
Pk column cannot be excluded - it's always auto added even if
not explicitly included.
You can also pass fields to include as dictionary or set.
To mark a field as included in a dictionary use it's name as key
and ellipsis as value.
To traverse nested models use nested dictionaries.
To include fields at last level instead of nested dictionary a set can be used.
To include whole nested model specify model related field name and ellipsis.
Actual call delegated to QuerySet.
:param columns: columns to include
:type columns: Union[List, str, Set, Dict]
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.fields(columns)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def exclude_fields(
self, columns: Union[List, str, Set, Dict]
) -> "QuerysetProxy[T]":
"""
With `exclude_fields()` you can select subset of model columns that will
be excluded to limit the data load.
It's the opposite of `fields()` method so check documentation above
to see what options are available.
Especially check above how you can pass also nested dictionaries
and sets as a mask to exclude fields from whole hierarchy.
Note that `fields()` and `exclude_fields()` works both for main models
(on normal queries like `get`, `all` etc.)
as well as `select_related` and `prefetch_related` models
(with nested notation).
Mandatory fields cannot be excluded as it will raise `ValidationError`,
to exclude a field it has to be nullable.
Pk column cannot be excluded - it's always auto added even
if explicitly excluded.
Actual call delegated to QuerySet.
:param columns: columns to exclude
:type columns: Union[List, str, Set, Dict]
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.exclude_fields(columns=columns)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
def order_by(self, columns: Union[List, str, "OrderAction"]) -> "QuerysetProxy[T]":
"""
With `order_by()` you can order the results from database based on your
choice of fields.
You can provide a string with field name or list of strings with fields names.
Ordering in sql will be applied in order of names you provide in order_by.
By default if you do not provide ordering `ormar` explicitly orders by
all primary keys
If you are sorting by nested models that causes that the result rows are
unsorted by the main model `ormar` will combine those children rows into
one main model.
The main model will never duplicate in the result
To order by main model field just provide a field name
To sort on nested models separate field names with dunder '__'.
You can sort this way across all relation types -> `ForeignKey`,
reverse virtual FK and `ManyToMany` fields.
To sort in descending order provide a hyphen in front of the field name
Actual call delegated to QuerySet.
:param columns: columns by which models should be sorted
:type columns: Union[List, str]
:return: QuerysetProxy
:rtype: QuerysetProxy
"""
queryset = self.queryset.order_by(columns)
return self.__class__(
relation=self.relation, type_=self.type_, to=self.to, qryset=queryset
)
collerek-ormar-c09209a/ormar/relations/relation.py 0000664 0000000 0000000 00000016161 15130200524 0022265 0 ustar 00root root 0000000 0000000 from enum import Enum
from typing import (
TYPE_CHECKING,
Generic,
List,
Optional,
Set,
Type,
TypeVar,
Union,
cast,
)
import ormar # noqa I100
from ormar.exceptions import RelationshipInstanceError # noqa I100
from ormar.relations.relation_proxy import RelationProxy
if TYPE_CHECKING: # pragma no cover
from ormar.models import Model, NewBaseModel, T
from ormar.relations import RelationsManager
else:
T = TypeVar("T", bound="Model")
class RelationType(Enum):
    """
    Different types of relations supported by ormar:

    * ForeignKey = PRIMARY
    * reverse ForeignKey = REVERSE
    * ManyToMany = MULTIPLE
    * Through model of ManyToMany = THROUGH
    """

    PRIMARY = 1
    REVERSE = 2
    MULTIPLE = 3
    THROUGH = 4
class Relation(Generic[T]):
    """
    Keeps related Models and handles adding/removing of the children.
    """

    def __init__(
        self,
        manager: "RelationsManager",
        type_: RelationType,
        field_name: str,
        to: Type["T"],
        through: Optional[Type["Model"]] = None,
    ) -> None:
        """
        Initialize the Relation and keep the related models either as instances of
        passed Model, or as a RelationProxy which is basically a list of models with
        some special behavior, as it exposes QuerySetProxy and allows querying the
        related models already pre filtered by parent model.

        :param manager: reference to relation manager
        :type manager: RelationsManager
        :param type_: type of the relation
        :type type_: RelationType
        :param field_name: name of the relation field
        :type field_name: str
        :param to: model to which relation leads to
        :type to: Type[Model]
        :param through: model through which relation goes for m2m relations
        :type through: Type[Model]
        """
        self.manager = manager
        self._owner: "Model" = manager.owner
        self._type: RelationType = type_
        # indexes of entries in related_models whose weakrefs are dead;
        # cleaned up lazily in get() / __repr__ via _clean_related()
        self._to_remove: Set = set()
        self.to: Type["T"] = to
        self._through = through
        self.field_name: str = field_name
        # PRIMARY/THROUGH relations keep a single child (assigned later in add()),
        # REVERSE/MULTIPLE relations keep children in a list-like RelationProxy
        self.related_models: Optional[Union[RelationProxy, "Model"]] = (
            RelationProxy(relation=self, type_=type_, to=to, field_name=field_name)
            if type_ in (RelationType.REVERSE, RelationType.MULTIPLE)
            else None
        )

    def clear(self) -> None:
        """
        Clears the relation - resets the stored related model(s) and the
        relation entry in owner model's __dict__.
        """
        if self._type in (RelationType.PRIMARY, RelationType.THROUGH):
            self.related_models = None
            self._owner.__dict__[self.field_name] = None
        elif self.related_models is not None:
            related_models = cast("RelationProxy", self.related_models)
            related_models._clear()
            self._owner.__dict__[self.field_name] = None

    @property
    def through(self) -> Type["Model"]:
        """
        Returns the through model of the m2m relation.

        :raises RelationshipInstanceError: if no through model is set
        :return: through model
        :rtype: Type[Model]
        """
        if not self._through:  # pragma: no cover
            raise RelationshipInstanceError("Relation does not have through model!")
        return self._through

    def _clean_related(self) -> None:
        """
        Removes dead weakrefs from RelationProxy.

        Rebuilds the RelationProxy without the entries whose indexes were
        collected in _to_remove, mirrors the cleaned list into owner's
        __dict__ and resets the pending removal set.
        """
        cleaned_data = [
            x
            for i, x in enumerate(self.related_models)  # type: ignore
            if i not in self._to_remove
        ]
        self.related_models = RelationProxy(
            relation=self,
            type_=self._type,
            to=self.to,
            field_name=self.field_name,
            data_=cleaned_data,
        )
        relation_name = self.field_name
        self._owner.__dict__[relation_name] = cleaned_data
        self._to_remove = set()

    def _find_existing(
        self, child: Union["NewBaseModel", Type["NewBaseModel"]]
    ) -> Optional[int]:
        """
        Find child model in RelationProxy if exists.

        As a side effect marks dead weakrefs for removal in _to_remove when
        they are detected while resolving the child's index.

        :param child: child model to find
        :type child: Model
        :return: index of child in RelationProxy
        :rtype: Optional[ind]
        """
        if not isinstance(self.related_models, RelationProxy):  # pragma nocover
            raise ValueError("Cannot find existing models in parent relation type")

        if child not in self.related_models:
            return None
        else:
            # We need to clear the weakrefs that don't point to anything anymore
            # There's an assumption here that if some of the related models
            # went out of scope, then they all did, so we can just check the first one
            try:
                self.related_models[0].__repr__.__self__
                return self.related_models.index(child)
            except ReferenceError:
                missing = self.related_models._get_list_of_missing_weakrefs()
                self._to_remove.update(missing)
                return self.related_models.index(child)

    def add(self, child: "Model") -> None:
        """
        Adds child Model to relation, either sets child as related model or adds
        it to the list in RelationProxy depending on relation type.

        :param child: model to add to relation
        :type child: Model
        """
        relation_name = self.field_name
        if self._type in (RelationType.PRIMARY, RelationType.THROUGH):
            self.related_models = child
            self._owner.__dict__[relation_name] = child
        else:
            if self._find_existing(child) is None:
                self.related_models.append(child)  # type: ignore
                # keep owner's __dict__ entry as a list mirroring the proxy
                rel = self._owner.__dict__.get(relation_name, [])
                rel = rel or []
                if not isinstance(rel, list):
                    rel = [rel]
                self._populate_owner_side_dict(rel=rel, child=child)
                self._owner.__dict__[relation_name] = rel

    def _populate_owner_side_dict(self, rel: List["Model"], child: "Model") -> None:
        """
        Appends child to the owner's __dict__ list if not already present.
        A ReferenceError from the membership check means the list holds dead
        weakrefs, so it is rebuilt with only the new child.

        :param rel: list of related models kept in owner's __dict__
        :type rel: List[Model]
        :param child: child model to append
        :type child: Model
        """
        try:
            if child not in rel:
                rel.append(child)
        except ReferenceError:
            rel.clear()
            rel.append(child)

    def remove(self, child: Union["NewBaseModel", Type["NewBaseModel"]]) -> None:
        """
        Removes child Model from relation, either sets None as related model or removes
        it from the list in RelationProxy depending on relation type.

        :param child: model to remove from relation
        :type child: Model
        """
        relation_name = self.field_name
        if self._type == RelationType.PRIMARY:
            if self.related_models == child:
                self.related_models = None
                del self._owner.__dict__[relation_name]
        else:
            position = self._find_existing(child)
            if position is not None:
                self.related_models.pop(position)  # type: ignore
                del self._owner.__dict__[relation_name][position]

    def get(self) -> Optional[Union[List["Model"], "Model"]]:
        """
        Return the related model or models from RelationProxy.
        Lazily drops entries whose weakrefs died since the last access.

        :return: related model/models if set
        :rtype: Optional[Union[List[Model], Model]]
        """
        if self._to_remove:
            self._clean_related()
        return self.related_models

    def __repr__(self) -> str:  # pragma no cover
        if self._to_remove:
            self._clean_related()
        return str(self.related_models)
collerek-ormar-c09209a/ormar/relations/relation_manager.py 0000664 0000000 0000000 00000013654 15130200524 0023763 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Type, Union
from weakref import proxy
from ormar.relations.relation import Relation, RelationType
from ormar.relations.utils import get_relations_sides_and_names
if TYPE_CHECKING: # pragma no cover
from ormar.fields import BaseField, ForeignKeyField
from ormar.models import Model, NewBaseModel
class RelationsManager:
    """
    Manages relations on a Model, each Model has it's own instance.
    Keeps one ``Relation`` object per relation field, registered under the
    field name, and delegates get/add/remove calls to the proper relation.
    """
    def __init__(
        self,
        related_fields: Optional[List["ForeignKeyField"]] = None,
        owner: Optional["Model"] = None,
    ) -> None:
        """
        :param related_fields: fields with relations declared on the owner model
        :type related_fields: Optional[List[ForeignKeyField]]
        :param owner: model instance that owns this manager
        :type owner: Optional[Model]
        """
        # weakref proxy avoids a strong reference cycle owner <-> manager
        self.owner = proxy(owner)
        self._related_fields = related_fields or []
        self._related_names = [field.name for field in self._related_fields]
        self._relations: Dict[str, Relation] = dict()
        for field in self._related_fields:
            self._add_relation(field)
    def __contains__(self, item: str) -> bool:
        """
        Checks if relation with given name is already registered.
        :param item: name of attribute
        :type item: str
        :return: result of the check
        :rtype: bool
        """
        return item in self._related_names
    def clear(self) -> None:
        """
        Clears all registered relations, dropping the related model references.
        """
        for relation in self._relations.values():
            relation.clear()
    def get(self, name: str) -> Optional[Union["Model", Sequence["Model"]]]:
        """
        Returns the related model/models if relation is set.
        Actual call is delegated to Relation instance registered under relation name.
        :param name: name of the relation
        :type name: str
        :return: related model or list of related models if set
        :rtype: Optional[Union[Model, Sequence[Model]]]
        """
        relation = self._relations.get(name, None)
        if relation is not None:
            return relation.get()
        return None  # pragma nocover
    @staticmethod
    def add(parent: "Model", child: "Model", field: "ForeignKeyField") -> None:
        """
        Adds relation on both sides -> meaning on both child and parent models.
        One side of the relation is always weakref proxy to avoid circular refs.
        Based on the side from which relation is added and relation name actual names
        of parent and child relations are established. The related models are registered
        on both ends.
        :param parent: parent model on which relation should be registered
        :type parent: Model
        :param child: child model to register
        :type child: Model
        :param field: field with relation definition
        :type field: ForeignKeyField
        """
        # resolves which side becomes a weakref proxy and under which names
        # each side should register the other
        (parent, child, child_name, to_name) = get_relations_sides_and_names(
            field, parent, child
        )
        parent_relation = parent._orm._get(child_name)
        if parent_relation:
            parent_relation.add(child)  # type: ignore
        child_relation = child._orm._get(to_name)
        if child_relation:
            child_relation.add(parent)
    def remove(
        self, name: str, child: Union["NewBaseModel", Type["NewBaseModel"]]
    ) -> None:
        """
        Removes given child from relation with given name.
        Since you can have many relations between two models you need to pass a name
        of relation from which you want to remove the child.
        :param name: name of the relation
        :type name: str
        :param child: child to remove from relation
        :type child: Union[Model, Type[Model]]
        """
        relation = self._get(name)
        if relation:
            relation.remove(child)
    @staticmethod
    def remove_parent(
        item: Union["NewBaseModel", Type["NewBaseModel"]], parent: "Model", name: str
    ) -> None:
        """
        Removes given parent from relation with given name.
        Since you can have many relations between two models you need to pass a name
        of relation from which you want to remove the parent.
        The link is removed from both sides of the relation.
        :param item: model with parent registered
        :type item: Union[Model, Type[Model]]
        :param parent: parent Model
        :type parent: Model
        :param name: name of the relation
        :type name: str
        """
        relation_name = item.ormar_config.model_fields[name].get_related_name()
        item._orm.remove(name, parent)
        parent._orm.remove(relation_name, item)
    def _get(self, name: str) -> Optional[Relation]:
        """
        Returns the actual relation and not the related model(s).
        :param name: name of the relation
        :type name: str
        :return: Relation instance or None if not registered
        :rtype: Optional[ormar.relations.relation.Relation]
        """
        relation = self._relations.get(name, None)
        if relation is not None:
            return relation
        return None
    def _get_relation_type(self, field: "BaseField") -> RelationType:
        """
        Returns type of the relation declared on a field.
        :param field: field with relation declaration
        :type field: BaseField
        :return: type of the relation defined on field
        :rtype: RelationType
        """
        if field.is_multi:
            return RelationType.MULTIPLE
        if field.is_through:
            return RelationType.THROUGH
        # plain fk -> PRIMARY side, virtual (reverse) fk -> REVERSE side
        return RelationType.PRIMARY if not field.virtual else RelationType.REVERSE
    def _add_relation(self, field: "BaseField") -> None:
        """
        Registers relation in the manager.
        Adds Relation instance under field.name.
        :param field: field with relation declaration
        :type field: BaseField
        """
        self._relations[field.name] = Relation(
            manager=self,
            type_=self._get_relation_type(field),
            field_name=field.name,
            to=field.to,
            through=getattr(field, "through", None),
        )
collerek-ormar-c09209a/ormar/relations/relation_proxy.py 0000664 0000000 0000000 00000026501 15130200524 0023525 0 ustar 00root root 0000000 0000000 from typing import (
TYPE_CHECKING,
Any,
Dict,
Generic,
List,
Optional,
Set,
Type,
TypeVar,
)
from typing_extensions import SupportsIndex
import ormar
from ormar.exceptions import NoMatch, RelationshipInstanceError
from ormar.relations.querysetproxy import QuerysetProxy
if TYPE_CHECKING: # pragma no cover
from ormar import Model, RelationType
from ormar.models import T
from ormar.queryset import QuerySet
from ormar.relations import Relation
else:
T = TypeVar("T", bound="Model")
class RelationProxy(Generic[T], List[T]):
    """
    Proxy of the Relation that is a list with special methods.
    Maintains an internal hash -> index map (_relation_cache) so that
    __contains__ and index() are O(1) dict lookups instead of list scans.
    """
    def __init__(
        self,
        relation: "Relation",
        type_: "RelationType",
        to: Type["T"],
        field_name: str,
        data_: Any = None,
    ) -> None:
        """
        :param relation: relation this proxy is a view of
        :type relation: Relation
        :param type_: type of the relation
        :type type_: RelationType
        :param to: class of the related model
        :type to: Type[T]
        :param field_name: name of the relation field on the owner
        :type field_name: str
        :param data_: optional initial related models
        :type data_: Any
        """
        self.relation: "Relation[T]" = relation
        self.type_: "RelationType" = type_
        self.field_name = field_name
        self._owner: "Model" = self.relation.manager.owner
        self.queryset_proxy: QuerysetProxy[T] = QuerysetProxy[T](
            relation=self.relation, to=to, type_=type_
        )
        self._related_field_name: Optional[str] = None
        self._relation_cache: Dict[int, int] = {}
        validated_data = []
        if data_ is not None:
            idx = 0
            for d in data_:
                try:
                    # dead weakref proxies raise ReferenceError on hash
                    # access and are silently skipped
                    self._relation_cache[d.__hash__()] = idx
                    validated_data.append(d)
                    idx += 1
                except ReferenceError:
                    pass
        super().__init__(validated_data or ())
    @property
    def related_field_name(self) -> str:
        """
        On first access calculates the name of the related field, later stored in
        _related_field_name property.
        :return: name of the related field
        :rtype: str
        """
        if self._related_field_name:
            return self._related_field_name
        owner_field = self._owner.ormar_config.model_fields[self.field_name]
        self._related_field_name = owner_field.get_related_name()
        return self._related_field_name
    def __getitem__(self, item: Any) -> "T":  # type: ignore
        return super().__getitem__(item)
    def append(self, item: "T") -> None:
        """
        Appends an item to the list in place
        and registers it in the index cache.
        :param item: The generic item of the list
        :type item: T
        """
        idx = len(self)
        self._relation_cache[item.__hash__()] = idx
        super().append(item)
    def update_cache(self, prev_hash: int, new_hash: int) -> None:
        """
        Updates the cache from the old hash to the new one.
        This maintains the index cache, which allows O(1) indexing and
        existence checks. An unknown prev_hash is silently ignored.
        :param prev_hash: The hash to update
        :type prev_hash: int
        :param new_hash: The new hash to update to
        :type new_hash: int
        """
        try:
            idx = self._relation_cache.pop(prev_hash)
            self._relation_cache[new_hash] = idx
        except KeyError:
            pass
    def index(self, item: T, *args: Any) -> int:
        """
        Gets the index of the item in the list.
        O(1) cache lookup; *args (start/stop of list.index) are accepted for
        signature compatibility but ignored, and a missing item raises
        KeyError rather than list.index's ValueError.
        :param item: The item to get the index of
        :type item: "T"
        :return: index of the item in the list
        :rtype: int
        """
        return self._relation_cache[item.__hash__()]
    def _get_list_of_missing_weakrefs(self) -> Set[int]:
        """
        Iterates through the list and checks for weakrefs.
        :return: The set of missing weakref indices
        :rtype: Set[int]
        """
        to_remove = set()
        for ind, relation_child in enumerate(self[:]):
            try:
                # touching a bound attribute raises ReferenceError when the
                # weakly referenced model has been garbage collected
                relation_child.__repr__.__self__  # type: ignore
            except ReferenceError:  # pragma no cover
                to_remove.add(ind)
        return to_remove
    def pop(self, index: SupportsIndex = 0) -> T:
        """
        Pops the index off the list and returns it. By default,
        it pops off the element at index 0 (unlike list.pop, which
        defaults to the last element).
        This also clears the value from the relation cache and shifts the
        cached indices of all following items down by one.
        NOTE(review): cache re-indexing assumes a non-negative index — confirm
        callers never pass negative indices.
        :param index: The index to pop
        :type index: SupportsIndex
        :return: The item at the provided index
        :rtype: "T"
        """
        item = self[index]
        # Try to delete it, but do it a long way
        # if weakly-referenced thing doesn't exist
        try:
            self._relation_cache.pop(item.__hash__())
        except ReferenceError:
            # fall back to a linear scan for the cache entry with this index
            for hash_, idx in self._relation_cache.items():
                if idx == index:
                    self._relation_cache.pop(hash_)
                    break
        index_int = int(index)
        for idx in range(index_int + 1, len(self)):
            self._relation_cache[self[idx].__hash__()] -= 1
        return super().pop(index)
    def __contains__(self, item: object) -> bool:
        """
        Checks whether the item exists in self. This relies
        on the relation cache, which is a hashmap of values
        in the list. It runs in O(1) time.
        Dead weakref proxies are reported as not contained.
        :param item: The item to check if the list contains
        :type item: object
        """
        try:
            return item.__hash__() in self._relation_cache
        except ReferenceError:
            return False
    def __getattribute__(self, item: str) -> Any:
        """
        Since some QuerySetProxy methods overwrite builtin list methods we
        catch calls to them and delegate it to QuerySetProxy instead.
        :param item: name of attribute
        :type item: str
        :return: value of attribute
        :rtype: Any
        """
        if item in ["count", "clear"]:
            self._initialize_queryset()
            return getattr(self.queryset_proxy, item)
        return super().__getattribute__(item)
    def __getattr__(self, item: str) -> Any:
        """
        Delegates calls for non existing attributes to QuerySetProxy.
        :param item: name of attribute/method
        :type item: str
        :return: method from QuerySetProxy if exists
        :rtype: method
        """
        self._initialize_queryset()
        return getattr(self.queryset_proxy, item)
    def _clear(self) -> None:
        # Internal list clear ("clear" itself is delegated to QuerySetProxy
        # in __getattribute__); empties both the list and the index cache.
        self._relation_cache.clear()
        super().clear()
    def _initialize_queryset(self) -> None:
        """
        Initializes the QuerySetProxy if not yet initialized.
        """
        if not self._check_if_queryset_is_initialized():
            self.queryset_proxy.queryset = self._set_queryset()
    def _check_if_queryset_is_initialized(self) -> bool:
        """
        Checks if the QuerySetProxy is already set and ready.
        :return: result of the check
        :rtype: bool
        """
        return (
            hasattr(self.queryset_proxy, "queryset")
            and self.queryset_proxy.queryset is not None
        )
    def _check_if_model_saved(self) -> None:
        """
        Verifies if the parent model of the relation has been already saved.
        Otherwise QuerySetProxy cannot filter by parent primary key.
        :raises RelationshipInstanceError: if the owner has no primary key set
        """
        pk_value = self._owner.pk
        if not pk_value:
            raise RelationshipInstanceError(
                "You cannot query relationships from unsaved model."
            )
    def _set_queryset(self) -> "QuerySet[T]":
        """
        Creates new QuerySet with relation model and pre filters it with currents
        parent model primary key, so all queries by definition are already related
        to the parent model only, without need for user to filter them.
        :return: initialized QuerySet
        :rtype: QuerySet
        """
        related_field_name = self.related_field_name
        pkname = self._owner.get_column_alias(self._owner.ormar_config.pkname)
        self._check_if_model_saved()
        kwargs = {f"{related_field_name}__{pkname}": self._owner.pk}
        queryset = (
            ormar.QuerySet(
                model_cls=self.relation.to, proxy_source_model=self._owner.__class__
            )
            .select_related(related_field_name)
            .filter(**kwargs)
        )
        return queryset
    async def remove(  # type: ignore
        self, item: "T", keep_reversed: bool = True
    ) -> None:
        """
        Removes the related from relation with parent.
        Through models are automatically deleted for m2m relations.
        For reverse FK relations keep_reversed flag marks if the reversed models
        should be kept or deleted from the database too (False means that models
        will be deleted, and not only removed from relation).
        Sends pre_relation_remove / post_relation_remove signals.
        :raises NoMatch: if the item is not part of this relation
        :param item: child to remove from relation
        :type item: Model
        :param keep_reversed: flag if the reversed model should be kept or deleted too
        :type keep_reversed: bool
        """
        if item not in self:
            raise NoMatch(
                f"Object {self._owner.get_name()} has no "
                f"{item.get_name()} with given primary key!"
            )
        await self._owner.signals.pre_relation_remove.send(
            sender=self._owner.__class__,
            instance=self._owner,
            child=item,
            relation_name=self.field_name,
        )
        index_to_remove = self._relation_cache[item.__hash__()]
        self.pop(index_to_remove)
        relation_name = self.related_field_name
        relation = item._orm._get(relation_name)
        if relation:
            # detach the owner from the reverse side as well
            relation.remove(self._owner)
        self.relation.remove(item)
        if self.type_ == ormar.RelationType.MULTIPLE:
            await self.queryset_proxy.delete_through_instance(item)
        else:
            if keep_reversed:
                setattr(item, relation_name, None)
                await item.update()
            else:
                await item.delete()
        await self._owner.signals.post_relation_remove.send(
            sender=self._owner.__class__,
            instance=self._owner,
            child=item,
            relation_name=self.field_name,
        )
    async def add(self, item: "T", **kwargs: Any) -> None:
        """
        Adds child model to relation.
        For ManyToMany relations through instance is automatically created.
        Sends pre_relation_add / post_relation_add signals.
        :param kwargs: dict of additional keyword arguments for through instance
        :type kwargs: Any
        :param item: child to add to relation
        :type item: Model
        """
        # target cache index is captured before any awaits; an already
        # related item reuses its existing index
        new_idx = len(self) if item not in self else self.index(item)
        relation_name = self.related_field_name
        await self._owner.signals.pre_relation_add.send(
            sender=self._owner.__class__,
            instance=self._owner,
            child=item,
            relation_name=self.field_name,
            passed_kwargs=kwargs,
        )
        self._check_if_model_saved()
        if self.type_ == ormar.RelationType.MULTIPLE:
            await self.queryset_proxy.create_through_instance(item, **kwargs)
            setattr(self._owner, self.field_name, item)
        else:
            setattr(item, relation_name, self._owner)
            await item.upsert()
        self._relation_cache[item.__hash__()] = new_idx
        await self._owner.signals.post_relation_add.send(
            sender=self._owner.__class__,
            instance=self._owner,
            child=item,
            relation_name=self.field_name,
            passed_kwargs=kwargs,
        )
collerek-ormar-c09209a/ormar/relations/utils.py 0000664 0000000 0000000 00000002043 15130200524 0021602 0 ustar 00root root 0000000 0000000 from typing import TYPE_CHECKING, Tuple
from weakref import proxy
from ormar.fields.foreign_key import ForeignKeyField
if TYPE_CHECKING: # pragma no cover
from ormar import Model
def get_relations_sides_and_names(
    to_field: ForeignKeyField, parent: "Model", child: "Model"
) -> Tuple["Model", "Model", str, str]:
    """
    Resolves the relation names for both sides and wraps one side of the
    relation in a weakref.proxy to avoid circular references between models.
    For virtual (reverse) relations the parent/child roles are swapped and
    the two relation names are exchanged accordingly.
    :param to_field: field with relation definition
    :type to_field: ForeignKeyField
    :param parent: parent model
    :type parent: Model
    :param child: child model
    :type child: Model
    :return: parent, child, child_name, to_name
    :rtype: Tuple["Model", "Model", str, str]
    """
    name = to_field.name
    related_name = to_field.get_related_name()
    if to_field.virtual:
        # reverse side: swap sides and names, weak-proxy the original child
        return proxy(child), parent, name, related_name
    return parent, proxy(child), related_name, name
collerek-ormar-c09209a/ormar/signals/ 0000775 0000000 0000000 00000000000 15130200524 0017531 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/ormar/signals/__init__.py 0000664 0000000 0000000 00000000402 15130200524 0021636 0 ustar 00root root 0000000 0000000 """
Signals and SignalEmitter that gathers the signals on models OrmarConfig.
Used to signal receivers functions about events, i.e. post_save, pre_delete etc.
"""
from ormar.signals.signal import Signal, SignalEmitter
__all__ = ["Signal", "SignalEmitter"]
collerek-ormar-c09209a/ormar/signals/signal.py 0000664 0000000 0000000 00000006576 15130200524 0021376 0 ustar 00root root 0000000 0000000 import asyncio
import inspect
from typing import TYPE_CHECKING, Any, Callable, Dict, Tuple, Type, Union
from ormar.exceptions import SignalDefinitionError
if TYPE_CHECKING: # pragma: no cover
from ormar import Model
def callable_accepts_kwargs(func: Callable) -> bool:
    """
    Checks if function accepts **kwargs.
    :param func: function which signature needs to be checked
    :type func: function
    :return: result of the check
    :rtype: bool
    """
    # A callable accepts **kwargs iff any parameter is of kind VAR_KEYWORD.
    # Test the condition directly instead of filtering and relying on the
    # truthiness of inspect.Parameter objects.
    return any(
        p.kind == p.VAR_KEYWORD
        for p in inspect.signature(func).parameters.values()
    )
def make_id(target: Any) -> Union[int, Tuple[int, int]]:
    """
    Creates an id for a function or bound method, used as a key to store
    signal receivers.
    Bound methods (objects exposing ``__func__``) are keyed by the pair
    (id of instance, id of underlying function); everything else by its id.
    :param target: target which id we want
    :type target: Any
    :return: id of the target
    :rtype: Union[int, Tuple[int, int]]
    """
    if hasattr(target, "__func__"):
        owner_id = id(target.__self__)
        func_id = id(target.__func__)
        return owner_id, func_id
    return id(target)
class Signal:
    """
    Signal that notifies all receiver functions.
    In ormar used by models to send pre_save, post_save etc. signals.
    """
    def __init__(self) -> None:
        # Receivers keyed by make_id() of the callable (an int, or a
        # (id(instance), id(function)) tuple for bound methods) so the
        # same receiver is registered only once.
        self._receivers: Dict[Union[int, Tuple[int, int]], Callable] = {}
    def connect(self, receiver: Callable) -> None:
        """
        Connects given receiver function to the signal.
        Re-connecting an already registered receiver is a no-op.
        :raises SignalDefinitionError: if receiver is not callable
        or does not accept **kwargs
        :param receiver: receiver function
        :type receiver: Callable
        """
        if not callable(receiver):
            raise SignalDefinitionError("Signal receivers must be callable.")
        if not callable_accepts_kwargs(receiver):
            raise SignalDefinitionError(
                "Signal receivers must accept **kwargs argument."
            )
        new_receiver_key = make_id(receiver)
        if new_receiver_key not in self._receivers:
            self._receivers[new_receiver_key] = receiver
    def disconnect(self, receiver: Callable) -> bool:
        """
        Removes the receiver function from the signal.
        :param receiver: receiver function
        :type receiver: Callable
        :return: flag if receiver was removed
        :rtype: bool
        """
        # pop returns None when the receiver was never registered
        receiver_func = self._receivers.pop(make_id(receiver), None)
        return receiver_func is not None
    async def send(self, sender: Type["Model"], **kwargs: Any) -> None:
        """
        Notifies all receiver functions with given kwargs.
        All receiver coroutines are awaited concurrently via asyncio.gather.
        :param sender: model that sends the signal
        :type sender: Type["Model"]
        :param kwargs: arguments passed to receivers
        :type kwargs: Any
        """
        receivers = [
            receiver_func(sender=sender, **kwargs)
            for receiver_func in self._receivers.values()
        ]
        await asyncio.gather(*receivers)
class SignalEmitter(dict):
    """
    Dictionary-based emitter that exposes signals as attributes.
    Accessing an unknown signal name lazily creates an empty Signal for it.
    """
    def __getattr__(self, item: str) -> Signal:
        # create the signal on first access, then return the stored one
        if item not in self:
            self[item] = Signal()
        return self[item]
    def __setattr__(self, key: str, value: Signal) -> None:
        # only Signal instances may be stored on the emitter
        if not isinstance(value, Signal):
            raise SignalDefinitionError(f"{value} is not valid signal")
        dict.__setitem__(self, key, value)
collerek-ormar-c09209a/ormar/warnings.py 0000664 0000000 0000000 00000003534 15130200524 0020300 0 ustar 00root root 0000000 0000000 # Adopted from pydantic
from typing import Optional, Tuple
class OrmarDeprecationWarning(DeprecationWarning):
    """An Ormar specific deprecation warning.
    This warning is raised when using deprecated functionality in Ormar.
    It provides information on when the deprecation was introduced and
    the expected version in which the corresponding functionality will be removed.
    Attributes:
        message: Description of the warning
        since: Ormar version in which the deprecation was introduced
        expected_removal: Ormar version in which the functionality will be removed
    """
    message: str
    since: Tuple[int, int]
    expected_removal: Tuple[int, int]
    def __init__(
        self,
        message: str,
        *args: object,
        since: Tuple[int, int],
        expected_removal: Optional[Tuple[int, int]] = None,
    ) -> None:  # pragma: no cover
        super().__init__(message, *args)
        # trailing dot is stripped so __str__ can append its own punctuation
        self.message = message.rstrip(".")
        self.since = since
        # by default removal is expected in the next major version
        self.expected_removal = (
            expected_removal if expected_removal is not None else (since[0] + 1, 0)
        )
    def __str__(self) -> str:  # pragma: no cover
        message = (
            f"{self.message}. Deprecated in Ormar V{self.since[0]}.{self.since[1]}"
            f" to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}."
        )
        # deprecations introduced in 0.20 additionally point to the migration guide
        if self.since == (0, 20):
            message += " See Ormar V0.20 Migration Guide at https://collerek.github.io/ormar/migration/"
        return message
class OrmarDeprecatedSince020(OrmarDeprecationWarning):
    """A specific `OrmarDeprecationWarning` subclass defining
    functionality deprecated since Ormar 0.20, scheduled for
    removal in Ormar 0.30."""
    def __init__(self, message: str, *args: object) -> None:  # pragma: no cover
        super().__init__(message, *args, since=(0, 20), expected_removal=(0, 30))
collerek-ormar-c09209a/poetry.lock 0000664 0000000 0000000 00000737527 15130200524 0017172 0 ustar 00root root 0000000 0000000 # This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
[[package]]
name = "aiomysql"
version = "0.3.0"
description = "MySQL driver for asyncio."
optional = true
python-versions = ">=3.9"
groups = ["main"]
markers = "extra == \"mysql\" or extra == \"all\""
files = [
{file = "aiomysql-0.3.0-py3-none-any.whl", hash = "sha256:448b31e0d11b6e8dd6e8356b7f39cec0e44293b2c6e368c9e8eeeaff77b3cbec"},
{file = "aiomysql-0.3.0.tar.gz", hash = "sha256:9e850677d456dc4fea59ad871d4bcdb6adbce02326aaa32a97a43125256f23de"},
]
[package.dependencies]
PyMySQL = ">=1.0"
[package.extras]
rsa = ["PyMySQL[rsa] (>=1.0)"]
sa = ["sqlalchemy (>=1.3,<1.4)"]
[[package]]
name = "aiopg"
version = "1.4.0"
description = "Postgres integration with asyncio."
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"aiopg\" or extra == \"all\""
files = [
{file = "aiopg-1.4.0-py3-none-any.whl", hash = "sha256:aea46e8aff30b039cfa818e6db4752c97656e893fc75e5a5dc57355a9e9dedbd"},
{file = "aiopg-1.4.0.tar.gz", hash = "sha256:116253bef86b4d954116716d181e9a0294037f266718b2e1c9766af995639d71"},
]
[package.dependencies]
async-timeout = ">=3.0,<5.0"
psycopg2-binary = ">=2.9.5"
[package.extras]
sa = ["sqlalchemy[postgresql-psycopg2binary] (>=1.3,<1.5)"]
[[package]]
name = "aiosqlite"
version = "0.20.0"
description = "asyncio bridge to the standard sqlite3 module"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"sqlite\" or extra == \"all\""
files = [
{file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"},
{file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"},
]
[package.dependencies]
typing_extensions = ">=4.0"
[package.extras]
dev = ["attribution (==1.7.0)", "black (==24.2.0)", "coverage[toml] (==7.4.1)", "flake8 (==7.0.0)", "flake8-bugbear (==24.2.6)", "flit (==3.9.0)", "mypy (==1.8.0)", "ufmt (==2.3.0)", "usort (==1.0.8.post1)"]
docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"]
[[package]]
name = "annotated-doc"
version = "0.0.4"
description = "Document parameters, class attributes, return types, and variables inline, with Annotated."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"},
{file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"},
]
[[package]]
name = "annotated-types"
version = "0.7.0"
description = "Reusable constraint types to use with typing.Annotated"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
[[package]]
name = "anyio"
version = "4.5.2"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"},
{file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"},
]
[package.dependencies]
exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
idna = ">=2.8"
sniffio = ">=1.1"
typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
[package.extras]
doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21.0b1) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\""]
trio = ["trio (>=0.26.1)"]
[[package]]
name = "asgi-lifespan"
version = "2.1.0"
description = "Programmatic startup/shutdown of ASGI apps."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "asgi-lifespan-2.1.0.tar.gz", hash = "sha256:5e2effaf0bfe39829cf2d64e7ecc47c7d86d676a6599f7afba378c31f5e3a308"},
{file = "asgi_lifespan-2.1.0-py3-none-any.whl", hash = "sha256:ed840706680e28428c01e14afb3875d7d76d3206f3d5b2f2294e059b5c23804f"},
]
[package.dependencies]
sniffio = "*"
[[package]]
name = "async-timeout"
version = "4.0.3"
description = "Timeout context manager for asyncio programs"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "(extra == \"postgresql\" or extra == \"postgres\" or extra == \"all\") and python_version < \"3.11.0\" or extra == \"aiopg\" or extra == \"all\""
files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
[[package]]
name = "asyncpg"
version = "0.30.0"
description = "An asyncio PostgreSQL driver"
optional = true
python-versions = ">=3.8.0"
groups = ["main"]
markers = "extra == \"postgresql\" or extra == \"postgres\" or extra == \"all\""
files = [
{file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"},
{file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"},
{file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"},
{file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"},
{file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"},
{file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"},
{file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"},
{file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"},
{file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"},
{file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"},
{file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"},
{file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"},
{file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"},
{file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"},
{file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"},
{file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"},
{file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"},
{file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"},
{file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"},
{file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"},
{file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"},
{file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"},
{file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"},
{file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"},
{file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"},
{file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"},
{file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"},
{file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"},
{file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"},
{file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"},
{file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"},
{file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"},
{file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"},
{file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"},
{file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"},
{file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"},
{file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"},
{file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"},
{file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"},
{file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"},
{file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"},
{file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"},
{file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"},
{file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"},
{file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"},
{file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"},
{file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"},
{file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"},
{file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"},
]
[package.dependencies]
async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.0\""}
[package.extras]
docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""]
test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""]
[[package]]
name = "babel"
version = "2.16.0"
description = "Internationalization utilities"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
{file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
]
[package.extras]
dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "black"
version = "24.8.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"},
{file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"},
{file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"},
{file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"},
{file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"},
{file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"},
{file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"},
{file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"},
{file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"},
{file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"},
{file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"},
{file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"},
{file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"},
{file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"},
{file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"},
{file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"},
{file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"},
{file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"},
{file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"},
{file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"},
{file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"},
{file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
[package.extras]
colorama = ["colorama (>=0.4.3)"]
d = ["aiohttp (>=3.7.4) ; sys_platform != \"win32\" or implementation_name != \"pypy\"", "aiohttp (>=3.7.4,!=3.9.0) ; sys_platform == \"win32\" and implementation_name == \"pypy\""]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
[[package]]
name = "certifi"
version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
{file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
[[package]]
name = "cffi"
version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
{file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
{file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
{file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
{file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
{file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
{file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
{file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
{file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
markers = {main = "(extra == \"crypto\" or extra == \"all\") and platform_python_implementation != \"PyPy\""}
[package.dependencies]
pycparser = "*"
[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "charset-normalizer"
version = "3.4.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
groups = ["dev"]
files = [
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
{file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
{file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
{file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
{file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
{file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
{file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
{file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
{file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
{file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
{file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
{file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
{file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
{file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
{file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
{file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
{file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
{file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
{file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
{file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
{file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
{file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
{file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
{file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
{file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
{file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
{file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
{file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
{file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
{file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
{file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
{file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
{file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
{file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
]
[[package]]
name = "click"
version = "8.1.7"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"},
{file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "codecov"
version = "2.1.13"
description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["dev"]
files = [
{file = "codecov-2.1.13-py2.py3-none-any.whl", hash = "sha256:c2ca5e51bba9ebb43644c43d0690148a55086f7f5e6fd36170858fa4206744d5"},
{file = "codecov-2.1.13.tar.gz", hash = "sha256:2362b685633caeaf45b9951a9b76ce359cd3581dd515b430c6c3f5dfb4d92a8c"},
]
[package.dependencies]
coverage = "*"
requests = ">=2.7.9"
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.6.1"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"},
{file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"},
{file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"},
{file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"},
{file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"},
{file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"},
{file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"},
{file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"},
{file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"},
{file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"},
{file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"},
{file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"},
{file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"},
{file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"},
{file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"},
{file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"},
{file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"},
{file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"},
{file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"},
{file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"},
{file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"},
{file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"},
{file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"},
{file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"},
{file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"},
{file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"},
{file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"},
{file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"},
{file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"},
{file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"},
{file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"},
{file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"},
{file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"},
{file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"},
{file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"},
{file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"},
{file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"},
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"},
{file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"},
{file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"},
{file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"},
{file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"},
{file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"},
]
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
[[package]]
name = "cryptography"
version = "43.0.3"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"crypto\" or extra == \"all\""
files = [
{file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
{file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
{file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
{file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
{file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
{file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
{file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
{file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
{file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
{file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
{file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
{file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
{file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
{file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
{file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
]
[package.dependencies]
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
nox = ["nox"]
pep8test = ["check-sdist", "click", "mypy", "ruff"]
sdist = ["build"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
test-randomorder = ["pytest-randomly"]
[[package]]
name = "databases"
version = "0.9.0"
description = "Async database support for Python."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "databases-0.9.0-py3-none-any.whl", hash = "sha256:9ee657c9863b34f8d3a06c06eafbe1bda68af2a434b56996312edf1f1c0b6297"},
{file = "databases-0.9.0.tar.gz", hash = "sha256:d2f259677609bf187737644c95fa41701072e995dfeb8d2882f335795c5b61b0"},
]
[package.dependencies]
sqlalchemy = ">=2.0.7"
[package.extras]
aiomysql = ["aiomysql"]
aiopg = ["aiopg"]
aiosqlite = ["aiosqlite"]
asyncmy = ["asyncmy"]
asyncpg = ["asyncpg"]
mysql = ["aiomysql"]
postgresql = ["asyncpg"]
sqlite = ["aiosqlite"]
[[package]]
name = "dataclasses"
version = "0.6"
description = "A backport of the dataclasses module for Python 3.6"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "dataclasses-0.6-py3-none-any.whl", hash = "sha256:454a69d788c7fda44efd71e259be79577822f5e3f53f029a22d08004e951dc9f"},
{file = "dataclasses-0.6.tar.gz", hash = "sha256:6988bd2b895eef432d562370bb707d540f32f7360ab13da45340101bc2307d84"},
]
[[package]]
name = "distlib"
version = "0.3.9"
description = "Distribution utilities"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
{file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
]
[[package]]
name = "dnspython"
version = "2.6.1"
description = "DNS toolkit"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
{file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
]
[package.extras]
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
dnssec = ["cryptography (>=41)"]
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
doq = ["aioquic (>=0.9.25)"]
idna = ["idna (>=3.6)"]
trio = ["trio (>=0.23)"]
wmi = ["wmi (>=1.5.1)"]
[[package]]
name = "email-validator"
version = "2.2.0"
description = "A robust email address syntax and deliverability validation library."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
{file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
]
[package.dependencies]
dnspython = ">=2.0.0"
idna = ">=2.0.0"
[[package]]
name = "exceptiongroup"
version = "1.2.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "faker"
version = "35.2.2"
description = "Faker is a Python package that generates fake data for you."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "Faker-35.2.2-py3-none-any.whl", hash = "sha256:94216ce3d8affdc0a8fd0ea8219c184c346a1dcf07b03f193e52f3116186621e"},
{file = "faker-35.2.2.tar.gz", hash = "sha256:0a79ebe8f0ea803f7bd288d51e2d445b86035a2480e048daee1bffbd4d69b32b"},
]
[package.dependencies]
python-dateutil = ">=2.4"
typing-extensions = "*"
[[package]]
name = "fastapi"
version = "0.128.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d"},
{file = "fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a"},
]
[package.dependencies]
annotated-doc = ">=0.0.2"
pydantic = ">=2.7.0"
starlette = ">=0.40.0,<0.51.0"
typing-extensions = ">=4.8.0"
[package.extras]
all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
[[package]]
name = "filelock"
version = "3.16.1"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
{file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
]
[package.extras]
docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""]
[[package]]
name = "ghp-import"
version = "2.1.0"
description = "Copy your docs directly to the gh-pages branch."
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"},
{file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"},
]
[package.dependencies]
python-dateutil = ">=2.8.1"
[package.extras]
dev = ["flake8", "markdown", "twine", "wheel"]
[[package]]
name = "greenlet"
version = "3.1.1"
description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""
files = [
{file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"},
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"},
{file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"},
{file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"},
{file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"},
{file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"},
{file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"},
{file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"},
{file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"},
{file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"},
{file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"},
{file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"},
{file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"},
{file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"},
{file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"},
{file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"},
{file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"},
{file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"},
{file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"},
{file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"},
{file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"},
{file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"},
{file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"},
{file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"},
{file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"},
{file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"},
{file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"},
{file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"},
{file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"},
{file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"},
{file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"},
{file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"},
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"},
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"},
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"},
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"},
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"},
{file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"},
{file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"},
{file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"},
{file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"},
{file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"},
{file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"},
{file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"},
{file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"},
{file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"},
{file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"},
{file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"},
{file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"},
{file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"},
{file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"},
{file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"},
{file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"},
{file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"},
{file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"},
{file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"},
{file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"},
{file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"},
{file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"},
{file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"},
{file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"},
{file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"},
{file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"},
{file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"},
{file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"},
{file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"},
{file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"},
{file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"},
{file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"},
]
[package.extras]
docs = ["Sphinx", "furo"]
test = ["objgraph", "psutil"]
[[package]]
name = "griffe"
version = "1.4.0"
description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "griffe-1.4.0-py3-none-any.whl", hash = "sha256:e589de8b8c137e99a46ec45f9598fc0ac5b6868ce824b24db09c02d117b89bc5"},
{file = "griffe-1.4.0.tar.gz", hash = "sha256:8fccc585896d13f1221035d32c50dec65830c87d23f9adb9b1e6f3d63574f7f5"},
]
[package.dependencies]
colorama = ">=0.4"
[[package]]
name = "h11"
version = "0.16.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"},
{file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"},
]
[[package]]
name = "httpcore"
version = "1.0.9"
description = "A minimal low-level HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"},
{file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"},
]
[package.dependencies]
certifi = "*"
h11 = ">=0.16"
[package.extras]
asyncio = ["anyio (>=4.0,<5.0)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
trio = ["trio (>=0.22.0,<1.0)"]
[[package]]
name = "httpx"
version = "0.28.1"
description = "The next generation HTTP client."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"},
{file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"},
]
[package.dependencies]
anyio = "*"
certifi = "*"
httpcore = "==1.*"
idna = "*"
[package.extras]
brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""]
cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
http2 = ["h2 (>=3,<5)"]
socks = ["socksio (==1.*)"]
zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "identify"
version = "2.6.1"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
{file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
]
[package.extras]
license = ["ukkonen"]
[[package]]
name = "idna"
version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
[[package]]
name = "importlib-metadata"
version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
]
[package.dependencies]
zipp = ">=3.20"
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
test = ["flufl.flake8", "importlib-resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
[[package]]
name = "importlib-resources"
version = "6.4.5"
description = "Read resources from Python packages"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"},
{file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"},
]
[package.dependencies]
zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""}
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"]
type = ["pytest-mypy"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "jinja2"
version = "3.1.6"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"},
{file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"},
]
[package.dependencies]
MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
[[package]]
name = "markdown"
version = "3.7"
description = "Python implementation of John Gruber's Markdown."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "Markdown-3.7-py3-none-any.whl", hash = "sha256:7eb6df5690b81a1d7942992c97fad2938e956e79df20cbc6186e9c3a77b1c803"},
{file = "markdown-3.7.tar.gz", hash = "sha256:2ae2471477cfd02dbbf038d5d9bc226d40def84b4fe2986e49b59b6b472bbed2"},
]
[package.dependencies]
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
[package.extras]
docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
testing = ["coverage", "pyyaml"]
[[package]]
name = "markdown-it-py"
version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]
[package.dependencies]
mdurl = ">=0.1,<1.0"
[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "markupsafe"
version = "2.1.5"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
{file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
{file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
{file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
{file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
{file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
{file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
{file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
{file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
{file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
{file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
{file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
{file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
{file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
{file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
{file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
{file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
{file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
{file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
{file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
]
[[package]]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "mergedeep"
version = "1.3.4"
description = "A deep merge function for 🐍."
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
{file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"},
{file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"},
]
[[package]]
name = "mike"
version = "2.1.3"
description = "Manage multiple versions of your MkDocs-powered documentation"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "mike-2.1.3-py3-none-any.whl", hash = "sha256:d90c64077e84f06272437b464735130d380703a76a5738b152932884c60c062a"},
{file = "mike-2.1.3.tar.gz", hash = "sha256:abd79b8ea483fb0275b7972825d3082e5ae67a41820f8d8a0dc7a3f49944e810"},
]
[package.dependencies]
importlib-metadata = "*"
importlib-resources = "*"
jinja2 = ">=2.7"
mkdocs = ">=1.0"
pyparsing = ">=3.0"
pyyaml = ">=5.1"
pyyaml-env-tag = "*"
verspec = "*"
[package.extras]
dev = ["coverage", "flake8 (>=3.0)", "flake8-quotes", "shtab"]
test = ["coverage", "flake8 (>=3.0)", "flake8-quotes", "shtab"]
[[package]]
name = "mkdocs"
version = "1.6.1"
description = "Project documentation with Markdown."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs-1.6.1-py3-none-any.whl", hash = "sha256:db91759624d1647f3f34aa0c3f327dd2601beae39a366d6e064c03468d35c20e"},
{file = "mkdocs-1.6.1.tar.gz", hash = "sha256:7b432f01d928c084353ab39c57282f29f92136665bdd6abf7c1ec8d822ef86f2"},
]
[package.dependencies]
click = ">=7.0"
colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""}
ghp-import = ">=1.0"
importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
jinja2 = ">=2.11.1"
markdown = ">=3.3.6"
markupsafe = ">=2.0.1"
mergedeep = ">=1.3.4"
mkdocs-get-deps = ">=0.2.0"
packaging = ">=20.5"
pathspec = ">=0.11.1"
pyyaml = ">=5.1"
pyyaml-env-tag = ">=0.1"
watchdog = ">=2.0"
[package.extras]
i18n = ["babel (>=2.9.0)"]
min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4) ; platform_system == \"Windows\"", "ghp-import (==1.0)", "importlib-metadata (==4.4) ; python_version < \"3.10\"", "jinja2 (==2.11.1)", "markdown (==3.3.6)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "mkdocs-get-deps (==0.2.0)", "packaging (==20.5)", "pathspec (==0.11.1)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "watchdog (==2.0)"]
[[package]]
name = "mkdocs-autorefs"
version = "1.2.0"
description = "Automatically link across pages in MkDocs."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_autorefs-1.2.0-py3-none-any.whl", hash = "sha256:d588754ae89bd0ced0c70c06f58566a4ee43471eeeee5202427da7de9ef85a2f"},
{file = "mkdocs_autorefs-1.2.0.tar.gz", hash = "sha256:a86b93abff653521bda71cf3fc5596342b7a23982093915cb74273f67522190f"},
]
[package.dependencies]
Markdown = ">=3.3"
markupsafe = ">=2.0.1"
mkdocs = ">=1.1"
[[package]]
name = "mkdocs-gen-files"
version = "0.5.0"
description = "MkDocs plugin to programmatically generate documentation pages during the build"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "mkdocs_gen_files-0.5.0-py3-none-any.whl", hash = "sha256:7ac060096f3f40bd19039e7277dd3050be9a453c8ac578645844d4d91d7978ea"},
{file = "mkdocs_gen_files-0.5.0.tar.gz", hash = "sha256:4c7cf256b5d67062a788f6b1d035e157fc1a9498c2399be9af5257d4ff4d19bc"},
]
[package.dependencies]
mkdocs = ">=1.0.3"
[[package]]
name = "mkdocs-get-deps"
version = "0.2.0"
description = "MkDocs extension that lists all dependencies according to a mkdocs.yml file"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_get_deps-0.2.0-py3-none-any.whl", hash = "sha256:2bf11d0b133e77a0dd036abeeb06dec8775e46efa526dc70667d8863eefc6134"},
{file = "mkdocs_get_deps-0.2.0.tar.gz", hash = "sha256:162b3d129c7fad9b19abfdcb9c1458a651628e4b1dea628ac68790fb3061c60c"},
]
[package.dependencies]
importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""}
mergedeep = ">=1.3.4"
platformdirs = ">=2.2.0"
pyyaml = ">=5.1"
[[package]]
name = "mkdocs-literate-nav"
version = "0.6.1"
description = "MkDocs plugin to specify the navigation in Markdown instead of YAML"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "mkdocs_literate_nav-0.6.1-py3-none-any.whl", hash = "sha256:e70bdc4a07050d32da79c0b697bd88e9a104cf3294282e9cb20eec94c6b0f401"},
{file = "mkdocs_literate_nav-0.6.1.tar.gz", hash = "sha256:78a7ab6d878371728acb0cdc6235c9b0ffc6e83c997b037f4a5c6ff7cef7d759"},
]
[package.dependencies]
mkdocs = ">=1.0.3"
[[package]]
name = "mkdocs-material"
version = "9.5.50"
description = "Documentation that simply works"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_material-9.5.50-py3-none-any.whl", hash = "sha256:f24100f234741f4d423a9d672a909d859668a4f404796be3cf035f10d6050385"},
{file = "mkdocs_material-9.5.50.tar.gz", hash = "sha256:ae5fe16f3d7c9ccd05bb6916a7da7420cf99a9ce5e33debd9d40403a090d5825"},
]
[package.dependencies]
babel = ">=2.10,<3.0"
colorama = ">=0.4,<1.0"
jinja2 = ">=3.0,<4.0"
markdown = ">=3.2,<4.0"
mkdocs = ">=1.6,<2.0"
mkdocs-material-extensions = ">=1.3,<2.0"
paginate = ">=0.5,<1.0"
pygments = ">=2.16,<3.0"
pymdown-extensions = ">=10.2,<11.0"
regex = ">=2022.4"
requests = ">=2.26,<3.0"
[package.extras]
git = ["mkdocs-git-committers-plugin-2 (>=1.1,<3)", "mkdocs-git-revision-date-localized-plugin (>=1.2.4,<2.0)"]
imaging = ["cairosvg (>=2.6,<3.0)", "pillow (>=10.2,<11.0)"]
recommended = ["mkdocs-minify-plugin (>=0.7,<1.0)", "mkdocs-redirects (>=1.2,<2.0)", "mkdocs-rss-plugin (>=1.6,<2.0)"]
[[package]]
name = "mkdocs-material-extensions"
version = "1.3.1"
description = "Extension pack for Python Markdown and MkDocs Material."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_material_extensions-1.3.1-py3-none-any.whl", hash = "sha256:adff8b62700b25cb77b53358dad940f3ef973dd6db797907c49e3c2ef3ab4e31"},
{file = "mkdocs_material_extensions-1.3.1.tar.gz", hash = "sha256:10c9511cea88f568257f960358a467d12b970e1f7b2c0e5fb2bb48cab1928443"},
]
[[package]]
name = "mkdocs-section-index"
version = "0.3.9"
description = "MkDocs plugin to allow clickable sections that lead to an index page"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocs_section_index-0.3.9-py3-none-any.whl", hash = "sha256:5e5eb288e8d7984d36c11ead5533f376fdf23498f44e903929d72845b24dfe34"},
{file = "mkdocs_section_index-0.3.9.tar.gz", hash = "sha256:b66128d19108beceb08b226ee1ba0981840d14baf8a652b6c59e650f3f92e4f8"},
]
[package.dependencies]
mkdocs = ">=1.2"
[[package]]
name = "mkdocstrings"
version = "0.26.1"
description = "Automatic documentation from sources, for MkDocs."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocstrings-0.26.1-py3-none-any.whl", hash = "sha256:29738bfb72b4608e8e55cc50fb8a54f325dc7ebd2014e4e3881a49892d5983cf"},
{file = "mkdocstrings-0.26.1.tar.gz", hash = "sha256:bb8b8854d6713d5348ad05b069a09f3b79edbc6a0f33a34c6821141adb03fe33"},
]
[package.dependencies]
click = ">=7.0"
importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""}
Jinja2 = ">=2.11.1"
Markdown = ">=3.6"
MarkupSafe = ">=1.1"
mkdocs = ">=1.4"
mkdocs-autorefs = ">=1.2"
mkdocstrings-python = {version = ">=0.5.2", optional = true, markers = "extra == \"python\""}
platformdirs = ">=2.2"
pymdown-extensions = ">=6.3"
typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""}
[package.extras]
crystal = ["mkdocstrings-crystal (>=0.3.4)"]
python = ["mkdocstrings-python (>=0.5.2)"]
python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"]
[[package]]
name = "mkdocstrings-python"
version = "1.10.9"
description = "A Python handler for mkdocstrings."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mkdocstrings_python-1.10.9-py3-none-any.whl", hash = "sha256:cbe98710a6757dfd4dff79bf36cb9731908fb4c69dd2736b15270ae7a488243d"},
{file = "mkdocstrings_python-1.10.9.tar.gz", hash = "sha256:f344aaa47e727d8a2dc911e063025e58e2b7fb31a41110ccc3902aa6be7ca196"},
]
[package.dependencies]
griffe = ">=0.49"
mkdocs-autorefs = ">=1.0"
mkdocstrings = ">=0.25"
[[package]]
name = "mypy"
version = "1.13.0"
description = "Optional static typing for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"},
{file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"},
{file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"},
{file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"},
{file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"},
{file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"},
{file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"},
{file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"},
{file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"},
{file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"},
{file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"},
{file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"},
{file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"},
{file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"},
{file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"},
{file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"},
{file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"},
{file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"},
{file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"},
{file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"},
{file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"},
{file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"},
{file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"},
{file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"},
{file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"},
{file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"},
{file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"},
{file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"},
{file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"},
{file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"},
{file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"},
{file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"},
]
[package.dependencies]
mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=4.6.0"
[package.extras]
dmypy = ["psutil (>=4.0)"]
faster-cache = ["orjson"]
install-types = ["pip"]
mypyc = ["setuptools (>=50)"]
reports = ["lxml"]
[[package]]
name = "mypy-extensions"
version = "1.0.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.5"
groups = ["dev"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
]
[[package]]
name = "mysqlclient"
version = "2.2.6"
description = "Python interface to MySQL"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"all\""
files = [
{file = "mysqlclient-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:e94a92858203d97fd584bdb6d7ee8c56f2590db8d77fd44215c0dcf5e739bc37"},
{file = "mysqlclient-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:43c5b30be0675080b9c815f457d73397f0442173e7be83d089b126835e2617ae"},
{file = "mysqlclient-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:e940b41d85dfd7b190fa47d52f525f878cfa203d4653bf6a35b271b3c3be125b"},
{file = "mysqlclient-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:794857bce4f9a1903a99786dd29ad7887f45a870b3d11585b8c51c4a753c4174"},
{file = "mysqlclient-2.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:b0a5cddf1d3488b254605041070086cac743401d876a659a72d706a0d89c8ebb"},
{file = "mysqlclient-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f3efb849d6f7ef4b9788a0eda2e896b975e0ebf1d6bf3dcabea63fd698e5b0b5"},
{file = "mysqlclient-2.2.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3da70a07753ba6be881f7d75e795e254f6a0c12795778034acc69769b0649d37"},
{file = "mysqlclient-2.2.6.tar.gz", hash = "sha256:c0b46d9b78b461dbb62482089ca8040fa916595b1b30f831ebbd1b0a82b43d53"},
]
[[package]]
name = "nest-asyncio"
version = "1.6.0"
description = "Patch asyncio to allow nested event loops"
optional = false
python-versions = ">=3.5"
groups = ["dev"]
files = [
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
]
[[package]]
name = "nodeenv"
version = "1.9.1"
description = "Node.js virtual environment builder"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
groups = ["dev"]
files = [
{file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"},
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]
[[package]]
name = "orjson"
version = "3.10.15"
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"orjson\" or extra == \"all\""
files = [
{file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf"},
{file = "orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061"},
{file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3"},
{file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d"},
{file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182"},
{file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e"},
{file = "orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab"},
{file = "orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806"},
{file = "orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13"},
{file = "orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5"},
{file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b"},
{file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399"},
{file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388"},
{file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c"},
{file = "orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e"},
{file = "orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e"},
{file = "orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41"},
{file = "orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514"},
{file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17"},
{file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b"},
{file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7"},
{file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a"},
{file = "orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665"},
{file = "orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa"},
{file = "orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e"},
{file = "orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7"},
{file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8"},
{file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca"},
{file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561"},
{file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825"},
{file = "orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890"},
{file = "orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf"},
{file = "orjson-3.10.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c"},
{file = "orjson-3.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae"},
{file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a"},
{file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae"},
{file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81"},
{file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528"},
{file = "orjson-3.10.15-cp38-cp38-win32.whl", hash = "sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60"},
{file = "orjson-3.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1"},
{file = "orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8"},
{file = "orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3"},
{file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480"},
{file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829"},
{file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a"},
{file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428"},
{file = "orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507"},
{file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"},
{file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"},
]
[[package]]
name = "packaging"
version = "24.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
{file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
]
[[package]]
name = "paginate"
version = "0.5.7"
description = "Divides large result sets into pages for easier browsing"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "paginate-0.5.7-py2.py3-none-any.whl", hash = "sha256:b885e2af73abcf01d9559fd5216b57ef722f8c42affbb63942377668e35c7591"},
{file = "paginate-0.5.7.tar.gz", hash = "sha256:22bd083ab41e1a8b4f3690544afb2c60c25e5c9a63a30fa2f483f6c60c8e5945"},
]
[package.extras]
dev = ["pytest", "tox"]
lint = ["black"]
[[package]]
name = "pathspec"
version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
]
[[package]]
name = "platformdirs"
version = "4.3.6"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
{file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]
[package.extras]
docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
type = ["mypy (>=1.11.2)"]
[[package]]
name = "pluggy"
version = "1.5.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"},
{file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
version = "3.5.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"},
{file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"},
]
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "psycopg2-binary"
version = "2.9.10"
description = "psycopg2 - Python-PostgreSQL Database Adapter"
optional = true
python-versions = ">=3.8"
groups = ["main"]
markers = "extra == \"aiopg\" or extra == \"all\" or extra == \"postgresql\" or extra == \"postgres\""
files = [
{file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"},
{file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"},
{file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"},
{file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"},
{file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"},
{file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"},
{file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"},
]
[[package]]
name = "py-cpuinfo"
version = "9.0.0"
description = "Get CPU info with pure Python"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690"},
{file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"},
]
[[package]]
name = "pycparser"
version = "2.22"
description = "C parser in Python"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
markers = {main = "(extra == \"crypto\" or extra == \"all\") and platform_python_implementation != \"PyPy\""}
[[package]]
name = "pydantic"
version = "2.11.9"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2"},
{file = "pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2"},
]
[package.dependencies]
annotated-types = ">=0.6.0"
pydantic-core = "2.33.2"
typing-extensions = ">=4.12.2"
typing-inspection = ">=0.4.0"
[package.extras]
email = ["email-validator (>=2.0.0)"]
timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""]
[[package]]
name = "pydantic-core"
version = "2.33.2"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"},
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"},
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"},
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"},
{file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"},
{file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"},
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"},
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"},
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"},
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"},
{file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"},
{file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"},
{file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"},
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"},
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"},
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"},
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"},
{file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"},
{file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"},
{file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"},
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"},
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"},
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"},
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"},
{file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"},
{file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"},
{file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"},
{file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"},
{file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"},
{file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"},
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"},
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"},
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"},
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"},
{file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"},
{file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"},
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"},
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"},
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"},
{file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"},
]
[package.dependencies]
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-extra-types"
version = "2.10.2"
description = "Extra Pydantic types."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pydantic_extra_types-2.10.2-py3-none-any.whl", hash = "sha256:9eccd55a2b7935cea25f0a67f6ff763d55d80c41d86b887d88915412ccf5b7fa"},
{file = "pydantic_extra_types-2.10.2.tar.gz", hash = "sha256:934d59ab7a02ff788759c3a97bc896f5cfdc91e62e4f88ea4669067a73f14b98"},
]
[package.dependencies]
pydantic = ">=2.5.2"
typing-extensions = "*"
[package.extras]
all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\"", "pytz (>=2024.1)", "semver (>=3.0.2)", "semver (>=3.0.2,<3.1.0)", "tzdata (>=2024.1)"]
pendulum = ["pendulum (>=3.0.0,<4.0.0)"]
phonenumbers = ["phonenumbers (>=8,<9)"]
pycountry = ["pycountry (>=23)"]
python-ulid = ["python-ulid (>=1,<2) ; python_version < \"3.9\"", "python-ulid (>=1,<4) ; python_version >= \"3.9\""]
semver = ["semver (>=3.0.2)"]
[[package]]
name = "pygments"
version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
{file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
[package.extras]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pymdown-extensions"
version = "10.16.1"
description = "Extension pack for Python Markdown."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "pymdown_extensions-10.16.1-py3-none-any.whl", hash = "sha256:d6ba157a6c03146a7fb122b2b9a121300056384eafeec9c9f9e584adfdb2a32d"},
{file = "pymdown_extensions-10.16.1.tar.gz", hash = "sha256:aace82bcccba3efc03e25d584e6a22d27a8e17caa3f4dd9f207e49b787aa9a91"},
]
[package.dependencies]
markdown = ">=3.6"
pyyaml = "*"
[package.extras]
extra = ["pygments (>=2.19.1)"]
[[package]]
name = "pymysql"
version = "1.1.1"
description = "Pure Python MySQL Driver"
optional = true
python-versions = ">=3.7"
groups = ["main"]
markers = "extra == \"mysql\" or extra == \"all\""
files = [
{file = "PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c"},
{file = "pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0"},
]
[package.extras]
ed25519 = ["PyNaCl (>=1.4.0)"]
rsa = ["cryptography"]
[[package]]
name = "pyparsing"
version = "3.1.4"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = false
python-versions = ">=3.6.8"
groups = ["dev"]
files = [
{file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"},
{file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"},
]
[package.extras]
diagrams = ["jinja2", "railroad-diagrams"]
[[package]]
name = "pytest"
version = "8.3.4"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"},
{file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
version = "0.23.8"
description = "Pytest support for asyncio"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"},
{file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"},
]
[package.dependencies]
pytest = ">=7.0.0,<9"
[package.extras]
docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"]
testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"]
[[package]]
name = "pytest-benchmark"
version = "4.0.0"
description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "pytest-benchmark-4.0.0.tar.gz", hash = "sha256:fb0785b83efe599a6a956361c0691ae1dbb5318018561af10f3e915caa0048d1"},
{file = "pytest_benchmark-4.0.0-py3-none-any.whl", hash = "sha256:fdb7db64e31c8b277dff9850d2a2556d8b60bcb0ea6524e36e28ffd7c87f71d6"},
]
[package.dependencies]
py-cpuinfo = "*"
pytest = ">=3.8"
[package.extras]
aspect = ["aspectlib"]
elasticsearch = ["elasticsearch"]
histogram = ["pygal", "pygaljs"]
[[package]]
name = "pytest-codspeed"
version = "4.2.0"
description = "Pytest plugin to create CodSpeed benchmarks"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "pytest_codspeed-4.2.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:609828b03972966b75b9b7416fa2570c4a0f6124f67e02d35cd3658e64312a7b"},
{file = "pytest_codspeed-4.2.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23a0c0fbf8bb4de93a3454fd9e5efcdca164c778aaef0a9da4f233d85cb7f5b8"},
{file = "pytest_codspeed-4.2.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2de87bde9fbc6fd53f0fd21dcf2599c89e0b8948d49f9bad224edce51c47e26b"},
{file = "pytest_codspeed-4.2.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95aeb2479ca383f6b18e2cc9ebcd3b03ab184980a59a232aea6f370bbf59a1e3"},
{file = "pytest_codspeed-4.2.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7d4fefbd4ae401e2c60f6be920a0be50eef0c3e4a1f0a1c83962efd45be38b39"},
{file = "pytest_codspeed-4.2.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:309b4227f57fcbb9df21e889ea1ae191d0d1cd8b903b698fdb9ea0461dbf1dfe"},
{file = "pytest_codspeed-4.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72aab8278452a6d020798b9e4f82780966adb00f80d27a25d1274272c54630d5"},
{file = "pytest_codspeed-4.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:684fcd9491d810ded653a8d38de4835daa2d001645f4a23942862950664273f8"},
{file = "pytest_codspeed-4.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:50794dabea6ec90d4288904452051e2febace93e7edf4ca9f2bce8019dd8cd37"},
{file = "pytest_codspeed-4.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0ebd87f2a99467a1cfd8e83492c4712976e43d353ee0b5f71cbb057f1393aca"},
{file = "pytest_codspeed-4.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dbbb2d61b85bef8fc7e2193f723f9ac2db388a48259d981bbce96319043e9830"},
{file = "pytest_codspeed-4.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:748411c832147bfc85f805af78a1ab1684f52d08e14aabe22932bbe46c079a5f"},
{file = "pytest_codspeed-4.2.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:238e17abe8f08d8747fa6c7acff34fefd3c40f17a56a7847ca13dc8d6e8c6009"},
{file = "pytest_codspeed-4.2.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0881a736285f33b9a8894da8fe8e1775aa1a4310226abe5d1f0329228efb680c"},
{file = "pytest_codspeed-4.2.0-py3-none-any.whl", hash = "sha256:e81bbb45c130874ef99aca97929d72682733527a49f84239ba575b5cb843bab0"},
{file = "pytest_codspeed-4.2.0.tar.gz", hash = "sha256:04b5d0bc5a1851ba1504d46bf9d7dbb355222a69f2cd440d54295db721b331f7"},
]
[package.dependencies]
cffi = ">=1.17.1"
importlib-metadata = {version = ">=8.5.0", markers = "python_version < \"3.10\""}
pytest = ">=3.8"
rich = ">=13.8.1"
[package.extras]
compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"]
[[package]]
name = "pytest-cov"
version = "5.0.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"},
{file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "python-dateutil"
version = "2.9.0.post0"
description = "Extensions to the standard Python datetime module"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["dev"]
files = [
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pyyaml"
version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
{file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
{file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
{file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
{file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
name = "pyyaml-env-tag"
version = "0.1"
description = "A custom YAML tag for referencing environment variables in YAML files. "
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
{file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"},
{file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"},
]
[package.dependencies]
pyyaml = "*"
[[package]]
name = "regex"
version = "2024.11.6"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff590880083d60acc0433f9c3f713c51f7ac6ebb9adf889c79a261ecf541aa91"},
{file = "regex-2024.11.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:658f90550f38270639e83ce492f27d2c8d2cd63805c65a13a14d36ca126753f0"},
{file = "regex-2024.11.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:164d8b7b3b4bcb2068b97428060b2a53be050085ef94eca7f240e7947f1b080e"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3660c82f209655a06b587d55e723f0b813d3a7db2e32e5e7dc64ac2a9e86fde"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d22326fcdef5e08c154280b71163ced384b428343ae16a5ab2b3354aed12436e"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac758ef6aebfc8943560194e9fd0fa18bcb34d89fd8bd2af18183afd8da3a2"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:997d6a487ff00807ba810e0f8332c18b4eb8d29463cfb7c820dc4b6e7562d0cf"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02a02d2bb04fec86ad61f3ea7f49c015a0681bf76abb9857f945d26159d2968c"},
{file = "regex-2024.11.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f02f93b92358ee3f78660e43b4b0091229260c5d5c408d17d60bf26b6c900e86"},
{file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:06eb1be98df10e81ebaded73fcd51989dcf534e3c753466e4b60c4697a003b67"},
{file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:040df6fe1a5504eb0f04f048e6d09cd7c7110fef851d7c567a6b6e09942feb7d"},
{file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fdabbfc59f2c6edba2a6622c647b716e34e8e3867e0ab975412c5c2f79b82da2"},
{file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8447d2d39b5abe381419319f942de20b7ecd60ce86f16a23b0698f22e1b70008"},
{file = "regex-2024.11.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:da8f5fc57d1933de22a9e23eec290a0d8a5927a5370d24bda9a6abe50683fe62"},
{file = "regex-2024.11.6-cp310-cp310-win32.whl", hash = "sha256:b489578720afb782f6ccf2840920f3a32e31ba28a4b162e13900c3e6bd3f930e"},
{file = "regex-2024.11.6-cp310-cp310-win_amd64.whl", hash = "sha256:5071b2093e793357c9d8b2929dfc13ac5f0a6c650559503bb81189d0a3814519"},
{file = "regex-2024.11.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5478c6962ad548b54a591778e93cd7c456a7a29f8eca9c49e4f9a806dcc5d638"},
{file = "regex-2024.11.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c89a8cc122b25ce6945f0423dc1352cb9593c68abd19223eebbd4e56612c5b7"},
{file = "regex-2024.11.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94d87b689cdd831934fa3ce16cc15cd65748e6d689f5d2b8f4f4df2065c9fa20"},
{file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1062b39a0a2b75a9c694f7a08e7183a80c63c0d62b301418ffd9c35f55aaa114"},
{file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:167ed4852351d8a750da48712c3930b031f6efdaa0f22fa1933716bfcd6bf4a3"},
{file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d548dafee61f06ebdb584080621f3e0c23fff312f0de1afc776e2a2ba99a74f"},
{file = "regex-2024.11.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a19f302cd1ce5dd01a9099aaa19cae6173306d1302a43b627f62e21cf18ac0"},
{file = "regex-2024.11.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bec9931dfb61ddd8ef2ebc05646293812cb6b16b60cf7c9511a832b6f1854b55"},
{file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9714398225f299aa85267fd222f7142fcb5c769e73d7733344efc46f2ef5cf89"},
{file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:202eb32e89f60fc147a41e55cb086db2a3f8cb82f9a9a88440dcfc5d37faae8d"},
{file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:4181b814e56078e9b00427ca358ec44333765f5ca1b45597ec7446d3a1ef6e34"},
{file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:068376da5a7e4da51968ce4c122a7cd31afaaec4fccc7856c92f63876e57b51d"},
{file = "regex-2024.11.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f2c4184420d881a3475fb2c6f4d95d53a8d50209a2500723d831036f7c45"},
{file = "regex-2024.11.6-cp311-cp311-win32.whl", hash = "sha256:c36f9b6f5f8649bb251a5f3f66564438977b7ef8386a52460ae77e6070d309d9"},
{file = "regex-2024.11.6-cp311-cp311-win_amd64.whl", hash = "sha256:02e28184be537f0e75c1f9b2f8847dc51e08e6e171c6bde130b2687e0c33cf60"},
{file = "regex-2024.11.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:52fb28f528778f184f870b7cf8f225f5eef0a8f6e3778529bdd40c7b3920796a"},
{file = "regex-2024.11.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:fdd6028445d2460f33136c55eeb1f601ab06d74cb3347132e1c24250187500d9"},
{file = "regex-2024.11.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805e6b60c54bf766b251e94526ebad60b7de0c70f70a4e6210ee2891acb70bf2"},
{file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b85c2530be953a890eaffde05485238f07029600e8f098cdf1848d414a8b45e4"},
{file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb26437975da7dc36b7efad18aa9dd4ea569d2357ae6b783bf1118dabd9ea577"},
{file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abfa5080c374a76a251ba60683242bc17eeb2c9818d0d30117b4486be10c59d3"},
{file = "regex-2024.11.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b7fa6606c2881c1db9479b0eaa11ed5dfa11c8d60a474ff0e095099f39d98e"},
{file = "regex-2024.11.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c32f75920cf99fe6b6c539c399a4a128452eaf1af27f39bce8909c9a3fd8cbe"},
{file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:982e6d21414e78e1f51cf595d7f321dcd14de1f2881c5dc6a6e23bbbbd68435e"},
{file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a7c2155f790e2fb448faed6dd241386719802296ec588a8b9051c1f5c481bc29"},
{file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149f5008d286636e48cd0b1dd65018548944e495b0265b45e1bffecce1ef7f39"},
{file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e5364a4502efca094731680e80009632ad6624084aff9a23ce8c8c6820de3e51"},
{file = "regex-2024.11.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0a86e7eeca091c09e021db8eb72d54751e527fa47b8d5787caf96d9831bd02ad"},
{file = "regex-2024.11.6-cp312-cp312-win32.whl", hash = "sha256:32f9a4c643baad4efa81d549c2aadefaeba12249b2adc5af541759237eee1c54"},
{file = "regex-2024.11.6-cp312-cp312-win_amd64.whl", hash = "sha256:a93c194e2df18f7d264092dc8539b8ffb86b45b899ab976aa15d48214138e81b"},
{file = "regex-2024.11.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a6ba92c0bcdf96cbf43a12c717eae4bc98325ca3730f6b130ffa2e3c3c723d84"},
{file = "regex-2024.11.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:525eab0b789891ac3be914d36893bdf972d483fe66551f79d3e27146191a37d4"},
{file = "regex-2024.11.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:086a27a0b4ca227941700e0b31425e7a28ef1ae8e5e05a33826e17e47fbfdba0"},
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde01f35767c4a7899b7eb6e823b125a64de314a8ee9791367c9a34d56af18d0"},
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b583904576650166b3d920d2bcce13971f6f9e9a396c673187f49811b2769dc7"},
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c4de13f06a0d54fa0d5ab1b7138bfa0d883220965a29616e3ea61b35d5f5fc7"},
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cde6e9f2580eb1665965ce9bf17ff4952f34f5b126beb509fee8f4e994f143c"},
{file = "regex-2024.11.6-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d7f453dca13f40a02b79636a339c5b62b670141e63efd511d3f8f73fba162b3"},
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59dfe1ed21aea057a65c6b586afd2a945de04fc7db3de0a6e3ed5397ad491b07"},
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b97c1e0bd37c5cd7902e65f410779d39eeda155800b65fc4d04cc432efa9bc6e"},
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d1e379028e0fc2ae3654bac3cbbef81bf3fd571272a42d56c24007979bafb6"},
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:13291b39131e2d002a7940fb176e120bec5145f3aeb7621be6534e46251912c4"},
{file = "regex-2024.11.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4f51f88c126370dcec4908576c5a627220da6c09d0bff31cfa89f2523843316d"},
{file = "regex-2024.11.6-cp313-cp313-win32.whl", hash = "sha256:63b13cfd72e9601125027202cad74995ab26921d8cd935c25f09c630436348ff"},
{file = "regex-2024.11.6-cp313-cp313-win_amd64.whl", hash = "sha256:2b3361af3198667e99927da8b84c1b010752fa4b1115ee30beaa332cabc3ef1a"},
{file = "regex-2024.11.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:3a51ccc315653ba012774efca4f23d1d2a8a8f278a6072e29c7147eee7da446b"},
{file = "regex-2024.11.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ad182d02e40de7459b73155deb8996bbd8e96852267879396fb274e8700190e3"},
{file = "regex-2024.11.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba9b72e5643641b7d41fa1f6d5abda2c9a263ae835b917348fc3c928182ad467"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40291b1b89ca6ad8d3f2b82782cc33807f1406cf68c8d440861da6304d8ffbbd"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cdf58d0e516ee426a48f7b2c03a332a4114420716d55769ff7108c37a09951bf"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a36fdf2af13c2b14738f6e973aba563623cb77d753bbbd8d414d18bfaa3105dd"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cee317bfc014c2419a76bcc87f071405e3966da434e03e13beb45f8aced1a6"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50153825ee016b91549962f970d6a4442fa106832e14c918acd1c8e479916c4f"},
{file = "regex-2024.11.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea1bfda2f7162605f6e8178223576856b3d791109f15ea99a9f95c16a7636fb5"},
{file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:df951c5f4a1b1910f1a99ff42c473ff60f8225baa1cdd3539fe2819d9543e9df"},
{file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:072623554418a9911446278f16ecb398fb3b540147a7828c06e2011fa531e773"},
{file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f654882311409afb1d780b940234208a252322c24a93b442ca714d119e68086c"},
{file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:89d75e7293d2b3e674db7d4d9b1bee7f8f3d1609428e293771d1a962617150cc"},
{file = "regex-2024.11.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f65557897fc977a44ab205ea871b690adaef6b9da6afda4790a2484b04293a5f"},
{file = "regex-2024.11.6-cp38-cp38-win32.whl", hash = "sha256:6f44ec28b1f858c98d3036ad5d7d0bfc568bdd7a74f9c24e25f41ef1ebfd81a4"},
{file = "regex-2024.11.6-cp38-cp38-win_amd64.whl", hash = "sha256:bb8f74f2f10dbf13a0be8de623ba4f9491faf58c24064f32b65679b021ed0001"},
{file = "regex-2024.11.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5704e174f8ccab2026bd2f1ab6c510345ae8eac818b613d7d73e785f1310f839"},
{file = "regex-2024.11.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:220902c3c5cc6af55d4fe19ead504de80eb91f786dc102fbd74894b1551f095e"},
{file = "regex-2024.11.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7e351589da0850c125f1600a4c4ba3c722efefe16b297de54300f08d734fbf"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5056b185ca113c88e18223183aa1a50e66507769c9640a6ff75859619d73957b"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e34b51b650b23ed3354b5a07aab37034d9f923db2a40519139af34f485f77d0"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5670bce7b200273eee1840ef307bfa07cda90b38ae56e9a6ebcc9f50da9c469b"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08986dce1339bc932923e7d1232ce9881499a0e02925f7402fb7c982515419ef"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93c0b12d3d3bc25af4ebbf38f9ee780a487e8bf6954c115b9f015822d3bb8e48"},
{file = "regex-2024.11.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:764e71f22ab3b305e7f4c21f1a97e1526a25ebdd22513e251cf376760213da13"},
{file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f056bf21105c2515c32372bbc057f43eb02aae2fda61052e2f7622c801f0b4e2"},
{file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69ab78f848845569401469da20df3e081e6b5a11cb086de3eed1d48f5ed57c95"},
{file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:86fddba590aad9208e2fa8b43b4c098bb0ec74f15718bb6a704e3c63e2cef3e9"},
{file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:684d7a212682996d21ca12ef3c17353c021fe9de6049e19ac8481ec35574a70f"},
{file = "regex-2024.11.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a03e02f48cd1abbd9f3b7e3586d97c8f7a9721c436f51a5245b3b9483044480b"},
{file = "regex-2024.11.6-cp39-cp39-win32.whl", hash = "sha256:41758407fc32d5c3c5de163888068cfee69cb4c2be844e7ac517a52770f9af57"},
{file = "regex-2024.11.6-cp39-cp39-win_amd64.whl", hash = "sha256:b2837718570f95dd41675328e111345f9b7095d821bac435aac173ac80b19983"},
{file = "regex-2024.11.6.tar.gz", hash = "sha256:7ab159b063c52a0333c884e4679f8d7a85112ee3078fe3d9004b2dd875585519"},
]
[[package]]
name = "requests"
version = "2.32.4"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
{file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset_normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rich"
version = "14.2.0"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.8.0"
groups = ["dev"]
files = [
{file = "rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd"},
{file = "rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4"},
]
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "ruff"
version = "0.8.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"},
{file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"},
{file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"},
{file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"},
{file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"},
{file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"},
{file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"},
{file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"},
{file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"},
{file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"},
{file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"},
{file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"},
]
[[package]]
name = "setuptools"
version = "78.1.1"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "setuptools-78.1.1-py3-none-any.whl", hash = "sha256:c3a9c4211ff4c309edb8b8c4f1cbfa7ae324c4ba9f91ff254e3d305b9fd54561"},
{file = "setuptools-78.1.1.tar.gz", hash = "sha256:fcc17fd9cd898242f6b4adfaca46137a9edef687f43e6f78469692a5e70d851d"},
]
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""]
core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"]
[[package]]
name = "six"
version = "1.17.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
groups = ["dev"]
files = [
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"},
{file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"},
]
[[package]]
name = "sniffio"
version = "1.3.1"
description = "Sniff out which async library your code is running under"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
]
[[package]]
name = "sqlalchemy"
version = "2.0.44"
description = "Database Abstraction Library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "SQLAlchemy-2.0.44-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:471733aabb2e4848d609141a9e9d56a427c0a038f4abf65dd19d7a21fd563632"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bf7d383a35e668b984c805470518b635d48b95a3c57cb03f37eaa3551b5f9f"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf4bb6b3d6228fcf3a71b50231199fb94d2dd2611b66d33be0578ea3e6c2726"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:e998cf7c29473bd077704cea3577d23123094311f59bdc4af551923b168332b1"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ebac3f0b5732014a126b43c2b7567f2f0e0afea7d9119a3378bde46d3dcad88e"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-win32.whl", hash = "sha256:3255d821ee91bdf824795e936642bbf43a4c7cedf5d1aed8d24524e66843aa74"},
{file = "SQLAlchemy-2.0.44-cp37-cp37m-win_amd64.whl", hash = "sha256:78e6c137ba35476adb5432103ae1534f2f5295605201d946a4198a0dea4b38e7"},
{file = "sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce"},
{file = "sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985"},
{file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0"},
{file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e"},
{file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749"},
{file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2"},
{file = "sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165"},
{file = "sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5"},
{file = "sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd"},
{file = "sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa"},
{file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e"},
{file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e"},
{file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399"},
{file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b"},
{file = "sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3"},
{file = "sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5"},
{file = "sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250"},
{file = "sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29"},
{file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44"},
{file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1"},
{file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7"},
{file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d"},
{file = "sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4"},
{file = "sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e"},
{file = "sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1"},
{file = "sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45"},
{file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976"},
{file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c"},
{file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d"},
{file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40"},
{file = "sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73"},
{file = "sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e"},
{file = "sqlalchemy-2.0.44-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fc44e5965ea46909a416fff0af48a219faefd5773ab79e5f8a5fcd5d62b2667"},
{file = "sqlalchemy-2.0.44-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc8b3850d2a601ca2320d081874033684e246d28e1c5e89db0864077cfc8f5a9"},
{file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d733dec0614bb8f4bcb7c8af88172b974f685a31dc3a65cca0527e3120de5606"},
{file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22be14009339b8bc16d6b9dc8780bacaba3402aa7581658e246114abbd2236e3"},
{file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:357bade0e46064f88f2c3a99808233e67b0051cdddf82992379559322dfeb183"},
{file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4848395d932e93c1595e59a8672aa7400e8922c39bb9b0668ed99ac6fa867822"},
{file = "sqlalchemy-2.0.44-cp38-cp38-win32.whl", hash = "sha256:2f19644f27c76f07e10603580a47278abb2a70311136a7f8fd27dc2e096b9013"},
{file = "sqlalchemy-2.0.44-cp38-cp38-win_amd64.whl", hash = "sha256:1df4763760d1de0dfc8192cc96d8aa293eb1a44f8f7a5fbe74caf1b551905c5e"},
{file = "sqlalchemy-2.0.44-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7027414f2b88992877573ab780c19ecb54d3a536bef3397933573d6b5068be4"},
{file = "sqlalchemy-2.0.44-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fe166c7d00912e8c10d3a9a0ce105569a31a3d0db1a6e82c4e0f4bf16d5eca9"},
{file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3caef1ff89b1caefc28f0368b3bde21a7e3e630c2eddac16abd9e47bd27cc36a"},
{file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc2856d24afa44295735e72f3c75d6ee7fdd4336d8d3a8f3d44de7aa6b766df2"},
{file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11bac86b0deada30b6b5f93382712ff0e911fe8d31cb9bf46e6b149ae175eff0"},
{file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d18cd0e9a0f37c9f4088e50e3839fcb69a380a0ec957408e0b57cff08ee0a26"},
{file = "sqlalchemy-2.0.44-cp39-cp39-win32.whl", hash = "sha256:9e9018544ab07614d591a26c1bd4293ddf40752cc435caf69196740516af7100"},
{file = "sqlalchemy-2.0.44-cp39-cp39-win_amd64.whl", hash = "sha256:8e0e4e66fd80f277a8c3de016a81a554e76ccf6b8d881ee0b53200305a8433f6"},
{file = "sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05"},
{file = "sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22"},
]
[package.dependencies]
greenlet = {version = ">=1", optional = true, markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\""}
typing-extensions = ">=4.6.0"
[package.extras]
aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"]
aioodbc = ["aioodbc", "greenlet (>=1)"]
aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"]
asyncio = ["greenlet (>=1)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)"]
mysql = ["mysqlclient (>=1.4.0)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx_oracle (>=8)"]
oracle-oracledb = ["oracledb (>=1.0.1)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"]
postgresql-pg8000 = ["pg8000 (>=1.29.1)"]
postgresql-psycopg = ["psycopg (>=3.0.7)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"]
pymysql = ["pymysql"]
sqlcipher = ["sqlcipher3_binary"]
[[package]]
name = "starlette"
version = "0.49.1"
description = "The little ASGI library that shines."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "starlette-0.49.1-py3-none-any.whl", hash = "sha256:d92ce9f07e4a3caa3ac13a79523bd18e3bc0042bb8ff2d759a8e7dd0e1859875"},
{file = "starlette-0.49.1.tar.gz", hash = "sha256:481a43b71e24ed8c43b11ea02f5353d77840e01480881b8cb5a26b8cae64a8cb"},
]
[package.dependencies]
anyio = ">=3.6.2,<5"
typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.13\""}
[package.extras]
full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"]
[[package]]
name = "tomli"
version = "2.2.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
markers = "python_full_version <= \"3.11.0a6\""
files = [
{file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
{file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
{file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
{file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
{file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
{file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
{file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
{file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
{file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
{file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
{file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
{file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
{file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
{file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
{file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
{file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
{file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
{file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
{file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
{file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
{file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
{file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
{file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
{file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
{file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
{file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
{file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
{file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
{file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
{file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]
[[package]]
name = "types-aiofiles"
version = "23.2.0.20240623"
description = "Typing stubs for aiofiles"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-aiofiles-23.2.0.20240623.tar.gz", hash = "sha256:d515b2fa46bf894aff45a364a704f050de3898344fd6c5994d58dc8b59ab71e6"},
{file = "types_aiofiles-23.2.0.20240623-py3-none-any.whl", hash = "sha256:70597b29fc40c8583b6d755814b2cd5fcdb6785622e82d74ef499f9066316e08"},
]
[[package]]
name = "types-cryptography"
version = "3.3.23.2"
description = "Typing stubs for cryptography"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "types-cryptography-3.3.23.2.tar.gz", hash = "sha256:09cc53f273dd4d8c29fa7ad11fefd9b734126d467960162397bc5e3e604dea75"},
{file = "types_cryptography-3.3.23.2-py3-none-any.whl", hash = "sha256:b965d548f148f8e87f353ccf2b7bd92719fdf6c845ff7cedf2abb393a0643e4f"},
]
[[package]]
name = "types-enum34"
version = "1.1.8"
description = "Typing stubs for enum34"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "types-enum34-1.1.8.tar.gz", hash = "sha256:6f9c769641d06d73a55e11c14d38ac76fcd37eb545ce79cebb6eec9d50a64110"},
{file = "types_enum34-1.1.8-py3-none-any.whl", hash = "sha256:05058c7a495f6bfaaca0be4aeac3cce5cdd80a2bad2aab01fd49a20bf4a0209d"},
]
[[package]]
name = "types-ipaddress"
version = "1.0.8"
description = "Typing stubs for ipaddress"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "types-ipaddress-1.0.8.tar.gz", hash = "sha256:a03df3be5935e50ba03fa843daabff539a041a28e73e0fce2c5705bee54d3841"},
{file = "types_ipaddress-1.0.8-py3-none-any.whl", hash = "sha256:4933b74da157ba877b1a705d64f6fa7742745e9ffd65e51011f370c11ebedb55"},
]
[[package]]
name = "types-orjson"
version = "3.6.2"
description = "Typing stubs for orjson"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "types-orjson-3.6.2.tar.gz", hash = "sha256:cf9afcc79a86325c7aff251790338109ed6f6b1bab09d2d4262dd18c85a3c638"},
{file = "types_orjson-3.6.2-py3-none-any.whl", hash = "sha256:22ee9a79236b6b0bfb35a0684eded62ad930a88a56797fa3c449b026cf7dbfe4"},
]
[[package]]
name = "types-pymysql"
version = "1.1.0.20241103"
description = "Typing stubs for PyMySQL"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-PyMySQL-1.1.0.20241103.tar.gz", hash = "sha256:a7628542919a0ba87625fb79eefb2a2de45fb4ad32afe6e561e8f2f27fb58b8c"},
{file = "types_PyMySQL-1.1.0.20241103-py3-none-any.whl", hash = "sha256:1a32efd8a74b5bf74c4de92a86c1cc6edaf3802dcfd5546635ab501eb5e3c096"},
]
[[package]]
name = "types-requests"
version = "2.32.0.20241016"
description = "Typing stubs for requests"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"},
{file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"},
]
[package.dependencies]
urllib3 = ">=2"
[[package]]
name = "types-toml"
version = "0.10.8.20240310"
description = "Typing stubs for toml"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"},
{file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"},
]
[[package]]
name = "types-ujson"
version = "5.10.0.20240515"
description = "Typing stubs for ujson"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "types-ujson-5.10.0.20240515.tar.gz", hash = "sha256:ceae7127f0dafe4af5dd0ecf98ee13e9d75951ef963b5c5a9b7ea92e0d71f0d7"},
{file = "types_ujson-5.10.0.20240515-py3-none-any.whl", hash = "sha256:02bafc36b3a93d2511757a64ff88bd505e0a57fba08183a9150fbcfcb2015310"},
]
[[package]]
name = "typing-extensions"
version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "typing-inspection"
version = "0.4.1"
description = "Runtime typing introspection tools"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"},
{file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"},
]
[package.dependencies]
typing-extensions = ">=4.12.0"
[[package]]
name = "urllib3"
version = "2.6.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4"},
{file = "urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed"},
]
[package.extras]
brotli = ["brotli (>=1.2.0) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=1.2.0.0) ; platform_python_implementation != \"CPython\""]
h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["backports-zstd (>=1.0.0) ; python_version < \"3.14\""]
[[package]]
name = "verspec"
version = "0.1.0"
description = "Flexible version handling"
optional = false
python-versions = "*"
groups = ["dev"]
files = [
{file = "verspec-0.1.0-py3-none-any.whl", hash = "sha256:741877d5633cc9464c45a469ae2a31e801e6dbbaa85b9675d481cda100f11c31"},
{file = "verspec-0.1.0.tar.gz", hash = "sha256:c4504ca697b2056cdb4bfa7121461f5a0e81809255b41c03dda4ba823637c01e"},
]
[package.extras]
test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"]
[[package]]
name = "virtualenv"
version = "20.28.0"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"},
{file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"},
]
[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""]
[[package]]
name = "watchdog"
version = "4.0.2"
description = "Filesystem events monitoring"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"},
{file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"},
{file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"},
{file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"},
{file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"},
{file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"},
{file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"},
{file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"},
{file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"},
{file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"},
{file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"},
{file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"},
{file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"},
{file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"},
{file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"},
{file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"},
{file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"},
{file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"},
{file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"},
{file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"},
{file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"},
{file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"},
{file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"},
{file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"},
{file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"},
{file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"},
{file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"},
{file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"},
{file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"},
]
[package.extras]
watchmedo = ["PyYAML (>=3.10)"]
[[package]]
name = "yappi"
version = "1.6.10"
description = "Yet Another Python Profiler"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
files = [
{file = "yappi-1.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f03127742746ec4cf7e422b08212daf094505ab7f5d725d7b273ed3c475c3d9"},
{file = "yappi-1.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bbafb779c3f90edd09fd34733859226785618adee3179d5949dbba2e90f550a"},
{file = "yappi-1.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f326045442f7d63aa54dc4a18eda358b186af3316ae52619dd606058fb3b4182"},
{file = "yappi-1.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:737e3cb6bb05f326eb63000663a4dc08dc08cc9827f7634445250c9610e5e717"},
{file = "yappi-1.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7c01a2bd8abc3b6d33ae60dea26f97e2372e0087a747289bbab0fe67c8ac8925"},
{file = "yappi-1.6.10-cp310-cp310-win32.whl", hash = "sha256:cf117a9f733e0d8386bc8c454c11b275999c4bf559d742cbb8b60ace1d813f23"},
{file = "yappi-1.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:402252d543e47464707ea5d7e4a63c7e77ce81cb58b8559c8883e67ae483911c"},
{file = "yappi-1.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:20b8289e8cca781e948f72d86c03b308e077abeec53ec60080f77319041e0511"},
{file = "yappi-1.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4bc9a30b162cb0e13d6400476fa05c272992bd359592e9bba1a570878d9e155c"},
{file = "yappi-1.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40aa421ea7078795ed2f0e6bae3f8f64f6cd5019c885a12c613b44dd1fc598b4"},
{file = "yappi-1.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0d62741c0ac883067e40481ab89ddd9e004292dbd22ac5992cf45745bf28ccc3"},
{file = "yappi-1.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1cf46ebe43ac95f8736618a5f0ac763c7502a3aa964a1dda083d9e9c1bf07b12"},
{file = "yappi-1.6.10-cp311-cp311-win32.whl", hash = "sha256:ff3688aa99b08ee10ced478b7255ac03865a8b5c0677482056acfe4d4f56e45f"},
{file = "yappi-1.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:4bd4f820e84d823724b8de4bf6857025e9e6c953978dd32485e054cf7de0eda7"},
{file = "yappi-1.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:32c6d928604d7a236090bc36d324f309fe8344c91123bb84e37c43f6677adddc"},
{file = "yappi-1.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9683c40de7e4ddff225068032cd97a6d928e4beddd9c7cf6515325be8ac28036"},
{file = "yappi-1.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:733a212014f2b44673ed62be53b3d4dd458844cd2008ba107f27a3293e42f43a"},
{file = "yappi-1.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7d80938e566ac6329daa3b036fdf7bd34488010efcf0a65169a44603878daa4e"},
{file = "yappi-1.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:01705971b728a4f95829b723d08883c7623ec275f4066f4048b28dc0151fe0af"},
{file = "yappi-1.6.10-cp312-cp312-win32.whl", hash = "sha256:8dd13a430b046e2921ddf63d992da97968724b41a03e68292f06a2afa11c9d6e"},
{file = "yappi-1.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:a50eb3aec893c40554f8f811d3341af266d844e7759f7f7abfcdba2744885ea3"},
{file = "yappi-1.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:944df9ebc6b283d6591a6b5f4c586d0eb9c6131c915f1b20fb36127ade83720d"},
{file = "yappi-1.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3736ea6458edbabd96918d88e2963594823e4ab4c58d62a52ef81f6b5839ec19"},
{file = "yappi-1.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27bbc3311a3662231cff395d38061683fac5c538f3bab6796ff05511d2cce43"},
{file = "yappi-1.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:354cf94d659302b421b13c03487f2f1bce969b97b85fba88afb11f2ef83c35f3"},
{file = "yappi-1.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1d82839835ae2c291b88fb56d82f80c88c00d76df29f3c1ed050db73b553bef0"},
{file = "yappi-1.6.10-cp313-cp313-win32.whl", hash = "sha256:fc84074575afcc5a2a712e132c0b51541b7434b3099be99f573964ef3b6064a8"},
{file = "yappi-1.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:334b31dfefae02bc28b7cd50953aaaae3292e40c15efb613792e4a587281a161"},
{file = "yappi-1.6.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f0b4bbdbaeda9ae84364a26cef6ccc512c44f3131a0b074f8892c5147f2e3bea"},
{file = "yappi-1.6.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9b3e1ce82b2bf30eeab19df7544d2caf5d7dc06bd7196ee2249a94e2081a5ae"},
{file = "yappi-1.6.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d95ce88d0b533a44a6d9521b983e3412e5c50d7fd152f2155764effad4ecf7f"},
{file = "yappi-1.6.10-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:ce9b908e99368c14bcdc1e198fc2ffe0cf42191ebfcec5458d10c4335f2abaf6"},
{file = "yappi-1.6.10-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:307d681dd0cdaa7986e3b22115e41597f92778db03ba9be5096cfcb13929c5e9"},
{file = "yappi-1.6.10-cp36-cp36m-win32.whl", hash = "sha256:de7aeaae96ce5d727d2d3f905dfbdbb512c4be1f7ef5178facac0835da63738a"},
{file = "yappi-1.6.10-cp36-cp36m-win_amd64.whl", hash = "sha256:e234dfd385fefaecc640235448d912e35f6a1400bc73be723744e901f2432527"},
{file = "yappi-1.6.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:215964abb3818124bc638cf5456ca311e70188146afb30336cced0fc4ef42f5b"},
{file = "yappi-1.6.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3752ab9480f28709427d6077d220d963ed7caa84e18fd0f404022f4076850b0e"},
{file = "yappi-1.6.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1305d50e805358937b022d455a17127a7ea2eb8eaf7595e0d06b0760f4bcc58"},
{file = "yappi-1.6.10-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:c713b660a23f4f8a33ea08a168f9f94d92b0383683e8ae3e9467587b5a8a0eae"},
{file = "yappi-1.6.10-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:3aa33acd51ba1b5d81e5d6ec305d144531d215635b9dfd8ee1d57688c77725af"},
{file = "yappi-1.6.10-cp37-cp37m-win32.whl", hash = "sha256:228ab550d53b5e37d618b42f5085e504376963b48f867d45d0fdc8a1e0c811d2"},
{file = "yappi-1.6.10-cp37-cp37m-win_amd64.whl", hash = "sha256:2246e57e1ab7d11a184042fe5726fbffca8c1a59c5eb01d1a043741403bf844d"},
{file = "yappi-1.6.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b1795ea62ee9a39c1cff01a2c477b8bd5b1ca95c17d258efbf770b73eb62b2b8"},
{file = "yappi-1.6.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ba5c27a82cdd84e5102b789ab5061431944e3dee27e0970c3167b3bce78b262"},
{file = "yappi-1.6.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d229ab4f2711aeed440037d9007db79d776e79c552ecde23b0b68591fa7ecccf"},
{file = "yappi-1.6.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:8a4bd5dd1c50e81440c712e6f43ac682768690d2dd0307665910a52db2d69175"},
{file = "yappi-1.6.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:49f1f8b16d6f42a79a06ae5268f39e71de3648d6797471dc71d80d91be4a6484"},
{file = "yappi-1.6.10-cp38-cp38-win32.whl", hash = "sha256:dec8fb0125fe636f9218ec3ce022d8435299beadfee1def82ee75e11bce38ebd"},
{file = "yappi-1.6.10-cp38-cp38-win_amd64.whl", hash = "sha256:6822f33ae4474eb9ffc8865e64cba70daef23832be36b4d63d1d8dfd890101cf"},
{file = "yappi-1.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:198831ccab42295ae2be265d422fdc0d9ccc8ae3e074e7c70fb58731e8181221"},
{file = "yappi-1.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:721a67aa9f110d509e2729cb145b79b87fe28d42e253476a524fa781aff41c3c"},
{file = "yappi-1.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2e08a11f7e6b49ef09659506ac3bf0484881d6f634c6026c6bcbe3d345ee7c2"},
{file = "yappi-1.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ba1cd02fd914441d916db2972c3657711b2d7843cdd481e16244dee5870579af"},
{file = "yappi-1.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2594ab790a9db37223e7861ec9cdf74d1edf05a78b31a8806ff24abcde668bea"},
{file = "yappi-1.6.10-cp39-cp39-win32.whl", hash = "sha256:4efb7ee80a1ac4511e900ebced03aea761ab129269b0d571586a25d3a71e7a35"},
{file = "yappi-1.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:f3f833bae26d1046610a08ddb0c968311056d07c8930ab11985e1e38c97cb91e"},
{file = "yappi-1.6.10.tar.gz", hash = "sha256:463b822727658937bd95a7d80ca9758605b8cd0014e004e9e520ec9cb4db0c92"},
]
[package.extras]
test = ["gevent (>=20.6.2)"]
[[package]]
name = "zipp"
version = "3.20.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
{file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
{file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
]
[package.extras]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
[extras]
aiopg = ["aiopg", "psycopg2-binary"]
all = ["PyMySQL", "aiomysql", "aiopg", "aiosqlite", "asyncpg", "cryptography", "mysqlclient", "orjson", "psycopg2-binary"]
crypto = ["cryptography"]
mysql = ["PyMySQL", "aiomysql"]
orjson = ["orjson"]
postgres = ["asyncpg", "psycopg2-binary"]
postgresql = ["asyncpg", "psycopg2-binary"]
sqlite = ["aiosqlite"]
[metadata]
lock-version = "2.1"
python-versions = "^3.9.0"
content-hash = "33a66acc799186911ee0041106253855ca1400774e5846f50bbe181602dc83f6"
collerek-ormar-c09209a/pyproject.toml 0000664 0000000 0000000 00000010251 15130200524 0017664 0 ustar 00root root 0000000 0000000 [project]
name = "ormar"
[tool.poetry]
name = "ormar"
version = "0.21.0"
description = "An async ORM with fastapi in mind and pydantic validation."
authors = ["Radosław Drążkiewicz "]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/collerek/ormar"
repository = "https://github.com/collerek/ormar"
documentation = "https://collerek.github.io/ormar/"
packages = [
{ include="ormar" }
]
keywords = [
"orm",
"sqlalchemy",
"fastapi",
"pydantic",
"databases",
"async",
"alembic",
]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
"Framework :: AsyncIO",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3 :: Only",
]
[tool.poetry.dependencies]
python = "^3.9.0"
databases = "^0.9.0"
pydantic = "^2.11.9"
SQLAlchemy = {version = "^2.0.40", extras = ["asyncio"]}
cryptography = { version = ">=41.0.3,<44.0.0", optional = true }
# Async database drivers
aiosqlite = { version = ">=0.19,<0.21", optional = true }
aiomysql = { version = ">=0.1.0", optional = true }
aiopg = { version = "^1.3.3", optional = true }
asyncpg = { version = ">=0.28,<0.31", optional = true }
# Sync database drivers for standard tooling around setup/teardown/migrations.
psycopg2-binary = { version = "^2.9.1", optional = true }
mysqlclient = { version = "^2.1.0", optional = true }
PyMySQL = { version = "^1.1.0", optional = true }
[tool.poetry.dependencies.orjson]
version = ">=3.6.4"
optional = true
[tool.poetry.extras]
postgresql = ["asyncpg", "psycopg2-binary"]
postgres = ["asyncpg", "psycopg2-binary"]
aiopg = ["aiopg", "psycopg2-binary"]
mysql = ["aiomysql", "PyMySQL"]
sqlite = ["aiosqlite"]
orjson = ["orjson"]
crypto = ["cryptography"]
all = [
"aiosqlite",
"asyncpg",
"aiopg",
"psycopg2-binary",
"aiomysql",
"mysqlclient",
"PyMySQL",
"orjson",
"cryptography",
]
[tool.poetry.group.dev.dependencies]
# Testing
pytest = ">=7.4.4,<9.0.0"
pytest-cov = ">=4,<6"
codecov = "^2.1.13"
pytest-asyncio = ">=0.21,<0.24"
fastapi = ">=0.125.0"
black = "^24.1.0"
ruff = ">=0.5.1,<0.8.3"
setuptools = "^78.1.1"
# types
mypy = "^1.8.0"
types-ujson = "^5.7.0"
types-PyMySQL = "^1.0.19"
types-ipaddress = "^1.0.1"
types-enum34 = "^1.1.1"
types-cryptography = "^3.3.23"
types-orjson = "^3.6.1"
types-aiofiles = "^23.2.0"
types-requests = "^2.32.0"
types-toml = "^0.10.8"
# Documentation
mkdocs = "^1.5.3"
mkdocs-material = ">=8.1.2,<9.6"
mkdocs-material-extensions = "^1.2"
mkdocstrings = {version = "==0.26.1", extras = ["python"]}
mkdocs-gen-files = "^0.5.0"
mkdocs-literate-nav = "^0.6.1"
mkdocs-section-index = "^0.3.7"
dataclasses = { version = ">=0.6.0,<0.8 || >0.8,<1.0.0" }
# Performance testing
yappi = "^1.6.0"
pytest-benchmark = "^4.0.0"
nest-asyncio = "^1.6.0"
pre-commit = ">=2.21,<4.0"
httpx = ">=0.28.0"
asgi-lifespan = "^2.1.0"
pydantic-extra-types = "^2.5.0"
watchdog = "<5.0.0"
pytest-codspeed = "^4.2.0"
mike = "^2.0.0"
faker = ">=24.3,<36.0"
email-validator = "^2.1.1"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.mypy]
# TODO: Enable mypy plugin after pydantic release supporting toml file
disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
[[tool.mypy.overrides]]
module = ["tests.*", "benchmarks.*"]
disallow_untyped_calls = false
disallow_untyped_defs = false
disallow_incomplete_defs = false
[[tool.mypy.overrides]]
module = "docs_src.*"
ignore_errors = true
[[tool.mypy.overrides]]
module = ["sqlalchemy.*", "asyncpg", "nest_asyncio"]
ignore_missing_imports = true
[tool.yapf]
based_on_style = "pep8"
disable_ending_comma_heuristic = true
split_arguments_when_comma_terminated = true
[tool.ruff]
select = ["E", "F", "I"]
ignore = ["E402"]
line-length = 88
src = ["ormar", "tests"]
collerek-ormar-c09209a/scripts/ 0000775 0000000 0000000 00000000000 15130200524 0016440 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/scripts/docker-compose.yml 0000664 0000000 0000000 00000000623 15130200524 0022076 0 ustar 00root root 0000000 0000000 version: '2.1'
services:
postgres:
image: postgres:10.8
environment:
POSTGRES_USER: username
POSTGRES_PASSWORD: password
POSTGRES_DB: testsuite
ports:
- 5432:5432
mysql:
image: mysql:5.7
environment:
MYSQL_USER: username
MYSQL_PASSWORD: password
MYSQL_ROOT_PASSWORD: password
MYSQL_DATABASE: testsuite
ports:
- 3306:3306 collerek-ormar-c09209a/scripts/test.sh 0000775 0000000 0000000 00000000372 15130200524 0017760 0 ustar 00root root 0000000 0000000 #!/bin/sh -e
PACKAGE="ormar"
PREFIX=""
# Prefer the project virtualenv's binaries when a venv/ directory exists.
if [ -d 'venv' ] ; then
    PREFIX="venv/bin/"
fi
set -x
# Run the suite with coverage over both the package and the tests,
# emitting XML + terminal reports and failing below 100% total coverage.
PYTHONPATH=. ${PREFIX}pytest --ignore venv --cov=${PACKAGE} --cov=tests --cov-report=xml --cov-fail-under=100 --cov-report=term-missing tests/ "${@}"
collerek-ormar-c09209a/scripts/test_docs.sh 0000664 0000000 0000000 00000000243 15130200524 0020762 0 ustar 00root root 0000000 0000000 #!/bin/sh -e
PACKAGE="docs_src"
PREFIX=""
# Prefer the project virtualenv's binaries when a venv/ directory exists.
if [ -d 'venv' ] ; then
    PREFIX="venv/bin/"
fi
set -x
# Run only the documentation example tests (no coverage gate here).
PYTHONPATH=. ${PREFIX}pytest --ignore venv docs_src/ "${@}"
collerek-ormar-c09209a/tests/ 0000775 0000000 0000000 00000000000 15130200524 0016113 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0020212 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/lifespan.py 0000664 0000000 0000000 00000001436 15130200524 0020272 0 ustar 00root root 0000000 0000000 from contextlib import asynccontextmanager
from typing import AsyncIterator
import pytest
import sqlalchemy
from fastapi import FastAPI
def lifespan(config):
    """Build an app lifespan handler bound to the given ormar config.

    Returns an async context manager suitable for FastAPI's ``lifespan=``
    argument: it opens the configured database connection on startup when
    one is not already open, and closes it on shutdown when it is.
    """

    @asynccontextmanager
    async def _manage_connection(_: FastAPI) -> AsyncIterator[None]:
        needs_connect = not config.database.is_connected
        if needs_connect:
            await config.database.connect()
        yield
        if config.database.is_connected:
            await config.database.disconnect()

    return _manage_connection
def init_tests(config, scope="module"):
    """Return an autouse pytest fixture creating/dropping the test schema.

    The fixture builds all tables registered on ``config.metadata`` before
    the tests in the given scope run, and drops them again afterwards.
    """

    @pytest.fixture(autouse=True, scope=scope)
    def create_database():
        engine = sqlalchemy.create_engine(config.database.url._url)
        config.engine = engine
        config.metadata.create_all(engine)
        yield
        # Tear down so each scope starts from a clean schema.
        config.metadata.drop_all(engine)

    return create_database
collerek-ormar-c09209a/tests/settings.py 0000664 0000000 0000000 00000001156 15130200524 0020330 0 ustar 00root root 0000000 0000000 import os
import databases
import ormar
import sqlalchemy
# Database to test against; defaults to a local SQLite file when the
# DATABASE_URL environment variable is not set (CI points this at
# postgres/mysql services instead).
DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///test.db")
database_url = databases.DatabaseURL(DATABASE_URL)
if database_url.scheme == "postgresql+aiopg":  # pragma no cover
    # aiopg does not accept the explicit driver suffix in the URL, strip it.
    DATABASE_URL = str(database_url.replace(driver=None))
print("USED DB:", DATABASE_URL)
def create_config(**args):
    """Return a fresh ``ormar.OrmarConfig`` wired to the test database.

    Keyword arguments are forwarded verbatim to ``databases.Database``
    (e.g. ``force_rollback=True``).
    """
    return ormar.OrmarConfig(
        metadata=sqlalchemy.MetaData(),
        database=databases.Database(DATABASE_URL, **args),
        engine=sqlalchemy.create_engine(DATABASE_URL),
    )
collerek-ormar-c09209a/tests/test_deferred/ 0000775 0000000 0000000 00000000000 15130200524 0020732 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_deferred/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023031 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_deferred/test_forward_cross_refs.py 0000664 0000000 0000000 00000012215 15130200524 0026240 0 ustar 00root root 0000000 0000000 # type: ignore
from typing import ForwardRef, List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
TeacherRef = ForwardRef("Teacher")
class Student(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # FK declared against a ForwardRef; resolved by Student.update_forward_refs()
    # once Teacher is defined below.
    primary_teacher: TeacherRef = ormar.ForeignKey(
        TeacherRef, related_name="own_students"
    )
class StudentTeacher(ormar.Model):
    # Explicit (empty) through model for the Teacher <-> Student ManyToMany.
    ormar_config = base_ormar_config.copy(tablename="students_x_teachers")
class Teacher(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # m2m through the explicit StudentTeacher model; Student already exists,
    # so no ForwardRef is needed here.
    students = ormar.ManyToMany(
        Student, through=StudentTeacher, related_name="teachers"
    )
Student.update_forward_refs()
CityRef = ForwardRef("City")
CountryRef = ForwardRef("Country")
class Country(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="countries")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128)
    # Both relations use ForwardRefs (City is defined later, Country refers to
    # itself); resolved by Country.update_forward_refs() below.
    capital: Optional[CityRef] = ormar.ForeignKey(
        CityRef, related_name="capital_city", nullable=True
    )
    borders: Optional[List[CountryRef]] = ormar.ManyToMany(CountryRef)
class City(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="cities")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128)
    # skip_reverse: Country does not get a reverse accessor from this FK.
    country: Country = ormar.ForeignKey(
        Country, related_name="cities", skip_reverse=True
    )
Country.update_forward_refs()
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_double_relations():
    """Both the m2m ``students`` and the reverse-FK ``own_students`` relations
    on Teacher can be loaded, filtered and ordered within one query."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            t1 = await Teacher.objects.create(name="Mr. Jones")
            t2 = await Teacher.objects.create(name="Ms. Smith")
            t3 = await Teacher.objects.create(name="Mr. Quibble")
            s1 = await Student.objects.create(name="Joe", primary_teacher=t1)
            s2 = await Student.objects.create(name="Sam", primary_teacher=t1)
            s3 = await Student.objects.create(name="Kate", primary_teacher=t2)
            s4 = await Student.objects.create(name="Zoe", primary_teacher=t2)
            s5 = await Student.objects.create(name="John", primary_teacher=t3)
            s6 = await Student.objects.create(name="Anna", primary_teacher=t3)
            # Every teacher teaches every student through the m2m relation.
            for t in [t1, t2, t3]:
                for s in [s1, s2, s3, s4, s5, s6]:
                    await t.students.add(s)
            jones = (
                await Teacher.objects.select_related(["students", "own_students"])
                .order_by(["students__name", "own_students__name"])
                .get(name="Mr. Jones")
            )
            assert len(jones.students) == 6
            assert jones.students[0].name == "Anna"
            assert jones.students[5].name == "Zoe"
            assert len(jones.own_students) == 2
            assert jones.own_students[0].name == "Joe"
            assert jones.own_students[1].name == "Sam"
            # Filtering on the m2m relation restricts only that relation;
            # own_students is still fully populated.
            smith = (
                await Teacher.objects.select_related(["students", "own_students"])
                .filter(students__name__contains="a")
                .order_by(["students__name", "own_students__name"])
                .get(name="Ms. Smith")
            )
            assert len(smith.students) == 3
            assert smith.students[0].name == "Anna"
            assert smith.students[2].name == "Sam"
            assert len(smith.own_students) == 2
            assert smith.own_students[0].name == "Kate"
            assert smith.own_students[1].name == "Zoe"
            # Descending order on one relation with ascending on the other.
            quibble = (
                await Teacher.objects.select_related(["students", "own_students"])
                .filter(students__name__startswith="J")
                .order_by(["-students__name", "own_students__name"])
                .get(name="Mr. Quibble")
            )
            assert len(quibble.students) == 2
            assert quibble.students[1].name == "Joe"
            assert quibble.students[0].name == "John"
            assert len(quibble.own_students) == 2
            assert quibble.own_students[1].name == "John"
            assert quibble.own_students[0].name == "Anna"
@pytest.mark.asyncio
async def test_auto_through_model():
    """A ManyToMany declared without an explicit ``through`` model works
    end to end (ormar auto-generates the through table)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            england = await Country(name="England").save()
            france = await Country(name="France").save()
            london = await City(name="London", country=england).save()
            england.capital = london
            await england.update()
            await england.borders.add(france)
            check = await Country.objects.select_related(["capital", "borders"]).get(
                name="England"
            )
            assert check.name == "England"
            assert check.capital.name == "London"
            # The skip_reverse FK is still reachable when traversed via capital.
            assert check.capital.country.pk == check.pk
            assert check.borders[0] == france
collerek-ormar-c09209a/tests/test_deferred/test_forward_refs.py 0000664 0000000 0000000 00000021760 15130200524 0025034 0 ustar 00root root 0000000 0000000 # type: ignore
from typing import ForwardRef, List, Optional
import ormar
import pytest
import pytest_asyncio
import sqlalchemy as sa
from ormar.exceptions import ModelError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
PersonRef = ForwardRef("Person")
class Person(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Self-referential FK via ForwardRef; resolved by update_forward_refs() below.
    supervisor: PersonRef = ormar.ForeignKey(PersonRef, related_name="employees")
Person.update_forward_refs()
GameRef = ForwardRef("Game")
ChildRef = ForwardRef("Child")
ChildFriendRef = ForwardRef("ChildFriend")
class Child(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Two FKs to the not-yet-defined Game model (ForwardRefs).
    favourite_game: GameRef = ormar.ForeignKey(GameRef, related_name="liked_by")
    least_favourite_game: GameRef = ormar.ForeignKey(
        GameRef, related_name="not_liked_by"
    )
    # Self-referential m2m whose through model is also a ForwardRef.
    friends = ormar.ManyToMany(
        ChildRef, through=ChildFriendRef, related_name="also_friends"
    )
class ChildFriend(ormar.Model):
    # Empty through model for the self-referential Child.friends m2m.
    ormar_config = base_ormar_config.copy()
class Game(ormar.Model):
    # Defined after Child, so Child references it through GameRef.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
Child.update_forward_refs()
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    # Run the test first, then purge every row it created.  Deletion order
    # matters: through-table rows must go before the models they link.
    yield
    async with base_ormar_config.database:
        await ChildFriend.objects.delete(each=True)
        await Child.objects.delete(each=True)
        await Game.objects.delete(each=True)
        await Person.objects.delete(each=True)
@pytest.mark.asyncio
async def test_not_updated_model_raises_errors():
    """A model with an unresolved ForwardRef FK (``update_forward_refs`` never
    called) raises ModelError on construction, create and get."""
    Person2Ref = ForwardRef("Person2")

    class Person2(ormar.Model):
        ormar_config = base_ormar_config.copy()

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)
        supervisor: Person2Ref = ormar.ForeignKey(Person2Ref, related_name="employees")

    with pytest.raises(ModelError):
        await Person2.objects.create(name="Test")

    with pytest.raises(ModelError):
        Person2(name="Test")

    with pytest.raises(ModelError):
        await Person2.objects.get()
@pytest.mark.asyncio
async def test_not_updated_model_m2m_raises_errors():
    """As above, but for an unresolved ManyToMany target ForwardRef."""
    Person3Ref = ForwardRef("Person3")

    class PersonFriend(ormar.Model):
        ormar_config = base_ormar_config.copy()

    class Person3(ormar.Model):
        ormar_config = base_ormar_config.copy()

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)
        supervisors: Person3Ref = ormar.ManyToMany(
            Person3Ref, through=PersonFriend, related_name="employees"
        )

    with pytest.raises(ModelError):
        await Person3.objects.create(name="Test")

    with pytest.raises(ModelError):
        Person3(name="Test")

    with pytest.raises(ModelError):
        await Person3.objects.get()
@pytest.mark.asyncio
async def test_not_updated_model_m2m_through_raises_errors():
    """As above, but only the m2m ``through`` model is an unresolved ForwardRef."""
    PersonPetRef = ForwardRef("PersonPet")

    class Pet(ormar.Model):
        ormar_config = base_ormar_config.copy()

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)

    class Person4(ormar.Model):
        ormar_config = base_ormar_config.copy()

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)
        pets: List[Pet] = ormar.ManyToMany(
            Pet, through=PersonPetRef, related_name="owners"
        )

    class PersonPet(ormar.Model):
        ormar_config = base_ormar_config.copy()

    with pytest.raises(ModelError):
        await Person4.objects.create(name="Test")

    with pytest.raises(ModelError):
        Person4(name="Test")

    with pytest.raises(ModelError):
        await Person4.objects.get()
def test_proper_field_init():
    """After update_forward_refs() the ForwardRef FK is fully materialized:
    ormar field, pydantic field, SQLAlchemy column with FK, and a relation alias."""
    assert "supervisor" in Person.ormar_config.model_fields
    assert Person.ormar_config.model_fields["supervisor"].to == Person
    assert "supervisor" in Person.model_fields
    assert Person.model_fields["supervisor"].annotation == Optional[Person]
    assert "supervisor" in Person.ormar_config.table.columns
    # FK columns mirror the target PK type (Integer here).
    assert isinstance(
        Person.ormar_config.table.columns["supervisor"].type, sa.sql.sqltypes.Integer
    )
    assert len(Person.ormar_config.table.columns["supervisor"].foreign_keys) > 0
    assert "person_supervisor" in Person.ormar_config.alias_manager._aliases_new
@pytest.mark.asyncio
async def test_self_relation():
    """Self-referential FK works in both directions after forward-ref update."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sam = await Person.objects.create(name="Sam")
            joe = await Person(name="Joe", supervisor=sam).save()
            assert joe.supervisor.name == "Sam"
            joe_check = await Person.objects.select_related("supervisor").get(
                name="Joe"
            )
            assert joe_check.supervisor.name == "Sam"
            # Reverse side of the same FK.
            sam_check = await Person.objects.select_related("employees").get(name="Sam")
            assert sam_check.name == "Sam"
            assert sam_check.employees[0].name == "Joe"
@pytest.mark.asyncio
async def test_other_forwardref_relation(cleanup):
    """FKs declared against a later-defined model resolve in both directions."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            checkers = await Game.objects.create(name="checkers")
            uno = await Game(name="Uno").save()
            await Child(
                name="Billy", favourite_game=uno, least_favourite_game=checkers
            ).save()
            await Child(
                name="Kate", favourite_game=checkers, least_favourite_game=uno
            ).save()
            billy_check = await Child.objects.select_related(
                ["favourite_game", "least_favourite_game"]
            ).get(name="Billy")
            assert billy_check.favourite_game == uno
            assert billy_check.least_favourite_game == checkers
            # Reverse relations declared via related_name on the FKs.
            uno_check = await Game.objects.select_related(
                ["liked_by", "not_liked_by"]
            ).get(name="Uno")
            assert uno_check.liked_by[0].name == "Billy"
            assert uno_check.not_liked_by[0].name == "Kate"
@pytest.mark.asyncio
async def test_m2m_self_forwardref_relation(cleanup):
    """Self-referential m2m (friends / also_friends) declared with ForwardRefs,
    including nested select_related and filtering through the relation."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            checkers = await Game.objects.create(name="Checkers")
            uno = await Game(name="Uno").save()
            jenga = await Game(name="Jenga").save()
            billy = await Child(
                name="Billy", favourite_game=uno, least_favourite_game=checkers
            ).save()
            kate = await Child(
                name="Kate", favourite_game=checkers, least_favourite_game=uno
            ).save()
            steve = await Child(
                name="Steve", favourite_game=jenga, least_favourite_game=uno
            ).save()
            await billy.friends.add(kate)
            await billy.friends.add(steve)
            billy_check = await Child.objects.select_related(
                [
                    "friends",
                    "favourite_game",
                    "least_favourite_game",
                    "friends__favourite_game",
                    "friends__least_favourite_game",
                ]
            ).get(name="Billy")
            assert len(billy_check.friends) == 2
            assert billy_check.friends[0].name == "Kate"
            assert billy_check.friends[0].favourite_game.name == "Checkers"
            assert billy_check.friends[0].least_favourite_game.name == "Uno"
            assert billy_check.friends[1].name == "Steve"
            assert billy_check.friends[1].favourite_game.name == "Jenga"
            assert billy_check.friends[1].least_favourite_game.name == "Uno"
            assert billy_check.favourite_game.name == "Uno"
            # Reverse side of the self-referential m2m.
            kate_check = await Child.objects.select_related(["also_friends"]).get(
                name="Kate"
            )
            assert len(kate_check.also_friends) == 1
            assert kate_check.also_friends[0].name == "Billy"
            # Filtering on a nested relation restricts the prefetched friends.
            billy_check = (
                await Child.objects.select_related(
                    [
                        "friends",
                        "favourite_game",
                        "least_favourite_game",
                        "friends__favourite_game",
                        "friends__least_favourite_game",
                    ]
                )
                .filter(friends__favourite_game__name="Checkers")
                .get(name="Billy")
            )
            assert len(billy_check.friends) == 1
            assert billy_check.friends[0].name == "Kate"
            assert billy_check.friends[0].favourite_game.name == "Checkers"
            assert billy_check.friends[0].least_favourite_game.name == "Uno"
collerek-ormar-c09209a/tests/test_deferred/test_more_same_table_joins.py 0000664 0000000 0000000 00000012261 15130200524 0026665 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class Department(ormar.Model):
    """Org unit; primary key is assigned manually (autoincrement disabled)."""

    ormar_config = base_ormar_config.copy(tablename="departments")

    id: int = ormar.Integer(primary_key=True, autoincrement=False)
    name: str = ormar.String(max_length=100)


class SchoolClass(ormar.Model):
    """A class that both students and teachers point at via FK."""

    ormar_config = base_ormar_config.copy(tablename="schoolclasses")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Category(ormar.Model):
    """Category owned by a Department (FK is required: nullable=False)."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    department: Optional[Department] = ormar.ForeignKey(Department, nullable=False)


class Student(ormar.Model):
    """Student with the same FK layout as Teacher, so joins hit the same
    tables (schoolclasses, categories) through two different relations."""

    ormar_config = base_ormar_config.copy(tablename="students")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)


class Teacher(ormar.Model):
    """Mirror of Student — same FKs to SchoolClass and Category."""

    ormar_config = base_ormar_config.copy(tablename="teachers")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
create_test_database = init_tests(base_ormar_config)
async def create_data():
    """Seed the test database: two departments, two classes, two categories
    (each tied to a department), three students and one teacher.

    Creation order is significant — autoincremented ids and the assertions in
    the tests below depend on it.
    """
    math_dept = await Department.objects.create(id=1, name="Math Department")
    law_dept = await Department.objects.create(id=2, name="Law Department")
    math_class = await SchoolClass.objects.create(name="Math")
    logic_class = await SchoolClass.objects.create(name="Logic")
    foreign = await Category.objects.create(name="Foreign", department=math_dept)
    domestic = await Category.objects.create(name="Domestic", department=law_dept)
    # Students are created in a fixed order so that list positions in
    # select_related results stay deterministic.
    for student_name, category, schoolclass in (
        ("Jane", foreign, math_class),
        ("Judy", domestic, math_class),
        ("Jack", domestic, logic_class),
    ):
        await Student.objects.create(
            name=student_name, category=category, schoolclass=schoolclass
        )
    await Teacher.objects.create(
        name="Joe", category=domestic, schoolclass=math_class
    )
@pytest.mark.asyncio
async def test_model_multiple_instances_of_same_table_in_schema():
    """Join the same tables (categories -> departments) twice — once through
    teachers and once through students — and check both branches populate.

    The config was created with force_rollback=True, so no explicit
    transaction block is needed here.
    """
    async with base_ormar_config.database:
        await create_data()
        classes = await SchoolClass.objects.select_related(
            ["teachers__category__department", "students__category__department"]
        ).all()
        assert classes[0].name == "Math"
        assert classes[0].students[0].name == "Jane"
        assert len(classes[0].model_dump().get("students")) == 2
        # Joe (teacher) has the Domestic category -> Law Department,
        # Jane (student) has the Foreign category -> Math Department;
        # each relation branch must carry its own department row.
        assert classes[0].teachers[0].category.department.name == "Law Department"
        assert classes[0].students[0].category.department.name == "Math Department"
@pytest.mark.asyncio
async def test_load_all_multiple_instances_of_same_table_in_schema():
    """Same duplicated-table scenario as above, but populated lazily via
    load_all(follow=True) on an already-fetched instance."""
    async with base_ormar_config.database:
        await create_data()
        math_class = await SchoolClass.objects.get(name="Math")
        assert math_class.name == "Math"
        # follow=True traverses nested relations (teachers/students ->
        # category -> department) when loading.
        await math_class.load_all(follow=True)
        assert math_class.students[0].name == "Jane"
        assert len(math_class.model_dump().get("students")) == 2
        assert math_class.teachers[0].category.department.name == "Law Department"
        assert math_class.students[0].category.department.name == "Math Department"
@pytest.mark.asyncio
async def test_filter_groups_with_instances_of_same_table_in_schema():
    """Combine ormar.or_/and_ filter groups with relations that join the
    same tables twice, and verify the filters apply to the right alias."""
    async with base_ormar_config.database:
        await create_data()
        math_class = (
            await SchoolClass.objects.select_related(
                ["teachers__category__department", "students__category__department"]
            )
            .filter(
                ormar.or_(
                    students__name="Jane",
                    teachers__category__name="Domestic",
                    students__category__name="Foreign",
                )
            )
            .get(name="Math")
        )
        assert math_class.name == "Math"
        assert math_class.students[0].name == "Jane"
        # The or_ group does not prune the student list — both students of
        # the Math class are still present.
        assert len(math_class.model_dump().get("students")) == 2
        assert math_class.teachers[0].category.department.name == "Law Department"
        assert math_class.students[0].category.department.name == "Math Department"
        classes = (
            await SchoolClass.objects.select_related(
                ["students__category__department", "teachers__category__department"]
            )
            .filter(
                ormar.and_(
                    ormar.or_(
                        students__name="Jane", students__category__name="Foreign"
                    ),
                    teachers__category__department__name="Law Department",
                )
            )
            .all()
        )
        # Only the Math class satisfies both sides of the and_ group.
        assert len(classes) == 1
        assert classes[0].teachers[0].category.department.name == "Law Department"
        assert classes[0].students[0].category.department.name == "Math Department"
collerek-ormar-c09209a/tests/test_deferred/test_same_table_joins.py 0000664 0000000 0000000 00000011473 15130200524 0025647 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Department(ormar.Model):
    """Org unit; primary key is assigned manually (autoincrement disabled)."""

    ormar_config = base_ormar_config.copy(tablename="departments")

    id: int = ormar.Integer(primary_key=True, autoincrement=False)
    name: str = ormar.String(max_length=100)


class SchoolClass(ormar.Model):
    """Class owned by a Department — note the FK sits here in this module,
    unlike the sibling test module where Category carries it."""

    ormar_config = base_ormar_config.copy(tablename="schoolclasses")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    department: Optional[Department] = ormar.ForeignKey(Department, nullable=False)


class Category(ormar.Model):
    """Simple lookup table shared by Student and Teacher."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Student(ormar.Model):
    """Student with FKs to SchoolClass and Category."""

    ormar_config = base_ormar_config.copy(tablename="students")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)


class Teacher(ormar.Model):
    """Mirror of Student — same FKs, so joins reuse the same tables."""

    ormar_config = base_ormar_config.copy(tablename="teachers")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    schoolclass: Optional[SchoolClass] = ormar.ForeignKey(SchoolClass)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
create_test_database = init_tests(base_ormar_config)
async def create_data():
    """Seed the database: two departments, a class per department, two
    categories, three students and one teacher.

    Creation order is significant — the tests index into related lists.
    """
    math_dept = await Department.objects.create(id=1, name="Math Department")
    law_dept = await Department.objects.create(id=2, name="Law Department")
    math_class = await SchoolClass.objects.create(name="Math", department=math_dept)
    logic_class = await SchoolClass.objects.create(name="Logic", department=law_dept)
    foreign = await Category.objects.create(name="Foreign")
    domestic = await Category.objects.create(name="Domestic")
    # Fixed ordering keeps list positions in query results deterministic.
    for student_name, category, schoolclass in (
        ("Jane", foreign, math_class),
        ("Judy", domestic, math_class),
        ("Jack", domestic, logic_class),
    ):
        await Student.objects.create(
            name=student_name, category=category, schoolclass=schoolclass
        )
    await Teacher.objects.create(
        name="Joe", category=domestic, schoolclass=math_class
    )
@pytest.mark.asyncio
async def test_model_multiple_instances_of_same_table_in_schema():
    """Select the same table (schoolclasses) through two different paths and
    check that relations past the selected depth stay unloaded."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            classes = await SchoolClass.objects.select_related(
                ["teachers__category", "students__schoolclass"]
            ).all()
            assert classes[0].name == "Math"
            assert classes[0].students[0].name == "Jane"
            assert len(classes[0].model_dump().get("students")) == 2
            # since it's going from schoolclass => teacher
            # => schoolclass (same class) department is already populated
            assert classes[0].students[0].schoolclass.name == "Math"
            # department was not part of select_related, so only its pk is
            # set — name stays None until load() is called explicitly.
            assert classes[0].students[0].schoolclass.department.name is None
            await classes[0].students[0].schoolclass.department.load()
            assert (
                classes[0].students[0].schoolclass.department.name == "Math Department"
            )
            await classes[1].students[0].schoolclass.department.load()
            assert (
                classes[1].students[0].schoolclass.department.name == "Law Department"
            )
@pytest.mark.asyncio
async def test_right_tables_join():
    """Ensure the category join applies only to the teachers branch; the
    students' category must remain an unloaded pk-only stub."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            classes = await SchoolClass.objects.select_related(
                ["teachers__category", "students"]
            ).all()
            assert classes[0].teachers[0].category.name == "Domestic"
            # students__category was not selected — name is None until load().
            assert classes[0].students[0].category.name is None
            await classes[0].students[0].category.load()
            assert classes[0].students[0].category.name == "Foreign"
@pytest.mark.asyncio
async def test_multiple_reverse_related_objects():
    """Two reverse relations (teachers, students) joined to the same
    categories table must each resolve their own rows."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            classes = await SchoolClass.objects.select_related(
                ["teachers__category", "students__category"]
            ).all()
            assert classes[0].name == "Math"
            assert classes[0].students[1].name == "Judy"
            assert classes[0].students[0].category.name == "Foreign"
            assert classes[0].students[1].category.name == "Domestic"
            assert classes[0].teachers[0].category.name == "Domestic"
collerek-ormar-c09209a/tests/test_encryption/ 0000775 0000000 0000000 00000000000 15130200524 0021344 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_encryption/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023443 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_encryption/test_encrypted_columns.py 0000664 0000000 0000000 00000021062 15130200524 0026513 0 ustar 00root root 0000000 0000000 # type: ignore
import base64
import datetime
import decimal
import hashlib
import uuid
from typing import Any
import ormar
import pytest
from ormar import ModelDefinitionError, NoMatch
from ormar.fields.sqlalchemy_encrypted import EncryptedString
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
default_fernet = dict(
encrypt_secret="asd123", encrypt_backend=ormar.EncryptBackends.FERNET
)
class DummyBackend(ormar.fields.EncryptBackend):
    """No-op encryption backend used to exercise EncryptBackends.CUSTOM:
    values pass through unchanged in both directions."""

    def _initialize_backend(self, secret_key: bytes) -> None:
        # Nothing to set up — this backend ignores the secret entirely.
        pass

    def encrypt(self, value: Any) -> str:
        # Identity transform — stored value equals the plain value.
        return value

    def decrypt(self, value: Any) -> str:
        # Identity transform — returned value equals the stored value.
        return value
class Author(ormar.Model):
    """Kitchen-sink model covering every field type with column encryption.

    Most fields use Fernet via ``default_fernet``; ``password`` uses the
    one-way HASH backend and ``custom_backend`` uses the pass-through
    DummyBackend declared above.
    """

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, **default_fernet)
    uuid_test = ormar.UUID(default=uuid.uuid4, uuid_format="string")
    uuid_test2 = ormar.UUID(nullable=True, uuid_format="string")
    # HASH backend: value is stored as a digest, so reads return the hash.
    password: str = ormar.String(
        max_length=128,
        encrypt_secret="udxc32",
        encrypt_backend=ormar.EncryptBackends.HASH,
    )
    birth_year: int = ormar.Integer(
        nullable=True,
        encrypt_secret="secure89key%^&psdijfipew",
        encrypt_backend=ormar.EncryptBackends.FERNET,
    )
    test_text: str = ormar.Text(default="", **default_fernet)
    test_bool: bool = ormar.Boolean(nullable=False, **default_fernet)
    test_float: float = ormar.Float(**default_fernet)
    test_float2: float = ormar.Float(nullable=True, **default_fernet)
    test_datetime = ormar.DateTime(default=datetime.datetime.now, **default_fernet)
    test_date = ormar.Date(default=datetime.date.today, **default_fernet)
    test_time = ormar.Time(default=datetime.time, **default_fernet)
    test_json = ormar.JSON(default={}, **default_fernet)
    test_bigint: int = ormar.BigInteger(default=0, **default_fernet)
    test_smallint: int = ormar.SmallInteger(default=0, **default_fernet)
    # Both decimal spellings (scale/precision vs max_digits/decimal_places)
    # are exercised on purpose.
    test_decimal = ormar.Decimal(scale=2, precision=10, **default_fernet)
    test_decimal2 = ormar.Decimal(max_digits=10, decimal_places=2, **default_fernet)
    test_bytes = ormar.LargeBinary(max_length=100, **default_fernet)
    test_b64bytes = ormar.LargeBinary(
        max_length=100, represent_as_base64_str=True, **default_fernet
    )
    custom_backend: str = ormar.String(
        max_length=200,
        encrypt_secret="asda8",
        encrypt_backend=ormar.EncryptBackends.CUSTOM,
        encrypt_custom_backend=DummyBackend,
    )
class Hash(ormar.Model):
    """Model with a HASH-encrypted name — deterministic digests, so
    equality filters still work against the stored value."""

    ormar_config = base_ormar_config.copy(tablename="hashes")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(
        max_length=128,
        encrypt_secret="udxc32",
        encrypt_backend=ormar.EncryptBackends.HASH,
    )


class Filter(ormar.Model):
    """Model with a Fernet-encrypted name plus an FK, used to show that
    filtering on Fernet columns cannot match (ciphertext is randomized)."""

    ormar_config = base_ormar_config.copy(tablename="filters")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, **default_fernet)
    hash = ormar.ForeignKey(Hash)


class Report(ormar.Model):
    """Plain model with an M2M to Filter, to verify decryption of related
    models' encrypted fields."""

    ormar_config = base_ormar_config.copy(tablename="reports")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    filters = ormar.ManyToMany(Filter)
create_test_database = init_tests(base_ormar_config)
def test_error_on_encrypted_pk():
    """Encrypting a primary key is forbidden — model definition must raise."""
    with pytest.raises(ModelDefinitionError):

        class Wrong(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="wrongs")

            id: int = ormar.Integer(
                primary_key=True,
                encrypt_secret="asd123",
                encrypt_backend=ormar.EncryptBackends.FERNET,
            )
def test_error_on_encrypted_relation():
    """Encrypting a ForeignKey column is forbidden — must raise."""
    with pytest.raises(ModelDefinitionError):

        class Wrong2(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="wrongs2")

            id: int = ormar.Integer(primary_key=True)
            author = ormar.ForeignKey(
                Author,
                encrypt_secret="asd123",
                encrypt_backend=ormar.EncryptBackends.FERNET,
            )
def test_error_on_encrypted_m2m_relation():
    """Encrypting a ManyToMany relation is forbidden — must raise."""
    with pytest.raises(ModelDefinitionError):

        class Wrong3(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="wrongs3")

            id: int = ormar.Integer(primary_key=True)
            author = ormar.ManyToMany(
                Author,
                encrypt_secret="asd123",
                encrypt_backend=ormar.EncryptBackends.FERNET,
            )
def test_wrong_backend():
    """CUSTOM backend with an invalid class (a plain string here) — must
    raise at model definition time."""
    with pytest.raises(ModelDefinitionError):

        class Wrong3(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="wrongs3")

            id: int = ormar.Integer(primary_key=True)
            author = ormar.Integer(
                encrypt_secret="asd123",
                encrypt_backend=ormar.EncryptBackends.CUSTOM,
                encrypt_custom_backend="aa",
            )
def test_db_structure():
    """Encrypted columns are materialized as EncryptedString in the
    sqlalchemy table regardless of their declared ormar type."""
    assert Author.ormar_config.table.c.get("name").type.__class__ == EncryptedString
@pytest.mark.asyncio
async def test_save_and_retrieve():
    """Round-trip every encrypted field type and verify values decrypt back
    to their originals (HASH fields come back as the digest instead)."""
    async with base_ormar_config.database:
        test_uuid = uuid.uuid4()
        await Author(
            name="Test",
            birth_year=1988,
            password="test123",
            uuid_test=test_uuid,
            test_float=1.2,
            test_bool=True,
            test_decimal=3.57,
            test_decimal2=decimal.Decimal(5.5),
            test_json=dict(aa=12),
            custom_backend="test12",
            test_bytes=b"test",
            test_b64bytes=b"test2",
        ).save()
        author = await Author.objects.get()
        assert author.name == "Test"
        assert author.birth_year == 1988
        # Expected sha512 digest for "test123" with this secret — the HASH
        # backend is one-way, so the plain password is never returned.
        password = (
            "03e4a4d513e99cb3fe4ee3db282c053daa3f3572b849c3868939a306944ad5c08"
            "22b50d4886e10f4cd418c3f2df3ceb02e2e7ac6e920ae0c90f2dedfc8fa16e2"
        )
        assert author.password == password
        assert author.uuid_test == test_uuid
        assert author.uuid_test2 is None
        assert author.test_datetime.date() == datetime.date.today()
        assert author.test_date == datetime.date.today()
        assert author.test_text == ""
        assert author.test_float == 1.2
        assert author.test_float2 is None
        assert author.test_bigint == 0
        assert author.test_json == {"aa": 12}
        assert float(author.test_decimal) == 3.57
        assert author.test_decimal2 == 5.5
        assert author.custom_backend == "test12"
        assert author.test_bytes == "test".encode("utf-8")
        # represent_as_base64_str=True returns a base64 string, not bytes.
        assert author.test_b64bytes == "dGVzdDI="
        assert base64.b64decode(author.test_b64bytes) == b"test2"
@pytest.mark.asyncio
async def test_fernet_filters_nomatch():
    """Fernet ciphertext differs on every write, so equality filters on a
    Fernet column can never match even though decrypted values are equal."""
    async with base_ormar_config.database:
        await Filter(name="test1").save()
        await Filter(name="test1").save()
        filters = await Filter.objects.all()
        # Both rows decrypt to the same plain value...
        assert filters[0].name == filters[1].name == "test1"
        # ...but filtering compares ciphertext, so nothing matches.
        with pytest.raises(NoMatch):
            await Filter.objects.get(name="test1")
        assert await Filter.objects.get_or_none(name="test1") is None
@pytest.mark.asyncio
async def test_hash_filters_works():
    """HASH backend digests are deterministic, so equality filters work —
    but non-equality operators (icontains) still cannot match ciphertext."""
    async with base_ormar_config.database:
        await Hash(name="test1").save()
        await Hash(name="test2").save()
        # Reproduce the backend's digest: sha512(urlsafe_b64(sha256(secret)) + value).
        secret = hashlib.sha256("udxc32".encode()).digest()
        secret = base64.urlsafe_b64encode(secret)
        hashed_test1 = hashlib.sha512(secret + "test1".encode()).hexdigest()
        hash1 = await Hash.objects.get(name="test1")
        assert hash1.name == hashed_test1
        # NOTE(review): this intentionally queries Filter (Fernet column) —
        # substring matching against encrypted storage finds nothing.
        with pytest.raises(NoMatch):
            await Filter.objects.get(name__icontains="test")
@pytest.mark.asyncio
async def test_related_model_fields_properly_decrypted():
    """Encrypted fields must decrypt correctly when the rows arrive via
    select_related on M2M and nested FK relations."""
    async with base_ormar_config.database:
        hash1 = await Hash(name="test1").save()
        report = await Report.objects.create(name="Report1")
        await report.filters.create(name="test1", hash=hash1)
        await report.filters.create(name="test2")
        report2 = await Report.objects.select_related("filters").get()
        assert report2.filters[0].name == "test1"
        assert report2.filters[1].name == "test2"
        # Same digest formula as in test_hash_filters_works.
        secret = hashlib.sha256("udxc32".encode()).digest()
        secret = base64.urlsafe_b64encode(secret)
        hashed_test1 = hashlib.sha512(secret + "test1".encode()).hexdigest()
        report2 = await Report.objects.select_related("filters__hash").get()
        assert report2.filters[0].name == "test1"
        # Nested HASH field comes back as the digest, not the plain value.
        assert report2.filters[0].hash.name == hashed_test1
collerek-ormar-c09209a/tests/test_exclude_include_dict/ 0000775 0000000 0000000 00000000000 15130200524 0023311 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_exclude_include_dict/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0025410 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_exclude_include_dict/test_complex_relation_tree_performance.py 0000664 0000000 0000000 00000032275 15130200524 0033677 0 ustar 00root root 0000000 0000000 from datetime import datetime
from typing import List, Optional, Union
import ormar as orm
import pydantic
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
# Bare through/link models for the ManyToMany relations declared further
# down. Each holds only its own pk; ormar adds the FK columns for the
# relation ends automatically.


class ChagenlogRelease(orm.Model):
    # NOTE(review): "Chagenlog" is a typo for "Changelog", but the name is
    # referenced by Release.changelogs below — renaming would break it.
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="changelog_release")


class CommitIssue(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="commit_issues")


class CommitLabel(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="commit_label")


class MergeRequestCommit(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="merge_request_commits")


class MergeRequestIssue(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="merge_request_issues")


class MergeRequestLabel(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="merge_request_labels")


class ProjectLabel(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="project_label")


class PushCommit(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="push_commit")


class PushLabel(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="push_label")


class TagCommit(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="tag_commits")


class TagIssue(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="tag_issue")


class TagLabel(orm.Model):
    id: int = orm.Integer(name="id", primary_key=True)

    ormar_config = base_ormar_config.copy(tablename="tag_label")


class UserProject(orm.Model):
    # Through model that carries extra data (access_level) on the relation.
    id: int = orm.Integer(name="id", primary_key=True)
    access_level: int = orm.Integer(default=0)

    ormar_config = base_ormar_config.copy(tablename="user_project")
class Label(orm.Model):
    """Tag-like entity attached to many models via through tables above."""

    id: int = orm.Integer(name="id", primary_key=True)
    title: str = orm.String(max_length=100)
    description: str = orm.Text(default="")
    type: str = orm.String(max_length=100, default="")

    ormar_config = base_ormar_config.copy(tablename="labels")


class Project(orm.Model):
    """Repository/project record with release-management configuration."""

    id: int = orm.Integer(name="id", primary_key=True)
    name: str = orm.String(max_length=100)
    description: str = orm.Text(default="")
    git_url: str = orm.String(max_length=500, default="")
    labels: Optional[Union[List[Label], Label]] = orm.ManyToMany(
        Label, through=ProjectLabel, ondelete="CASCADE", onupdate="CASCADE"
    )
    changelog_jira_tag: str = orm.String(max_length=100, default="")
    change_type_jira_tag: str = orm.String(max_length=100, default="")
    jira_prefix: str = orm.String(max_length=10, default="SAN")
    type: str = orm.String(max_length=10, default="cs")
    target_branch_name: str = orm.String(max_length=100, default="master")
    header: str = orm.String(max_length=250, default="")
    jira_url: str = orm.String(max_length=500)
    changelog_file: str = orm.String(max_length=250, default="")
    version_file: str = orm.String(max_length=250, default="")

    ormar_config = base_ormar_config.copy(tablename="projects")


class Issue(orm.Model):
    """Issue-tracker ticket referenced by commits, merge requests and tags."""

    id: int = orm.Integer(name="id", primary_key=True)
    summary: str = orm.Text(default="")
    description: str = orm.Text(default="")
    changelog: str = orm.Text(default="")
    link: str = orm.String(max_length=500)
    issue_type: str = orm.String(max_length=100)
    key: str = orm.String(max_length=100)
    change_type: str = orm.String(max_length=100, default="")
    data: pydantic.Json = orm.JSON(default={})

    ormar_config = base_ormar_config.copy(tablename="issues")


class User(orm.Model):
    """Minimal user record; username is unique."""

    id: int = orm.Integer(name="id", primary_key=True)
    username: str = orm.String(max_length=100, unique=True)
    name: str = orm.String(max_length=200, default="")

    ormar_config = base_ormar_config.copy(tablename="users")


class Branch(orm.Model):
    """Git branch belonging to a Project."""

    id: int = orm.Integer(name="id", primary_key=True)
    name: str = orm.String(max_length=200)
    description: str = orm.Text(default="")
    automatic_tags: bool = orm.Boolean(default=False)
    is_it_locked: bool = orm.Boolean(default=True)
    prefix_tag: str = orm.String(max_length=50, default="")
    postfix_tag: str = orm.String(max_length=50, default="")
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")

    ormar_config = base_ormar_config.copy(tablename="branches")
class Changelog(orm.Model):
    """Generated changelog entry tied to a Project and optionally a Label."""

    id: int = orm.Integer(name="id", primary_key=True)
    content: str = orm.Text(default="")
    version: str = orm.Text(default="")
    past_changelog: int = orm.Integer(default=0)
    label: Label = orm.ForeignKey(
        Label, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")
    # BUG FIX: was `default=datetime.utcnow()` — calling the function at class
    # definition time stamps every row with the module-import moment. Passing
    # the callable makes the default evaluate per instance.
    created_date: datetime = orm.DateTime(default=datetime.utcnow)

    ormar_config = base_ormar_config.copy(tablename="changelogs")
class Commit(orm.Model):
    """Git commit; the pk is the full commit sha (a string, not an int)."""

    id: str = orm.String(max_length=500, primary_key=True)
    short_id: str = orm.String(max_length=500)
    title: str = orm.String(max_length=500)
    message: str = orm.Text(default="")
    url = orm.String(max_length=500, default="")
    author_name = orm.String(max_length=500, default="")
    labels: Optional[Union[List[Label], Label]] = orm.ManyToMany(
        Label, through=CommitLabel, ondelete="CASCADE", onupdate="CASCADE"
    )
    issues: Optional[Union[List[Issue], Issue]] = orm.ManyToMany(
        Issue, through=CommitIssue, ondelete="CASCADE", onupdate="CASCADE"
    )

    ormar_config = base_ormar_config.copy(tablename="commits")


class MergeRequest(orm.Model):
    """Merge request between two branches with its labels/commits/issues."""

    id: int = orm.Integer(name="id", primary_key=True)
    idd: int = orm.Integer(default=0)
    title: str = orm.String(max_length=500)
    state: str = orm.String(max_length=100)
    merge_status: str = orm.String(max_length=100)
    description: str = orm.Text(default="")
    # Two FKs to the same model require distinct related_name values.
    source: Branch = orm.ForeignKey(Branch, related_name="source")
    target: Branch = orm.ForeignKey(Branch, related_name="target")
    labels: Optional[Union[List[Label], Label]] = orm.ManyToMany(
        Label, through=MergeRequestLabel, ondelete="CASCADE", onupdate="CASCADE"
    )
    commits: Optional[Union[List[Commit], Commit]] = orm.ManyToMany(
        Commit, through=MergeRequestCommit, ondelete="CASCADE", onupdate="CASCADE"
    )
    issues: Optional[Union[List[Issue], Issue]] = orm.ManyToMany(
        Issue, through=MergeRequestIssue, ondelete="CASCADE", onupdate="CASCADE"
    )
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")

    ormar_config = base_ormar_config.copy(tablename="merge_requests")


class Push(orm.Model):
    """Push event on a branch."""

    id: int = orm.Integer(name="id", primary_key=True)
    branch: Branch = orm.ForeignKey(
        Branch, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    has_locking_changes: bool = orm.Boolean(default=False)
    sha: str = orm.String(max_length=200)
    labels: Optional[Union[List[Label], Label]] = orm.ManyToMany(
        Label, through=PushLabel, ondelete="CASCADE", onupdate="CASCADE"
    )
    # Custom FK column names on the through model are exercised here.
    commits: Optional[Union[List[Commit], Commit]] = orm.ManyToMany(
        Commit,
        through=PushCommit,
        through_relation_name="push",
        through_reverse_relation_name="commit_id",
        ondelete="CASCADE",
        onupdate="CASCADE",
    )
    author: User = orm.ForeignKey(User, ondelete="CASCADE", onupdate="CASCADE")
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")

    ormar_config = base_ormar_config.copy(tablename="pushes")


class Tag(orm.Model):
    """Git tag with commits, issues and labels attached via through models."""

    id: int = orm.Integer(name="id", primary_key=True)
    name: str = orm.String(max_length=200)
    ref: str = orm.String(max_length=200)
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")
    title: str = orm.String(max_length=200, default="")
    description: str = orm.Text(default="")
    commits: Optional[Union[List[Commit], Commit]] = orm.ManyToMany(
        Commit,
        through=TagCommit,
        through_relation_name="tag",
        through_reverse_relation_name="commit_id",
        ondelete="CASCADE",
        onupdate="CASCADE",
    )
    issues: Optional[Union[List[Issue], Issue]] = orm.ManyToMany(
        Issue, through=TagIssue, ondelete="CASCADE", onupdate="CASCADE"
    )
    labels: Optional[Union[List[Label], Label]] = orm.ManyToMany(
        Label, through=TagLabel, ondelete="CASCADE", onupdate="CASCADE"
    )
    user: User = orm.ForeignKey(
        User, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    branch: Branch = orm.ForeignKey(
        Branch, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )

    ormar_config = base_ormar_config.copy(tablename="tags")


class Release(orm.Model):
    """Release built from a Tag; aggregates changelogs through a typo-named
    through model (ChagenlogRelease)."""

    id: int = orm.Integer(name="id", primary_key=True)
    title: str = orm.String(max_length=200, default="")
    description: str = orm.Text(default="")
    tag: Tag = orm.ForeignKey(Tag, ondelete="CASCADE", onupdate="CASCADE")
    changelogs: List[Changelog] = orm.ManyToMany(
        Changelog, through=ChagenlogRelease, ondelete="CASCADE", onupdate="CASCADE"
    )
    data: pydantic.Json = orm.JSON(default={})

    ormar_config = base_ormar_config.copy(tablename="releases")
class Webhook(orm.Model):
    """Incoming webhook payload linked to whichever event it describes
    (merge request, tag, or push)."""

    id: int = orm.Integer(name="id", primary_key=True)
    object_kind = orm.String(max_length=100)
    project: Project = orm.ForeignKey(Project, ondelete="CASCADE", onupdate="CASCADE")
    merge_request: MergeRequest = orm.ForeignKey(
        MergeRequest, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    tag: Tag = orm.ForeignKey(
        Tag, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    push: Push = orm.ForeignKey(
        Push, nullable=True, ondelete="CASCADE", onupdate="CASCADE"
    )
    # BUG FIX: was `default=datetime.now()` — evaluated once at import time,
    # so every row shared the same timestamp. Pass the callable instead.
    created_at: datetime = orm.DateTime(default=datetime.now)
    data: pydantic.Json = orm.JSON(default={})
    status: int = orm.Integer(default=200)
    error: str = orm.Text(default="")

    # BUG FIX: this class had no ormar_config, unlike every other model in
    # the module — ormar models require one. TODO confirm the intended
    # tablename against the repository history.
    ormar_config = base_ormar_config.copy(tablename="webhooks")
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_very_complex_relation_map():
    """Smoke-test model_dump() over the large relation tree above: save
    tags and releases, reload ordered by id desc, and compare full dicts."""
    async with base_ormar_config.database:
        tags = [
            {"id": 18, "name": "name-18", "ref": "ref-18"},
            {"id": 17, "name": "name-17", "ref": "ref-17"},
            {"id": 12, "name": "name-12", "ref": "ref-12"},
        ]
        payload = [
            {
                "id": 9,
                "title": "prueba-2321",
                "description": "\n"
                "Description 1"
                "\n",
                "data": {},
            },
            {
                "id": 8,
                "title": "prueba-123-prod",
                "description": "\n"
                "Description 2"
                "\n",
                "data": {},
            },
            {
                "id": 6,
                "title": "prueba-3-2",
                "description": "\n"
                "Description 3"
                "\n",
                "data": {},
            },
        ]
        saved_tags = []
        for tag in tags:
            saved_tags.append(await Tag(**tag).save())
        # Each release is paired with the tag at the same index.
        for ind, pay in enumerate(payload):
            await Release(**pay, tag=saved_tags[ind]).save()
        releases = await Release.objects.order_by(Release.id.desc()).all()
        dicts = [release.model_dump() for release in releases]
        # Expected dump: tag collapses to its pk dict, unloaded M2M
        # (changelogs) dumps as an empty list.
        result = [
            {
                "id": 9,
                "title": "prueba-2321",
                "description": "\n"
                "Description 1"
                "\n",
                "data": {},
                "tag": {
                    "id": 18,
                },
                "changelogs": [],
            },
            {
                "id": 8,
                "title": "prueba-123-prod",
                "description": "\n"
                "Description 2"
                "\n",
                "data": {},
                "tag": {
                    "id": 17,
                },
                "changelogs": [],
            },
            {
                "id": 6,
                "title": "prueba-3-2",
                "description": "\n"
                "Description 3"
                "\n",
                "data": {},
                "tag": {
                    "id": 12,
                },
                "changelogs": [],
            },
        ]
        assert dicts == result
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_dumping_model_to_dict.py 0000664 0000000 0000000 00000013201 15130200524 0031247 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Role(ormar.Model):
    """Role attached to users through an auto-generated M2M table."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, nullable=False)


class User(ormar.Model):
    """User with an M2M to Role; creator of Items below."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    email: str = ormar.String(max_length=255, nullable=False)
    password: str = ormar.String(max_length=255, nullable=True)
    first_name: str = ormar.String(max_length=255, nullable=False)
    roles: List[Role] = ormar.ManyToMany(Role)


class Tier(ormar.Model):
    """Deepest level of the Item -> Category -> Tier chain."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Category(ormar.Model):
    """Category with optional Tier parent."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    tier: Optional[Tier] = ormar.ForeignKey(Tier)


class Item(ormar.Model):
    """Root model dumped in the tests: nests Category/Tier and User/Roles."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
    created_by: Optional[User] = ormar.ForeignKey(User)
create_test_database = init_tests(base_ormar_config)
@pytest.fixture(autouse=True, scope="module")
def sample_data():
    """Build two *unsaved* Item instances sharing one creator (with two
    roles) and one tier, so the dump tests never touch the database."""
    user_role = Role(name="User", id=1)
    admin_role = Role(name="Admin", id=2)
    creator = User(
        id=1,
        email="test@test.com",
        password="ijacids7^*&",
        first_name="Anna",
        roles=[user_role, admin_role],
    )
    tier = Tier(id=1, name="Tier I")
    toys = Category(id=1, name="Toys", tier=tier)
    weapons = Category(id=2, name="Weapons", tier=tier)
    teddy_bear = Item(id=1, name="Teddy Bear", category=toys, created_by=creator)
    m16 = Item(id=2, name="M16", category=weapons, created_by=creator)
    return teddy_bear, m16
def test_dumping_to_dict_no_exclusion(sample_data):
    """model_dump() without arguments includes all nested relations."""
    item1, item2 = sample_data
    dict1 = item1.model_dump()
    assert dict1["name"] == "Teddy Bear"
    assert dict1["category"]["name"] == "Toys"
    assert dict1["category"]["tier"]["name"] == "Tier I"
    assert dict1["created_by"]["email"] == "test@test.com"
    dict2 = item2.model_dump()
    assert dict2["name"] == "M16"
    assert dict2["category"]["name"] == "Weapons"
    assert dict2["created_by"]["email"] == "test@test.com"
def test_dumping_to_dict_exclude_set(sample_data):
    """exclude= given as a set removes the named top-level keys entirely."""
    item1, item2 = sample_data
    dict3 = item2.model_dump(exclude={"name"})
    assert "name" not in dict3
    assert dict3["category"]["name"] == "Weapons"
    assert dict3["created_by"]["email"] == "test@test.com"
    # Excluding a relation drops the whole nested dict.
    dict4 = item2.model_dump(exclude={"category"})
    assert dict4["name"] == "M16"
    assert "category" not in dict4
    assert dict4["created_by"]["email"] == "test@test.com"
    dict5 = item2.model_dump(exclude={"category", "name"})
    assert "name" not in dict5
    assert "category" not in dict5
    assert dict5["created_by"]["email"] == "test@test.com"
def test_dumping_to_dict_exclude_dict(sample_data):
    """exclude= given as a dict targets nested keys; ellipsis removes a
    key outright while a nested set prunes inside the relation."""
    item1, item2 = sample_data
    dict6 = item2.model_dump(exclude={"category": {"name"}, "name": ...})
    assert "name" not in dict6
    assert "category" in dict6
    assert "name" not in dict6["category"]
    assert dict6["created_by"]["email"] == "test@test.com"
def test_dumping_to_dict_exclude_nested_dict(sample_data):
    """Exclusion dicts can reach two relations deep (category -> tier)."""
    item1, item2 = sample_data
    dict1 = item2.model_dump(exclude={"category": {"tier": {"name"}}, "name": ...})
    assert "name" not in dict1
    assert "category" in dict1
    assert dict1["category"]["name"] == "Weapons"
    assert dict1["created_by"]["email"] == "test@test.com"
    # Only tier.name was pruned — the tier dict itself survives.
    assert dict1["category"]["tier"].get("name") is None
def test_dumping_to_dict_exclude_and_include_nested_dict(sample_data):
    """exclude and include can be combined in a single model_dump call."""
    item1, item2 = sample_data
    dict1 = item2.model_dump(
        exclude={"category": {"tier": {"name"}}}, include={"name", "category"}
    )
    assert dict1.get("name") == "M16"
    assert "category" in dict1
    assert dict1["category"]["name"] == "Weapons"
    assert "created_by" not in dict1
    assert dict1["category"]["tier"].get("name") is None

    dict2 = item1.model_dump(
        exclude={"id": ...},
        include={"name": ..., "category": {"name": ..., "tier": {"id"}}},
    )
    assert dict2.get("name") == "Teddy Bear"
    assert dict2.get("id") is None  # models not saved
    assert dict2["category"]["name"] == "Toys"
    # FIX: the last three assertions previously checked dict1 (already fully
    # verified above) instead of the freshly built dict2, so the nested
    # include/exclude shape of the second dump was never actually asserted.
    assert "created_by" not in dict2
    assert dict2["category"]["tier"].get("name") is None
    assert dict2["category"]["tier"]["id"] == 1
def test_dumping_dict_without_primary_keys(sample_data):
    # exclude_primary_keys=True strips the pk column from the model itself and
    # from every nested relation, leaving only the non-pk payload fields.
    item1, item2 = sample_data
    dict1 = item2.model_dump(exclude_primary_keys=True)
    assert dict1 == {
        "category": {"name": "Weapons", "tier": {"name": "Tier I"}},
        "created_by": {
            "email": "test@test.com",
            "first_name": "Anna",
            "password": "ijacids7^*&",
            "roles": [
                {"name": "User"},
                {"name": "Admin"},
            ],
        },
        "name": "M16",
    }
    dict2 = item1.model_dump(exclude_primary_keys=True)
    assert dict2 == {
        "category": {"name": "Toys", "tier": {"name": "Tier I"}},
        "created_by": {
            "email": "test@test.com",
            "first_name": "Anna",
            "password": "ijacids7^*&",
            "roles": [
                {"name": "User"},
                {"name": "Admin"},
            ],
        },
        "name": "Teddy Bear",
    }
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_excludable_items.py 0000664 0000000 0000000 00000015051 15130200524 0030235 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
from ormar.models.excludable import ExcludableItems
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class NickNames(ormar.Model):
    # Nickname rows; the python field "name" maps to the db column "hq_name".
    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)


class NicksHq(ormar.Model):
    # Explicit through table for the HQ <-> NickNames many-to-many relation.
    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")


class HQ(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # Same db column name ("hq_name") as NickNames.name - alias resolution
    # in the excludable tests must keep them apart.
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickNames] = ormar.ManyToMany(NickNames, through=NicksHq)


class Company(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ)


class Car(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    manufacturer: Optional[Company] = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
create_test_database = init_tests(base_ormar_config)
def compare_results(excludable):
    """Shared assertions for the exclude-building tests on Car and its manufacturer."""
    for_car = excludable.get(Car)
    assert for_car.include == set()
    assert for_car.exclude == {"year", "gearbox_type", "gears", "aircon_type"}
    assert for_car.is_excluded("year")

    manufacturer_alias = Company.ormar_config.alias_manager.resolve_relation_alias(
        Car, "manufacturer"
    )
    for_company = excludable.get(Company, alias=manufacturer_alias)
    assert for_company.include == set()
    assert for_company.exclude == {"founded"}
    assert for_company.is_excluded("founded")
def compare_results_include(excludable):
    """Shared assertions for the include-building tests across the relation chain."""
    manager = Company.ormar_config.alias_manager

    for_car = excludable.get(Car)
    assert for_car.exclude == set()
    assert for_car.include == {"id", "name"}
    assert for_car.is_included("name")
    assert not for_car.is_included("gears")

    manu_alias = manager.resolve_relation_alias(Car, "manufacturer")
    for_company = excludable.get(Company, alias=manu_alias)
    assert for_company.exclude == set()
    assert for_company.include == {"name"}
    assert for_company.is_included("name")
    assert not for_company.is_included("founded")

    hq_alias = manager.resolve_relation_alias(Company, "hq")
    for_hq = excludable.get(HQ, alias=hq_alias)
    assert for_hq.exclude == set()
    assert for_hq.include == {"name"}

    nick_alias = manager.resolve_relation_alias(NicksHq, "nicknames")
    for_nick = excludable.get(NickNames, alias=nick_alias)
    assert for_nick.exclude == set()
    assert for_nick.include == {"name"}
def test_excluding_fields_from_list():
    """A flat list with double-underscore paths builds nested excludes."""
    excludable = ExcludableItems()
    excludable.build(
        items=["gearbox_type", "gears", "aircon_type", "year", "manufacturer__founded"],
        model_cls=Car,
        is_exclude=True,
    )
    compare_results(excludable)
def test_excluding_fields_from_dict():
    """A nested dict with Ellipsis leaves builds the same excludes as the flat list."""
    excludable = ExcludableItems()
    excludable.build(
        items={
            "gearbox_type": ...,
            "gears": ...,
            "aircon_type": ...,
            "year": ...,
            "manufacturer": {"founded": ...},
        },
        model_cls=Car,
        is_exclude=True,
    )
    compare_results(excludable)
def test_excluding_fields_from_dict_with_set():
    """A set leaf ({"founded"}) is shorthand for a dict with Ellipsis values."""
    excludable = ExcludableItems()
    excludable.build(
        items={
            "gearbox_type": ...,
            "gears": ...,
            "aircon_type": ...,
            "year": ...,
            "manufacturer": {"founded"},
        },
        model_cls=Car,
        is_exclude=True,
    )
    compare_results(excludable)
def test_gradual_build_from_lists():
    """build() may be called repeatedly; items accumulate into one excludable."""
    excludable = ExcludableItems()
    batches = (
        "year",
        ["gearbox_type", "gears"],
        "aircon_type",
        ["manufacturer__founded"],
    )
    for batch in batches:
        excludable.build(items=batch, model_cls=Car, is_exclude=True)
    compare_results(excludable)
def test_nested_includes():
    """Include paths may traverse fk and m2m relations via double underscores."""
    excludable = ExcludableItems()
    excludable.build(
        items=[
            "id",
            "name",
            "manufacturer__name",
            "manufacturer__hq__name",
            "manufacturer__hq__nicks__name",
        ],
        model_cls=Car,
        is_exclude=False,
    )
    compare_results_include(excludable)
def test_nested_includes_from_dict():
    """The same nested includes expressed as a dict of dicts."""
    excludable = ExcludableItems()
    excludable.build(
        items={
            "id": ...,
            "name": ...,
            "manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name": ...}}},
        },
        model_cls=Car,
        is_exclude=False,
    )
    compare_results_include(excludable)
def test_nested_includes_from_dict_with_set():
    """Set leaves work for includes at any nesting depth as well."""
    excludable = ExcludableItems()
    excludable.build(
        items={
            "id": ...,
            "name": ...,
            "manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name"}}},
        },
        model_cls=Car,
        is_exclude=False,
    )
    compare_results_include(excludable)
def test_includes_and_excludes_combo():
    """Includes and excludes from several build() calls coexist on one excludable."""
    excludable = ExcludableItems()
    excludable.build(
        items=["id", "name", "year", "gearbox_type", "gears"],
        model_cls=Car,
        is_exclude=False,
    )
    excludable.build(items={"manufacturer": {"name"}}, model_cls=Car, is_exclude=False)
    excludable.build(items={"manufacturer__founded"}, model_cls=Car, is_exclude=True)
    excludable.build(items="aircon_type", model_cls=Car, is_exclude=True)

    for_car = excludable.get(Car)
    assert for_car.include == {"id", "name", "year", "gearbox_type", "gears"}
    assert for_car.exclude == {"aircon_type"}
    assert for_car.is_excluded("aircon_type")
    assert for_car.is_included("name")

    manu_alias = Company.ormar_config.alias_manager.resolve_relation_alias(
        Car, "manufacturer"
    )
    for_company = excludable.get(Company, alias=manu_alias)
    assert for_company.include == {"name"}
    assert for_company.exclude == {"founded"}
    assert for_company.is_excluded("founded")
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_excluding_fields_in_fastapi.py 0000664 0000000 0000000 00000020764 15130200524 0032440 0 ustar 00root root 0000000 0000000 import datetime
import random
import string
import ormar
import pydantic
import pytest
import sqlalchemy
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from ormar import post_save
from pydantic import ConfigDict, computed_field
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
# note that you can set orm_mode here
# and in this case UserSchema become unnecessary
class UserBase(pydantic.BaseModel):
    # Plain pydantic schema shared by the request/response models below.
    model_config = ConfigDict(from_attributes=True)

    email: str
    first_name: str
    last_name: str


class UserCreateSchema(UserBase):
    # Input schema - adds the write-only fields on top of UserBase.
    password: str
    category: str


class UserSchema(UserBase):
    # Output schema - same fields as UserBase, kept as a distinct name.
    model_config = ConfigDict(from_attributes=True)
def gen_pass():
    """Generate a random 20-character password from letters, digits and punctuation."""
    alphabet = string.ascii_letters + string.digits + "!@#$%^&*()"
    password_chars = [random.choice(alphabet) for _ in range(20)]
    return "".join(password_chars)
class RandomModel(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="random_users")

    id: int = ormar.Integer(primary_key=True)
    # password is generated server-side when the client does not provide one
    password: str = ormar.String(max_length=255, default=gen_pass)
    first_name: str = ormar.String(max_length=255, default="John")
    last_name: str = ormar.String(max_length=255)
    created_date: datetime.datetime = ormar.DateTime(
        server_default=sqlalchemy.func.now()
    )

    @computed_field
    def full_name(self) -> str:
        # Derived, read-only field joined from first and last name.
        return " ".join([self.first_name, self.last_name])


class User(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)
    email: str = ormar.String(max_length=255)
    password: str = ormar.String(max_length=255, nullable=True)
    first_name: str = ormar.String(max_length=255)
    last_name: str = ormar.String(max_length=255)
    category: str = ormar.String(max_length=255, nullable=True)


class User2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users2")

    id: int = ormar.Integer(primary_key=True)
    email: str = ormar.String(max_length=255, nullable=False)
    password: str = ormar.String(max_length=255)
    first_name: str = ormar.String(max_length=255)
    last_name: str = ormar.String(max_length=255)
    category: str = ormar.String(max_length=255, nullable=True)
    # Declared via pydantic.Field rather than an ormar column type,
    # with a client-side default_factory.
    timestamp: datetime.datetime = pydantic.Field(default_factory=datetime.datetime.now)
create_test_database = init_tests(base_ormar_config)
@app.post("/users/", response_model=User, response_model_exclude={"password"})
async def create_user(user: User):
    # Exclusion handled declaratively by FastAPI via response_model_exclude.
    return await user.save()


@app.post("/users2/", response_model=User)
async def create_user2(user: User):
    # Exclusion handled manually by dumping the saved model without "password".
    user = await user.save()
    return user.model_dump(exclude={"password"})


@app.post("/users3/", response_model=UserBase)
async def create_user3(user: User2):
    # Response trimmed down to the three UserBase fields by the response_model.
    return await user.save()


@app.post("/users4/")
async def create_user4(user: User2):
    # No response_model - returns the plain dict minus the password.
    return (await user.save()).model_dump(exclude={"password"})


@app.post("/random/", response_model=RandomModel)
async def create_user5(user: RandomModel):
    return await user.save()


@app.post("/random2/", response_model=RandomModel)
async def create_user6(user: RandomModel):
    return await user.save()


@app.post("/random3/", response_model=RandomModel, response_model_exclude={"full_name"})
async def create_user7(user: RandomModel):
    # The computed full_name field is excluded from this response.
    return await user.save()
@pytest.mark.asyncio
async def test_excluding_fields_in_endpoints():
    # End-to-end check of the user endpoints and their password exclusion.
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        user = {
            "email": "test@domain.com",
            "password": "^*^%A*DA*IAAA",
            "first_name": "John",
            "last_name": "Doe",
        }
        response = await client.post("/users/", json=user)
        created_user = User(**response.json())
        assert created_user.pk is not None
        assert created_user.password is None

        user2 = {"email": "test@domain.com", "first_name": "John", "last_name": "Doe"}
        response = await client.post("/users/", json=user2)
        created_user = User(**response.json())
        assert created_user.pk is not None
        assert created_user.password is None

        response = await client.post("/users2/", json=user)
        created_user2 = User(**response.json())
        assert created_user2.pk is not None
        assert created_user2.password is None

        # response has only 3 fields from UserBase
        response = await client.post("/users3/", json=user)
        assert list(response.json().keys()) == ["email", "first_name", "last_name"]

        timestamp = datetime.datetime.now()
        user3 = {
            "email": "test@domain.com",
            "password": "^*^%A*DA*IAAA",
            "first_name": "John",
            "last_name": "Doe",
            "timestamp": str(timestamp),
        }
        response = await client.post("/users4/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "email",
            "first_name",
            "last_name",
            "category",
            "timestamp",
        ]
        # JSON serialization replaces the space in str(datetime) with a "T".
        assert response.json().get("timestamp") == str(timestamp).replace(" ", "T")

        resp_dict = response.json()
        resp_dict.update({"password": "random"})
        user_instance = User2(**resp_dict)
        assert user_instance.timestamp is not None
        assert isinstance(user_instance.timestamp, datetime.datetime)
        assert user_instance.timestamp == timestamp

        response = await client.post("/users4/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "email",
            "first_name",
            "last_name",
            "category",
            "timestamp",
        ]
        # Round-trips the ISO timestamp back to the original datetime value.
        assert (
            datetime.datetime.strptime(
                response.json().get("timestamp"), "%Y-%m-%dT%H:%M:%S.%f"
            )
            == timestamp
        )
@pytest.mark.asyncio
async def test_adding_fields_in_endpoints():
    # full_name is computed server-side; a value sent by the client is ignored.
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        user3 = {"last_name": "Test", "full_name": "deleted"}
        response = await client.post("/random/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "password",
            "first_name",
            "last_name",
            "created_date",
            "full_name",
        ]
        assert response.json().get("full_name") == "John Test"

        user3 = {"last_name": "Test"}
        response = await client.post("/random/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "password",
            "first_name",
            "last_name",
            "created_date",
            "full_name",
        ]
        assert response.json().get("full_name") == "John Test"
@pytest.mark.asyncio
async def test_adding_fields_in_endpoints2():
    # The computed full_name is present even when built entirely from defaults.
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        user3 = {"last_name": "Test"}
        response = await client.post("/random2/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "password",
            "first_name",
            "last_name",
            "created_date",
            "full_name",
        ]
        assert response.json().get("full_name") == "John Test"
@pytest.mark.asyncio
async def test_excluding_property_field_in_endpoints2():
    # /random3 excludes full_name from the response, while the post_save signal
    # still receives the full model dump (including full_name and password).
    dummy_registry = {}

    @post_save(RandomModel)
    async def after_save(sender, instance, **kwargs):
        dummy_registry[instance.pk] = instance.model_dump()

    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        user3 = {"last_name": "Test"}
        response = await client.post("/random3/", json=user3)
        assert list(response.json().keys()) == [
            "id",
            "password",
            "first_name",
            "last_name",
            "created_date",
        ]
        assert response.json().get("full_name") is None
        assert len(dummy_registry) == 1
        check_dict = dummy_registry.get(response.json().get("id"))
        # full_name was dumped by the signal even though the endpoint hid it.
        check_dict.pop("full_name")
        assert response.json().get("password") == check_dict.get("password")
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_excluding_fields_with_default.py 0000664 0000000 0000000 00000006626 15130200524 0033003 0 ustar 00root root 0000000 0000000 import random
from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
def get_position() -> int:
    """Default factory for Track.position: a pseudo-random slot from 1 to 10."""
    position = random.randint(1, 10)
    return position
class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False, nullable=True)


class Track(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album)
    title: str = ormar.String(max_length=100)
    # position uses a callable default, play_count a constant one - the test
    # below checks exclusion behavior for both kinds of defaults.
    position: int = ormar.Integer(default=get_position)
    play_count: int = ormar.Integer(nullable=True, default=0)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_excluding_field_with_default():
    # Fields excluded from the query come back as None even when the column
    # declares a default - defaults are not re-applied to unselected fields.
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = await Album.objects.create(name="Miami")
            await Track.objects.create(title="Vice City", album=album, play_count=10)
            await Track.objects.create(title="Beach Sand", album=album, play_count=20)
            await Track.objects.create(title="Night Lights", album=album)

            album = await Album.objects.fields("name").get()
            assert album.is_best_seller is None

            album = await Album.objects.exclude_fields({"is_best_seller", "id"}).get()
            assert album.is_best_seller is None

            album = await Album.objects.exclude_fields({"is_best_seller": ...}).get()
            assert album.is_best_seller is None

            # Without exclusions both defaulted fields are populated.
            tracks = await Track.objects.all()
            for track in tracks:
                assert track.play_count is not None
                assert track.position is not None

            album = (
                await Album.objects.select_related("tracks")
                .exclude_fields({"is_best_seller": ..., "tracks": {"play_count"}})
                .get(name="Miami")
            )
            assert album.is_best_seller is None
            assert len(album.tracks) == 3
            for track in album.tracks:
                assert track.play_count is None
                assert track.position is not None

            album = (
                await Album.objects.select_related("tracks")
                .exclude_fields(
                    {
                        "is_best_seller": ...,
                        "tracks": {"play_count": ..., "position": ...},
                    }
                )
                .get(name="Miami")
            )
            assert album.is_best_seller is None
            assert len(album.tracks) == 3
            for track in album.tracks:
                assert track.play_count is None
                assert track.position is None

            album = (
                await Album.objects.select_related("tracks")
                .exclude_fields(
                    {"is_best_seller": ..., "tracks": {"play_count", "position"}}
                )
                .get(name="Miami")
            )
            assert album.is_best_seller is None
            assert len(album.tracks) == 3
            for track in album.tracks:
                assert track.play_count is None
                assert track.position is None
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_excluding_nested_models_lists.py 0000664 0000000 0000000 00000000000 15130200524 0033015 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_exclude_include_dict/test_excluding_subset_of_columns.py 0000664 0000000 0000000 00000017043 15130200524 0032522 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pydantic
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Company(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False)
    founded: int = ormar.Integer(nullable=True)


class Car(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    manufacturer: Optional[Company] = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    # python name "gears" maps to the db column "gears_number"
    gears: int = ormar.Integer(nullable=True, name="gears_number")
    aircon_type: str = ormar.String(max_length=20, nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_selecting_subset():
    # fields()/exclude_fields() limit the selected columns; columns left out
    # of the query come back as None on the loaded models.
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            toyota = await Company.objects.create(name="Toyota", founded=1937)
            await Car.objects.create(
                manufacturer=toyota,
                name="Corolla",
                year=2020,
                gearbox_type="Manual",
                gears=5,
                aircon_type="Manual",
            )
            await Car.objects.create(
                manufacturer=toyota,
                name="Yaris",
                year=2019,
                gearbox_type="Manual",
                gears=5,
                aircon_type="Manual",
            )
            await Car.objects.create(
                manufacturer=toyota,
                name="Supreme",
                year=2020,
                gearbox_type="Auto",
                gears=6,
                aircon_type="Auto",
            )

            # exclusion as a flat list with relation paths
            all_cars = (
                await Car.objects.select_related("manufacturer")
                .exclude_fields(
                    [
                        "gearbox_type",
                        "gears",
                        "aircon_type",
                        "year",
                        "manufacturer__founded",
                    ]
                )
                .all()
            )
            for car in all_cars:
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded is None

            # exclusion as nested dicts - Ellipsis and set leaves are equivalent
            all_cars = (
                await Car.objects.select_related("manufacturer")
                .exclude_fields(
                    {
                        "gearbox_type": ...,
                        "gears": ...,
                        "aircon_type": ...,
                        "year": ...,
                        "manufacturer": {"founded": ...},
                    }
                )
                .all()
            )
            all_cars2 = (
                await Car.objects.select_related("manufacturer")
                .exclude_fields(
                    {
                        "gearbox_type": ...,
                        "gears": ...,
                        "aircon_type": ...,
                        "year": ...,
                        "manufacturer": {"founded"},
                    }
                )
                .all()
            )
            assert all_cars == all_cars2
            for car in all_cars:
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded is None

            # chained exclude_fields() calls accumulate
            all_cars = (
                await Car.objects.select_related("manufacturer")
                .exclude_fields("year")
                .exclude_fields(["gearbox_type", "gears"])
                .exclude_fields("aircon_type")
                .all()
            )
            for car in all_cars:
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded == 1937

            # sanity check: without exclusions everything is populated
            all_cars_check = await Car.objects.select_related("manufacturer").all()
            for car in all_cars_check:
                assert all(
                    getattr(car, x) is not None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded == 1937

            # fields() whitelist combined with exclude_fields() on the relation
            all_cars_check2 = (
                await Car.objects.select_related("manufacturer")
                .fields(["id", "name", "manufacturer"])
                .exclude_fields("manufacturer__founded")
                .all()
            )
            for car in all_cars_check2:
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded is None

            with pytest.raises(pydantic.ValidationError):
                # cannot exclude mandatory model columns - company__name in this example
                await Car.objects.select_related("manufacturer").exclude_fields(
                    ["manufacturer__name"]
                ).all()
@pytest.mark.asyncio
async def test_excluding_nested_lists_in_dump():
    # model_dump(exclude_list=True) drops list-valued relations from the dump.
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            toyota = await Company.objects.create(name="Toyota", founded=1937)
            car1 = await Car.objects.create(
                manufacturer=toyota,
                name="Corolla",
                year=2020,
                gearbox_type="Manual",
                gears=5,
                aircon_type="Manual",
            )
            car2 = await Car.objects.create(
                manufacturer=toyota,
                name="Yaris",
                year=2019,
                gearbox_type="Manual",
                gears=5,
                aircon_type="Manual",
            )
            manufacturer = await Company.objects.select_related("cars").get(
                name="Toyota"
            )
            assert manufacturer.model_dump() == {
                "cars": [
                    {
                        "aircon_type": "Manual",
                        "gearbox_type": "Manual",
                        "gears": 5,
                        "id": car1.id,
                        "name": "Corolla",
                        "year": 2020,
                    },
                    {
                        "aircon_type": "Manual",
                        "gearbox_type": "Manual",
                        "gears": 5,
                        "id": car2.id,
                        "name": "Yaris",
                        "year": 2019,
                    },
                ],
                "founded": 1937,
                "id": toyota.id,
                "name": "Toyota",
            }
            # Same dump with the "cars" list relation removed entirely.
            assert manufacturer.model_dump(exclude_list=True) == {
                "founded": 1937,
                "id": toyota.id,
                "name": "Toyota",
            }
collerek-ormar-c09209a/tests/test_exclude_include_dict/test_pydantic_dict_params.py 0000664 0000000 0000000 00000006724 15130200524 0031114 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="Test", nullable=True)
    visibility: bool = ormar.Boolean(default=True)


class Item(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    price: float = ormar.Float(default=9.99)
    categories: List[Category] = ormar.ManyToMany(Category)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_exclude_default():
    """model_dump(exclude_defaults=True) drops fields still at their declared defaults."""
    async with base_ormar_config.database:
        fresh = Category()
        assert fresh.model_dump() == {
            "id": None,
            "items": [],
            "name": "Test",
            "visibility": True,
        }
        # Only the relation list survives - every scalar still has its default.
        assert fresh.model_dump(exclude_defaults=True) == {"items": []}

        await fresh.save()
        loaded = await Category.objects.get()
        assert loaded.model_dump() == {
            "id": 1,
            "items": [],
            "name": "Test",
            "visibility": True,
        }
        # After saving the pk no longer equals its default (None), so it stays.
        assert loaded.model_dump(exclude_defaults=True) == {"id": 1, "items": []}
        assert loaded.model_dump_json(exclude_defaults=True) == '{"id":1,"items":[]}'
@pytest.mark.asyncio
async def test_exclude_none():
    # model_dump(exclude_none=True) drops fields whose current value is None.
    async with base_ormar_config.database:
        category = Category(id=2, name=None)
        assert category.model_dump() == {
            "id": 2,
            "items": [],
            "name": None,
            "visibility": True,
        }
        assert category.model_dump(exclude_none=True) == {
            "id": 2,
            "items": [],
            "visibility": True,
        }
        await category.save()
        category2 = await Category.objects.get()
        assert category2.model_dump() == {
            "id": 2,
            "items": [],
            "name": None,
            "visibility": True,
        }
        assert category2.model_dump(exclude_none=True) == {
            "id": 2,
            "items": [],
            "visibility": True,
        }
        assert (
            category2.model_dump_json(exclude_none=True)
            == '{"id":2,"visibility":true,"items":[]}'
        )
@pytest.mark.asyncio
async def test_exclude_unset():
    # model_dump(exclude_unset=True) keeps only fields explicitly provided.
    async with base_ormar_config.database:
        category = Category(id=3, name="Test 2")
        assert category.model_dump() == {
            "id": 3,
            "items": [],
            "name": "Test 2",
            "visibility": True,
        }
        # visibility was never set explicitly, so it is dropped here.
        assert category.model_dump(exclude_unset=True) == {
            "id": 3,
            "items": [],
            "name": "Test 2",
        }
        await category.save()
        category2 = await Category.objects.get()
        assert category2.model_dump() == {
            "id": 3,
            "items": [],
            "name": "Test 2",
            "visibility": True,
        }
        # NOTE how after loading from db all fields are set explicitly
        # as this is what happens when you populate a model from db
        assert category2.model_dump(exclude_unset=True) == {
            "id": 3,
            "items": [],
            "name": "Test 2",
            "visibility": True,
        }
collerek-ormar-c09209a/tests/test_fastapi/ 0000775 0000000 0000000 00000000000 15130200524 0020601 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_fastapi/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022700 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_fastapi/test_binary_fields.py 0000664 0000000 0000000 00000004112 15130200524 0025022 0 ustar 00root root 0000000 0000000 import base64
import uuid
from enum import Enum
from typing import List
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
headers = {"content-type": "application/json"}
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
blob3 = b"\xc3\x83\x28"
blob4 = b"\xf0\x28\x8c\x28"
blob5 = b"\xee"
blob6 = b"\xff"
class BinaryEnum(Enum):
    # Raw byte payloads (deliberately not valid UTF-8) used as enum values.
    blob3 = blob3
    blob4 = blob4
    blob5 = blob5
    blob6 = blob6


class BinaryThing(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="things")

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    name: str = ormar.Text(default="")
    # Stored as raw bytes internally, but exposed via pydantic/JSON as a
    # base64-encoded string.
    bt: str = ormar.LargeBinary(represent_as_base64_str=True, max_length=100)
create_test_database = init_tests(base_ormar_config)
@app.get("/things", response_model=List[BinaryThing])
async def read_things():
    # Deterministic ordering so the test can index into the result list.
    return await BinaryThing.objects.order_by("name").all()


@app.post("/things", response_model=BinaryThing)
async def create_things(thing: BinaryThing):
    thing = await thing.save()
    return thing
@pytest.mark.asyncio
async def test_read_main():
    # Round-trips a binary payload through the API as a base64 string.
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        response = await client.post(
            "/things",
            json={"bt": base64.b64encode(blob3).decode()},
            headers=headers,
        )
        assert response.status_code == 200
        response = await client.get("/things")
        assert response.json()[0]["bt"] == base64.b64encode(blob3).decode()
        thing = BinaryThing(**response.json()[0])
        # Internally the raw bytes are kept; attribute access re-encodes them.
        assert thing.__dict__["bt"] == blob3
        assert thing.bt == base64.b64encode(blob3).decode()
def test_schema():
    """LargeBinary(represent_as_base64_str=True) is documented as base64 in the schema."""
    json_schema = BinaryThing.model_json_schema()
    bt_schema = json_schema["properties"]["bt"]
    assert bt_schema["format"] == "base64"
    assert json_schema["example"]["bt"] == "string"
collerek-ormar-c09209a/tests/test_fastapi/test_docs_with_multiple_relations_to_one.py 0000664 0000000 0000000 00000004065 15130200524 0031540 0 ustar 00root root 0000000 0000000 from typing import Optional
from uuid import UUID, uuid4
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class CA(ormar.Model):
    # Target model referenced by the two fk-owning models below.
    ormar_config = base_ormar_config.copy(tablename="cas")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    ca_name: str = ormar.Text(default="")


class CB1(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="cb1s")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    cb1_name: str = ormar.Text(default="")
    ca1: Optional[CA] = ormar.ForeignKey(CA, nullable=True)


class CB2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="cb2s")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    cb2_name: str = ormar.Text(default="")
    ca2: Optional[CA] = ormar.ForeignKey(CA, nullable=True)
create_test_database = init_tests(base_ormar_config)
@app.get("/ca", response_model=CA)
async def get_ca():  # pragma: no cover
    # Stub endpoints - only the generated OpenAPI schema is under test.
    return None


@app.get("/cb1", response_model=CB1)
async def get_cb1():  # pragma: no cover
    return None


@app.get("/cb2", response_model=CB2)
async def get_cb2():  # pragma: no cover
    return None
@pytest.mark.asyncio
async def test_all_endpoints():
    # Two models pointing at the same target must still produce a valid,
    # complete OpenAPI component list.
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        response = await client.get("/openapi.json")
        assert response.status_code == 200, response.text
        schema = response.json()
        components = schema["components"]["schemas"]
        # Component names carry module prefixes; strip down to bare class names.
        raw_names_w_o_modules = [x.split("__")[-1] for x in components.keys()]
        assert all(x in raw_names_w_o_modules for x in ["CA", "CB1", "CB2"])
        pk_onlys = [x for x in list(raw_names_w_o_modules) if x.startswith("PkOnly")]
        assert len(pk_onlys) == 4
collerek-ormar-c09209a/tests/test_fastapi/test_enum_schema.py 0000664 0000000 0000000 00000001224 15130200524 0024475 0 ustar 00root root 0000000 0000000 from enum import Enum
import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class MyEnum(Enum):
    # Integer-valued enum mirrored into the generated JSON schema.
    SMALL = 1
    BIG = 2


class EnumExample(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="enum_example")

    id: int = ormar.Integer(primary_key=True)
    size: MyEnum = ormar.Enum(enum_class=MyEnum, default=MyEnum.SMALL)
create_test_database = init_tests(base_ormar_config)
def test_proper_schema():
    """The enum lands in $defs with its integer values and type."""
    defs = EnumExample.model_json_schema()["$defs"]
    assert defs == {"MyEnum": {"title": "MyEnum", "enum": [1, 2], "type": "integer"}}
collerek-ormar-c09209a/tests/test_fastapi/test_excludes_with_get_pydantic.py 0000664 0000000 0000000 00000011102 15130200524 0027606 0 ustar 00root root 0000000 0000000 from typing import ForwardRef, Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class SelfRef(ormar.Model):
ormar_config = base_ormar_config.copy(tablename="self_refs")
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100, default="selfref")
parent = ormar.ForeignKey(ForwardRef("SelfRef"), related_name="children")
SelfRef.update_forward_refs()
class Category(ormar.Model):
    """Simple lookup model backed by the ``categories`` table."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item with an optional fk to Category."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="test")
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
create_test_database = init_tests(base_ormar_config)
async def create_category(category: Category):
    """Persist a posted category; the request schema is patched below."""
    return await Category(**category.model_dump()).save()


# Swap the annotation for a pk-less pydantic variant so the request body model
# omits "id", then register the handler on the app manually.
create_category.__annotations__["category"] = Category.get_pydantic(exclude={"id"})
app.post("/categories/", response_model=Category)(create_category)
@app.post(
    "/selfrefs/",
    response_model=SelfRef.get_pydantic(exclude={"parent", "children__name"}),
)
async def create_selfref(
    selfref: SelfRef.get_pydantic(  # type: ignore
        exclude={"children__name"}  # noqa: F821
    ),
):
    """Create a SelfRef from a trimmed payload and upsert any posted children."""
    selfr = SelfRef(**selfref.model_dump())
    await selfr.save()
    if selfr.children:
        for child in selfr.children:
            await child.upsert()
    return selfr
@app.get("/selfrefs/{ref_id}/")
async def get_selfref(ref_id: int):
    """Return a SelfRef by pk with its children relation eagerly loaded."""
    return await SelfRef.objects.select_related("children").get(id=ref_id)
@pytest.mark.asyncio
async def test_read_main():
    """End-to-end check of get_pydantic-based request/response trimming."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        test_category = dict(name="Foo", id=12)
        response = await client.post("/categories/", json=test_category)
        assert response.status_code == 200
        cat = Category(**response.json())
        assert cat.name == "Foo"
        # Posted id=12 was stripped by get_pydantic(exclude={"id"}) - db assigns 1.
        assert cat.id == 1
        assert cat.items == []
        test_selfref = dict(name="test")
        test_selfref2 = dict(name="test2", parent={"id": 1})
        test_selfref3 = dict(name="test3", children=[{"name": "aaa"}])
        response = await client.post("/selfrefs/", json=test_selfref)
        assert response.status_code == 200
        self_ref = SelfRef(**response.json())
        assert self_ref.id == 1
        assert self_ref.name == "test"
        assert self_ref.parent is None
        assert self_ref.children == []
        response = await client.post("/selfrefs/", json=test_selfref2)
        assert response.status_code == 200
        self_ref = SelfRef(**response.json())
        assert self_ref.id == 2
        assert self_ref.name == "test2"
        # "parent" is excluded from the response model, so it comes back empty.
        assert self_ref.parent is None
        assert self_ref.children == []
        response = await client.post("/selfrefs/", json=test_selfref3)
        assert response.status_code == 200
        self_ref = SelfRef(**response.json())
        assert self_ref.id == 3
        assert self_ref.name == "test3"
        assert self_ref.parent is None
        # children__name was excluded, so only the child's pk survives.
        assert self_ref.children[0].model_dump() == {"id": 4}
        response = await client.get("/selfrefs/3/")
        assert response.status_code == 200
        check_children = SelfRef(**response.json())
        assert check_children.children[0].model_dump() == {
            "children": [],
            "id": 4,
            "name": "selfref",
            "parent": {"id": 3, "name": "test3"},
        }
        response = await client.get("/selfrefs/2/")
        assert response.status_code == 200
        check_children = SelfRef(**response.json())
        assert check_children.model_dump() == {
            "children": [],
            "id": 2,
            "name": "test2",
            "parent": {"id": 1},
        }
        response = await client.get("/selfrefs/1/")
        assert response.status_code == 200
        check_children = SelfRef(**response.json())
        assert check_children.model_dump() == {
            "children": [{"id": 2, "name": "test2"}],
            "id": 1,
            "name": "test",
            "parent": None,
        }
collerek-ormar-c09209a/tests/test_fastapi/test_excluding_fields.py 0000664 0000000 0000000 00000006653 15130200524 0025534 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Category(ormar.Model):
    """Category side of the m2m relation with Item."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item linked to categories through an auto-generated through model."""

    ormar_config = base_ormar_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    categories: List[Category] = ormar.ManyToMany(Category)
create_test_database = init_tests(base_ormar_config)
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
    """Persist the item together with its nested categories."""
    await item.save_related(follow=True, save_all=True)
    return item
@app.get("/items/{item_id}")
async def get_item(item_id: int):
    """Return the item serialized without primary keys or through models."""
    return (
        await Item.objects.select_related("categories").get(pk=item_id)
    ).model_dump(exclude_primary_keys=True, exclude_through_models=True)
@app.get("/categories/{category_id}")
async def get_category(category_id: int):
    """Return the category (with items) serialized without primary keys."""
    category = await Category.objects.select_related("items").get(pk=category_id)
    payload = category.model_dump(exclude_primary_keys=True)
    return payload
@app.get("/categories/nt/{category_id}")
async def get_category_no_through(category_id: int):
    """Return the category (with items) serialized without through models."""
    category = await Category.objects.select_related("items").get(pk=category_id)
    return category.model_dump(exclude_through_models=True)
@app.get("/categories/ntp/{category_id}")
async def get_category_no_pk_through(category_id: int):
    """Return the category without primary keys and without through models."""
    category = await Category.objects.select_related("items").get(pk=category_id)
    dumped = category.model_dump(exclude_through_models=True, exclude_primary_keys=True)
    return dumped
@pytest.mark.asyncio
async def test_all_endpoints():
    """Verify model_dump exclusion flags across the REST endpoints."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        item = {
            "name": "test",
            "categories": [{"name": "test cat"}, {"name": "test cat2"}],
        }
        response = await client.post("/items/", json=item)
        item_check = Item(**response.json())
        assert item_check.id is not None
        assert item_check.categories[0].id is not None
        # With pks and through models stripped, the payload round-trips exactly.
        no_pk_item = (await client.get(f"/items/{item_check.id}")).json()
        assert no_pk_item == item
        no_pk_category = (
            await client.get(f"/categories/{item_check.categories[0].id}")
        ).json()
        assert no_pk_category == {
            "items": [
                {
                    "itemcategory": {"category": None, "id": 1, "item": None},
                    "name": "test",
                }
            ],
            "name": "test cat",
        }
        no_through_category = (
            await client.get(f"/categories/nt/{item_check.categories[0].id}")
        ).json()
        assert no_through_category == {
            "id": 1,
            "items": [{"id": 1, "name": "test"}],
            "name": "test cat",
        }
        no_through_category = (
            await client.get(f"/categories/ntp/{item_check.categories[0].id}")
        ).json()
        assert no_through_category == {"items": [{"name": "test"}], "name": "test cat"}
collerek-ormar-c09209a/tests/test_fastapi/test_extra_ignore_parameter.py 0000664 0000000 0000000 00000002213 15130200524 0026736 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from ormar import Extra
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Item(ormar.Model):
    """Model configured with extra=Extra.ignore to drop unknown payload keys."""

    ormar_config = base_ormar_config.copy(extra=Extra.ignore)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
create_test_database = init_tests(base_ormar_config)
@app.post("/item/", response_model=Item)
async def create_item(item: Item):
    """Persist the posted item and return the saved instance."""
    saved = await item.save()
    return saved
@pytest.mark.asyncio
async def test_extra_parameters_in_request():
    """Unknown keys in the payload are dropped thanks to extra=Extra.ignore."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        data = {"name": "Name", "extraname": "to ignore"}
        resp = await client.post("item/", json=data)
        assert resp.status_code == 200
        assert "name" in resp.json()
        assert resp.json().get("name") == "Name"
collerek-ormar-c09209a/tests/test_fastapi/test_fastapi_docs.py 0000664 0000000 0000000 00000011352 15130200524 0024653 0 ustar 00root root 0000000 0000000 import datetime
from typing import List, Optional, Union
import ormar
import pydantic
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from pydantic import Field
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class PTestA(pydantic.BaseModel):
    # Plain pydantic model used as a nested payload type on Item.test_P.
    c: str
    d: bytes
    e: datetime.datetime
class PTestP(pydantic.BaseModel):
    # Wrapper around PTestA, also pydantic-only (no ormar columns).
    a: int
    b: Optional[PTestA]
class Category(ormar.Model):
    """Category side of the Item m2m relation."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Model mixing ormar columns with pydantic-only fields."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Fields below have no ormar column definition (plain pydantic fields).
    pydantic_int: Optional[int] = None
    test_P: List[PTestP] = Field(default_factory=list)
    test_P_or_A: Union[int, str, None] = None
    categories = ormar.ManyToMany(Category)
create_test_database = init_tests(base_ormar_config)
@app.get("/items/", response_model=List[Item])
async def get_items():
    """List all items (with categories); test_P_or_A is overwritten before return."""
    items = await Item.objects.select_related("categories").all()
    for item in items:
        item.test_P_or_A = 2
    return items
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
    """Persist the posted item and return it."""
    # ormar's save() returns the saved instance itself.
    return await item.save()
@app.post("/items/add_category/", response_model=Item)
async def add_item_category(item: Item, category: Category):
    """Attach category to item through the m2m relation and return the item."""
    await item.categories.add(category)
    return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
    """Persist the posted category and return it."""
    # ormar's save() returns the saved instance itself.
    return await category.save()
@pytest.mark.asyncio
async def test_all_endpoints():
    """Drive CRUD endpoints end to end and check relations plus the docs page."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        response = await client.post("/categories/", json={"name": "test cat"})
        assert response.status_code == 200
        category = response.json()
        response = await client.post("/categories/", json={"name": "test cat2"})
        assert response.status_code == 200
        category2 = response.json()
        response = await client.post(
            "/items/", json={"name": "test", "id": 1, "test_P_or_A": 0}
        )
        assert response.status_code == 200
        item = Item(**response.json())
        assert item.pk is not None
        response = await client.post(
            "/items/add_category/",
            json={"item": item.model_dump(), "category": category},
        )
        assert response.status_code == 200
        item = Item(**response.json())
        assert len(item.categories) == 1
        assert item.categories[0].name == "test cat"
        await client.post(
            "/items/add_category/",
            json={"item": item.model_dump(), "category": category2},
        )
        response = await client.get("/items/")
        assert response.status_code == 200
        items = [Item(**item) for item in response.json()]
        assert items[0] == item
        assert len(items[0].categories) == 2
        assert items[0].categories[0].name == "test cat"
        assert items[0].categories[1].name == "test cat2"
        # The interactive docs page must render for the generated schema.
        response = await client.get("/docs")
        assert response.status_code == 200
        assert b"FastAPI - Swagger UI" in response.content
def test_schema_modification():
    """The generated JSON schema exposes relation arrays and example payloads."""
    schema = Item.model_json_schema()
    # The optional m2m relation must offer an array variant in its anyOf.
    assert any(
        x.get("type") == "array" for x in schema["properties"]["categories"]["anyOf"]
    )
    assert schema["properties"]["categories"]["title"] == "Categories"
    assert schema["example"] == {
        "categories": [{"id": 0, "name": "string"}],
        "id": 0,
        "name": "string",
        "pydantic_int": 0,
        "test_P": [{"a": 0, "b": {"c": "string", "d": "string", "e": "string"}}],
        "test_P_or_A": (0, "string"),
    }
    schema = Category.model_json_schema()
    # The reverse relation puts the nested Item example under $defs.
    assert schema["$defs"]["Category"]["example"] == {
        "id": 0,
        "name": "string",
        "items": [
            {
                "id": 0,
                "name": "string",
                "pydantic_int": 0,
                "test_P": [
                    {"a": 0, "b": {"c": "string", "d": "string", "e": "string"}}
                ],
                "test_P_or_A": (0, "string"),
            }
        ],
    }
def test_schema_gen():
    """Input/Output variants of Item and plain Category land in the OpenAPI components."""
    components = app.openapi()["components"]["schemas"]
    assert "Category" in components
    stripped = {name.split("__")[-1] for name in components}
    assert "Item-Input" in stripped
    assert "Item-Output" in stripped
collerek-ormar-c09209a/tests/test_fastapi/test_fastapi_usage.py 0000664 0000000 0000000 00000003544 15130200524 0025033 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Category(ormar.Model):
    """Category with an auto-increment pk."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item with a nullable fk to Category."""

    ormar_config = base_ormar_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
create_test_database = init_tests(base_ormar_config)
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
    # Intentionally NOT saved - the test only checks (de)serialization of the
    # parsed ormar model, including its nested category relation.
    return item
@pytest.mark.asyncio
async def test_read_main():
    """Posting a nested item echoes it back with the reverse relation populated."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        response = await client.post(
            "/items/", json={"name": "test", "id": 1, "category": {"name": "test cat"}}
        )
        assert response.status_code == 200
        # Nothing is saved, so the category pk stays None while the reverse
        # "items" list is already wired up on the parsed model.
        assert response.json() == {
            "category": {
                "id": None,
                "items": [
                    {
                        "category": {"id": None, "name": "test cat"},
                        "id": 1,
                        "name": "test",
                    }
                ],
                "name": "test cat",
            },
            "id": 1,
            "name": "test",
        }
        item = Item(**response.json())
        assert item.id == 1
        assert item.category.items[0].id == 1
collerek-ormar-c09209a/tests/test_fastapi/test_inheritance_concrete_fastapi.py 0000664 0000000 0000000 00000026165 15130200524 0030106 0 ustar 00root root 0000000 0000000 import datetime
from typing import List, Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from ormar.relations.relation_proxy import RelationProxy
from pydantic import computed_field
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class AuditModel(ormar.Model):
    """Abstract base contributing created_by/updated_by audit columns."""

    ormar_config = base_ormar_config.copy(abstract=True)

    created_by: str = ormar.String(max_length=100)
    updated_by: str = ormar.String(max_length=100, default="Sam")

    @computed_field
    def audit(self) -> str:  # pragma: no cover
        return f"{self.created_by} {self.updated_by}"
class DateFieldsModelNoSubclass(ormar.Model):
    """Concrete (non-abstract) date-fields model, kept outside the hierarchy."""

    ormar_config = base_ormar_config.copy(tablename="test_date_models")

    date_id: int = ormar.Integer(primary_key=True)
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
    updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class DateFieldsModel(ormar.Model):
    """Abstract base adding aliased audit dates plus table-level constraints."""

    ormar_config = base_ormar_config.copy(
        abstract=True,
        constraints=[
            # Constraints reference the *database* column names (the aliases).
            ormar.fields.constraints.UniqueColumns(
                "creation_date",
                "modification_date",
            ),
            ormar.fields.constraints.CheckColumns(
                "creation_date <= modification_date",
            ),
        ],
    )

    # Python attribute names differ from db columns via the name= alias.
    created_date: datetime.datetime = ormar.DateTime(
        default=datetime.datetime.now, name="creation_date"
    )
    updated_date: datetime.datetime = ormar.DateTime(
        default=datetime.datetime.now, name="modification_date"
    )
class Person(ormar.Model):
    """Relation target used by the Car/Car2 hierarchies."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Car(ormar.Model):
    """Abstract vehicle base with two fks to Person (distinct related_names)."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)
    owner: Person = ormar.ForeignKey(Person)
    co_owner: Person = ormar.ForeignKey(Person, related_name="coowned")
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Car2(ormar.Model):
    """Abstract vehicle base using an m2m co_owners relation instead of a fk."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)
    owner: Person = ormar.ForeignKey(Person, related_name="owned")
    co_owners: RelationProxy[Person] = ormar.ManyToMany(Person, related_name="coowned")
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Bus(Car):
    """Concrete Car overriding the inherited owner fk's related_name."""

    ormar_config = base_ormar_config.copy(tablename="buses")

    owner: Person = ormar.ForeignKey(Person, related_name="buses")
    max_persons: int = ormar.Integer()
class Bus2(Car2):
    """Concrete Car2 inheriting the m2m co_owners relation unchanged."""

    ormar_config = base_ormar_config.copy(tablename="buses2")

    max_persons: int = ormar.Integer()
class Category(DateFieldsModel, AuditModel):
    """Concrete model combining the date-fields and audit abstract bases."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    code: int = ormar.Integer()

    @computed_field
    def code_name(self) -> str:
        return f"{self.code}:{self.name}"

    # Overrides the inherited (no-cover) audit property from AuditModel.
    @computed_field
    def audit(self) -> str:
        return f"{self.created_by} {self.updated_by}"
class Subject(DateFieldsModel):
    """Concrete date-fields model with a fk to Category."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    category: Optional[Category] = ormar.ForeignKey(Category)
class Truck(Car):
    """Concrete Car keeping the inherited fks as-is."""

    ormar_config = base_ormar_config.copy()

    max_capacity: int = ormar.Integer()
class Truck2(Car2):
    """Concrete Car2 keeping the inherited m2m co_owners relation."""

    ormar_config = base_ormar_config.copy(tablename="trucks2")

    max_capacity: int = ormar.Integer()
create_test_database = init_tests(base_ormar_config)
@app.post("/subjects/", response_model=Subject)
async def create_item(item: Subject):
    # Echoed without saving - the test only checks parsing/serialization.
    return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
    """Persist the posted category and return it."""
    # ormar's save() returns the saved instance itself.
    return await category.save()
@app.post("/buses/", response_model=Bus)
async def create_bus(bus: Bus):
    """Persist the posted bus and return it."""
    # ormar's save() returns the saved instance itself.
    return await bus.save()
@app.get("/buses/{item_id}", response_model=Bus)
async def get_bus(item_id: int):
    """Fetch a single bus with owner and co-owner eagerly loaded."""
    return await Bus.objects.select_related(["owner", "co_owner"]).get(pk=item_id)
@app.get("/buses/", response_model=List[Bus])
async def get_buses():
    """List all buses with owner and co-owner eagerly loaded."""
    return await Bus.objects.select_related(["owner", "co_owner"]).all()
@app.post("/trucks/", response_model=Truck)
async def create_truck(truck: Truck):
    """Persist the posted truck and return it."""
    # ormar's save() returns the saved instance itself.
    return await truck.save()
@app.post("/persons/", response_model=Person)
async def create_person(person: Person):
    """Persist the posted person and return it."""
    # ormar's save() returns the saved instance itself.
    return await person.save()
@app.post("/buses2/", response_model=Bus2)
async def create_bus2(bus: Bus2):
    """Persist the posted Bus2 and return it."""
    # ormar's save() returns the saved instance itself.
    return await bus.save()
@app.post("/buses2/{item_id}/add_coowner/", response_model=Bus2)
async def add_bus_coowner(item_id: int, person: Person):
    """Add person to the bus's co_owners m2m relation and return the bus."""
    bus = await Bus2.objects.select_related(["owner", "co_owners"]).get(pk=item_id)
    await bus.co_owners.add(person)
    return bus
@app.get("/buses2/", response_model=List[Bus2])
async def get_buses2():
    """List all Bus2 rows with owner and co_owners eagerly loaded."""
    return await Bus2.objects.select_related(["owner", "co_owners"]).all()
@app.post("/trucks2/", response_model=Truck2)
async def create_truck2(truck: Truck2):
    """Persist the posted Truck2 and return it."""
    # ormar's save() returns the saved instance itself.
    return await truck.save()
@app.post("/trucks2/{item_id}/add_coowner/", response_model=Truck2)
async def add_truck_coowner(item_id: int, person: Person):
    """Add person to the truck's co_owners m2m relation and return the truck."""
    truck = await Truck2.objects.select_related(["owner", "co_owners"]).get(pk=item_id)
    await truck.co_owners.add(person)
    return truck
@pytest.mark.asyncio
async def test_read_main():
    """Inherited date/audit fields survive a POST round-trip."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        test_category = dict(name="Foo", code=123, created_by="Sam", updated_by="Max")
        test_subject = dict(name="Bar")
        response = await client.post("/categories/", json=test_category)
        assert response.status_code == 200
        cat = Category(**response.json())
        assert cat.name == "Foo"
        assert cat.created_by == "Sam"
        assert cat.created_date is not None
        assert cat.id == 1
        cat_dict = cat.model_dump()
        # datetimes must be stringified by hand before being sent as JSON.
        cat_dict["updated_date"] = cat_dict["updated_date"].strftime(
            "%Y-%m-%d %H:%M:%S.%f"
        )
        cat_dict["created_date"] = cat_dict["created_date"].strftime(
            "%Y-%m-%d %H:%M:%S.%f"
        )
        test_subject["category"] = cat_dict
        response = await client.post("/subjects/", json=test_subject)
        assert response.status_code == 200
        sub = Subject(**response.json())
        assert sub.name == "Bar"
        assert sub.category.pk == cat.pk
        assert isinstance(sub.updated_date, datetime.datetime)
@pytest.mark.asyncio
async def test_inheritance_with_relation():
    """fk relations inherited from the abstract Car base behave on Bus/Truck."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        sam = Person(**(await client.post("/persons/", json={"name": "Sam"})).json())
        joe = Person(**(await client.post("/persons/", json={"name": "Joe"})).json())
        truck_dict = dict(
            name="Shelby wanna be",
            max_capacity=1400,
            owner=sam.model_dump(),
            co_owner=joe.model_dump(),
        )
        bus_dict = dict(
            name="Unicorn",
            max_persons=50,
            owner=sam.model_dump(),
            co_owner=joe.model_dump(),
        )
        unicorn = Bus(**(await client.post("/buses/", json=bus_dict)).json())
        shelby = Truck(**(await client.post("/trucks/", json=truck_dict)).json())
        assert shelby.name == "Shelby wanna be"
        assert shelby.owner.name == "Sam"
        assert shelby.co_owner.name == "Joe"
        assert shelby.co_owner == joe
        assert shelby.max_capacity == 1400
        assert unicorn.name == "Unicorn"
        assert unicorn.owner == sam
        assert unicorn.owner.name == "Sam"
        assert unicorn.co_owner.name == "Joe"
        assert unicorn.max_persons == 50
        # Refetch through the GET endpoint and expect identical data.
        unicorn2 = Bus(**(await client.get(f"/buses/{unicorn.pk}")).json())
        assert unicorn2.name == "Unicorn"
        assert unicorn2.owner == sam
        assert unicorn2.owner.name == "Sam"
        assert unicorn2.co_owner.name == "Joe"
        assert unicorn2.max_persons == 50
        buses = [Bus(**x) for x in (await client.get("/buses/")).json()]
        assert len(buses) == 1
        assert buses[0].name == "Unicorn"
@pytest.mark.asyncio
async def test_inheritance_with_m2m_relation():
    """m2m co_owners inherited from Car2 works for both Bus2 and Truck2."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        sam = Person(**(await client.post("/persons/", json={"name": "Sam"})).json())
        joe = Person(**(await client.post("/persons/", json={"name": "Joe"})).json())
        alex = Person(**(await client.post("/persons/", json={"name": "Alex"})).json())
        truck_dict = dict(
            name="Shelby wanna be", max_capacity=2000, owner=sam.model_dump()
        )
        bus_dict = dict(name="Unicorn", max_persons=80, owner=sam.model_dump())
        unicorn = Bus2(**(await client.post("/buses2/", json=bus_dict)).json())
        shelby = Truck2(**(await client.post("/trucks2/", json=truck_dict)).json())
        unicorn = Bus2(
            **(
                await client.post(
                    f"/buses2/{unicorn.pk}/add_coowner/", json=joe.model_dump()
                )
            ).json()
        )
        unicorn = Bus2(
            **(
                await client.post(
                    f"/buses2/{unicorn.pk}/add_coowner/", json=alex.model_dump()
                )
            ).json()
        )
        assert shelby.name == "Shelby wanna be"
        assert shelby.owner.name == "Sam"
        assert len(shelby.co_owners) == 0
        assert shelby.max_capacity == 2000
        assert unicorn.name == "Unicorn"
        assert unicorn.owner == sam
        assert unicorn.owner.name == "Sam"
        assert unicorn.co_owners[0].name == "Joe"
        assert unicorn.co_owners[1] == alex
        assert unicorn.max_persons == 80
        # First add_coowner response is discarded; the second carries both.
        await client.post(f"/trucks2/{shelby.pk}/add_coowner/", json=alex.model_dump())
        shelby = Truck2(
            **(
                await client.post(
                    f"/trucks2/{shelby.pk}/add_coowner/", json=joe.model_dump()
                )
            ).json()
        )
        assert shelby.name == "Shelby wanna be"
        assert shelby.owner.name == "Sam"
        assert len(shelby.co_owners) == 2
        assert shelby.co_owners[0] == alex
        assert shelby.co_owners[1] == joe
        assert shelby.max_capacity == 2000
        buses = [Bus2(**x) for x in (await client.get("/buses2/")).json()]
        assert len(buses) == 1
        assert buses[0].name == "Unicorn"
collerek-ormar-c09209a/tests/test_fastapi/test_inheritance_mixins_fastapi.py 0000664 0000000 0000000 00000005403 15130200524 0027603 0 ustar 00root root 0000000 0000000 import datetime
from typing import Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class AuditMixin:
    """Plain (non-Model) mixin contributing audit columns to ormar models."""

    created_by: str = ormar.String(max_length=100)
    updated_by: str = ormar.String(max_length=100, default="Sam")
class DateFieldsMixins:
    """Plain (non-Model) mixin contributing created/updated date columns."""

    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
    updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Category(ormar.Model, DateFieldsMixins, AuditMixin):
    """Model picking up fields from both mixins via multiple inheritance."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    code: int = ormar.Integer()
class Subject(ormar.Model, DateFieldsMixins):
    """Model with date-mixin fields and a fk to Category."""

    ormar_config = base_ormar_config.copy(tablename="subjects")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    category: Optional[Category] = ormar.ForeignKey(Category)
create_test_database = init_tests(base_ormar_config)
@app.post("/subjects/", response_model=Subject)
async def create_item(item: Subject):
    # Echoed without saving - the test only checks parsing/serialization.
    return item
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
    """Persist the posted category and return it."""
    # ormar's save() returns the saved instance itself.
    return await category.save()
@pytest.mark.asyncio
async def test_read_main():
    """Mixin-provided date/audit fields survive a POST round-trip."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        test_category = dict(name="Foo", code=123, created_by="Sam", updated_by="Max")
        test_subject = dict(name="Bar")
        response = await client.post("/categories/", json=test_category)
        assert response.status_code == 200
        cat = Category(**response.json())
        assert cat.name == "Foo"
        assert cat.created_by == "Sam"
        assert cat.created_date is not None
        assert cat.id == 1
        cat_dict = cat.model_dump()
        # datetimes must be stringified by hand before being sent as JSON.
        cat_dict["updated_date"] = cat_dict["updated_date"].strftime(
            "%Y-%m-%d %H:%M:%S.%f"
        )
        cat_dict["created_date"] = cat_dict["created_date"].strftime(
            "%Y-%m-%d %H:%M:%S.%f"
        )
        test_subject["category"] = cat_dict
        response = await client.post("/subjects/", json=test_subject)
        assert response.status_code == 200
        sub = Subject(**response.json())
        assert sub.name == "Bar"
        assert sub.category.pk == cat.pk
        assert isinstance(sub.updated_date, datetime.datetime)
collerek-ormar-c09209a/tests/test_fastapi/test_json_field_fastapi.py 0000664 0000000 0000000 00000012365 15130200524 0026044 0 ustar 00root root 0000000 0000000 # type: ignore
import uuid
from typing import List
import ormar
import pydantic
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Thing(ormar.Model):
    """Model with a UUID pk and a pydantic Json column."""

    ormar_config = base_ormar_config.copy(tablename="things")

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    name: str = ormar.Text(default="")
    # Non-nullable JSON field - must be supplied at construction time.
    js: pydantic.Json = ormar.JSON()
create_test_database = init_tests(base_ormar_config)
@app.get("/things", response_model=List[Thing])
async def read_things():
    """Return every Thing ordered by name."""
    things = await Thing.objects.order_by("name").all()
    return things
@app.get("/things_with_sample", response_model=List[Thing])
async def read_things_sample():
    """Seed two Things (list payload vs JSON-string payload) and list all."""
    await Thing(name="b", js=["asdf", "asdf", "bobby", "nigel"]).save()
    # js also accepts an already-serialized JSON string.
    await Thing(name="a", js='["lemon", "raspberry", "lime", "pumice"]').save()
    return await Thing.objects.order_by("name").all()
@app.get("/things_with_sample_after_init", response_model=Thing)
async def read_things_init():
    """Create a Thing whose js is replaced after construction, then save it."""
    sample = Thing(js="{}")
    sample.js = ["js", "set", "after", "constructor"]
    sample.name = "d"
    await sample.save()
    return sample
@app.put("/update_thing", response_model=Thing)
async def update_things(thing: Thing):
    """Overwrite js on the posted thing and persist via update()."""
    thing.js = ["js", "set", "after", "update"]  # type: ignore
    await thing.update()
    return thing
@app.post("/things", response_model=Thing)
async def create_things(thing: Thing):
    """Persist the posted thing and return the saved instance."""
    # ormar's save() returns the saved instance itself.
    return await thing.save()
@app.get("/things_untyped")
async def read_things_untyped():
    """Same listing as /things but without a declared response_model."""
    things = await Thing.objects.order_by("name").all()
    return things
@pytest.mark.asyncio
async def test_json_is_required_if_not_nullable():
    """A non-nullable JSON field must be provided at construction time."""
    with pytest.raises(pydantic.ValidationError):
        Thing()
@pytest.mark.asyncio
async def test_json_is_not_required_if_nullable():
    """A nullable JSON field may be omitted at construction time."""

    class Thing2(ormar.Model):
        ormar_config = base_ormar_config.copy(tablename="things2")

        id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
        name: str = ormar.Text(default="")
        js: pydantic.Json = ormar.JSON(nullable=True)

    # Must not raise even though js is missing.
    Thing2()
@pytest.mark.asyncio
async def test_setting_values_after_init():
    """js survives save/update round-trips without double JSON encoding."""
    async with base_ormar_config.database:
        t1 = Thing(id="67a82813-d90c-45ff-b546-b4e38d7030d7", name="t1", js=["thing1"])
        assert '["thing1"]' in t1.model_dump_json()
        await t1.save()
        t1.model_dump_json()
        assert '["thing1"]' in t1.model_dump_json()
        assert '["thing1"]' in (await Thing.objects.get(id=t1.id)).model_dump_json()
        await t1.update()
        assert '["thing1"]' in (await Thing.objects.get(id=t1.id)).model_dump_json()
@pytest.mark.asyncio
async def test_read_main():
    """JSON columns round-trip through the API without double encoding."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        response = await client.get("/things_with_sample")
        assert response.status_code == 200
        # check if raw response not double encoded
        assert '["lemon","raspberry","lime","pumice"]' in response.text
        # parse json and check that we get lists not strings
        resp = response.json()
        assert resp[0].get("js") == ["lemon", "raspberry", "lime", "pumice"]
        assert resp[1].get("js") == ["asdf", "asdf", "bobby", "nigel"]
        # create a new one
        response = await client.post(
            "/things", json={"js": ["test", "test2"], "name": "c"}
        )
        assert response.json().get("js") == ["test", "test2"]
        # get all with new one
        response = await client.get("/things")
        assert response.status_code == 200
        assert '["test","test2"]' in response.text
        resp = response.json()
        assert resp[0].get("js") == ["lemon", "raspberry", "lime", "pumice"]
        assert resp[1].get("js") == ["asdf", "asdf", "bobby", "nigel"]
        assert resp[2].get("js") == ["test", "test2"]
        response = await client.get("/things_with_sample_after_init")
        assert response.status_code == 200
        resp = response.json()
        assert resp.get("js") == ["js", "set", "after", "constructor"]
        # test new with after constructor
        response = await client.get("/things")
        resp = response.json()
        assert resp[0].get("js") == ["lemon", "raspberry", "lime", "pumice"]
        assert resp[1].get("js") == ["asdf", "asdf", "bobby", "nigel"]
        assert resp[2].get("js") == ["test", "test2"]
        assert resp[3].get("js") == ["js", "set", "after", "constructor"]
        response = await client.put("/update_thing", json=resp[3])
        assert response.status_code == 200
        resp = response.json()
        assert resp.get("js") == ["js", "set", "after", "update"]
        # test new with after constructor
        response = await client.get("/things_untyped")
        resp = response.json()
        assert resp[0].get("js") == ["lemon", "raspberry", "lime", "pumice"]
        assert resp[1].get("js") == ["asdf", "asdf", "bobby", "nigel"]
        assert resp[2].get("js") == ["test", "test2"]
        assert resp[3].get("js") == ["js", "set", "after", "update"]
collerek-ormar-c09209a/tests/test_fastapi/test_m2m_forwardref.py 0000664 0000000 0000000 00000004736 15130200524 0025140 0 ustar 00root root 0000000 0000000 from typing import ForwardRef, List, Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from starlette import status
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
CityRef = ForwardRef("City")
CountryRef = ForwardRef("Country")
# models.py
class Country(ormar.Model):
    """Country model exercising a forward-referenced FK and a self-referencing M2M."""

    ormar_config = base_ormar_config.copy(tablename="countries")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128, unique=True)
    iso2: str = ormar.String(max_length=3)
    iso3: str = ormar.String(max_length=4, unique=True)
    population: int = ormar.Integer(maximum=10000000000)
    demonym: str = ormar.String(max_length=128)
    native_name: str = ormar.String(max_length=128)
    # FK declared through a ForwardRef because City is defined after Country.
    capital: Optional[CityRef] = ormar.ForeignKey(  # type: ignore
        CityRef, related_name="capital_city", nullable=True
    )
    # Self-referencing many-to-many; the reverse side is intentionally skipped.
    borders: List[Optional[CountryRef]] = ormar.ManyToMany(  # type: ignore
        CountryRef, nullable=True, skip_reverse=True
    )
class City(ormar.Model):
    """City with an FK to Country; the reverse relation is skipped on purpose."""

    ormar_config = base_ormar_config.copy(tablename="cities")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128)
    country: Country = ormar.ForeignKey(
        Country, related_name="cities", skip_reverse=True
    )
# Resolve CityRef/CountryRef now that both classes exist.
Country.update_forward_refs()

create_test_database = init_tests(base_ormar_config)
@app.post("/", response_model=Country, status_code=status.HTTP_201_CREATED)
async def create_country(country: Country):
    """Persist the posted payload; FastAPI already validated it into an ormar model."""
    return await country.upsert()
@pytest.mark.asyncio
async def test_payload():
    """POSTing a country without the optional relations should create it (201)."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        payload = {
            "name": "Thailand",
            "iso2": "TH",
            "iso3": "THA",
            "population": 23123123,
            "demonym": "Thai",
            "native_name": "Thailand",
        }
        resp = await client.post(
            "/", json=payload, headers={"application-type": "json"}
        )
        # print(resp.content)
        assert resp.status_code == 201
        # The response must round-trip back into the ormar model.
        resp_country = Country(**resp.json())
        assert resp_country.name == "Thailand"
collerek-ormar-c09209a/tests/test_fastapi/test_more_reallife_fastapi.py 0000664 0000000 0000000 00000007562 15130200524 0026540 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config for this module and the FastAPI app bound to it.
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Category(ormar.Model):
    """Simple lookup model used as the FK target of Item."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item with an optional FK to Category."""

    ormar_config = base_ormar_config.copy(tablename="items")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
create_test_database = init_tests(base_ormar_config)
@app.get("/items", response_model=List[Item])
async def get_items():
    """Return every item together with its joined category."""
    return await Item.objects.select_related("category").all()
@app.get("/items/raw", response_model=List[Item])
async def get_raw_items():
    """Return items without following the category relation."""
    return await Item.objects.all()
@app.post("/items", response_model=Item)
async def create_item(item: Item):
    """Persist the posted item and echo it back (save returns the model itself)."""
    return await item.save()
@app.post("/categories", response_model=Category)
async def create_category(category: Category):
    """Persist the posted category and echo it back (save returns the model itself)."""
    return await category.save()
@app.get("/items/{item_id}")
async def get_item(item_id: int):
    """Fetch a single item by primary key."""
    return await Item.objects.get(pk=item_id)
@app.put("/items/{item_id}")
async def update_item(item_id: int, item: Item):
    """Overwrite the stored item's fields with the posted payload."""
    stored = await Item.objects.get(pk=item_id)
    return await stored.update(**item.model_dump())
@app.delete("/items/{item_id}")
async def delete_item(item_id: int):
    """Delete one item and report how many rows were removed."""
    stored = await Item.objects.get(pk=item_id)
    deleted = await stored.delete()
    return {"deleted_rows": deleted}
@pytest.mark.asyncio
async def test_all_endpoints():
    """End-to-end CRUD walk through every route defined above."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        # Create a category, then an item referencing it.
        response = await client.post("/categories", json={"name": "test cat"})
        category = response.json()
        response = await client.post(
            "/items", json={"name": "test", "id": 1, "category": category}
        )
        item = Item(**response.json())
        assert item.pk is not None

        response = await client.get("/items")
        items = [Item(**item) for item in response.json()]
        assert items[0] == item

        # Update and verify through both list endpoints.
        item.name = "New name"
        response = await client.put(f"/items/{item.pk}", json=item.model_dump())
        assert response.json() == item.model_dump()
        response = await client.get("/items")
        items = [Item(**item) for item in response.json()]
        assert items[0].name == "New name"

        response = await client.get("/items/raw")
        items = [Item(**item) for item in response.json()]
        assert items[0].name == "New name"
        # The raw endpoint does not join category, so only its pk is populated.
        assert items[0].category.name is None

        response = await client.get(f"/items/{item.pk}")
        new_item = Item(**response.json())
        assert new_item == item

        # Delete and confirm the table is empty.
        response = await client.delete(f"/items/{item.pk}")
        assert response.json().get("deleted_rows", "__UNDEFINED__") != "__UNDEFINED__"
        response = await client.get("/items")
        items = response.json()
        assert len(items) == 0

        # Re-create, delete again via pk from the listing.
        await client.post(
            "/items", json={"name": "test_2", "id": 2, "category": category}
        )
        response = await client.get("/items")
        items = response.json()
        assert len(items) == 1
        item = Item(**items[0])
        response = await client.delete(f"/items/{item.pk}")
        assert response.json().get("deleted_rows", "__UNDEFINED__") != "__UNDEFINED__"

        # OpenAPI docs must still render for these models.
        response = await client.get("/docs")
        assert response.status_code == 200
collerek-ormar-c09209a/tests/test_fastapi/test_nested_saving.py 0000664 0000000 0000000 00000011051 15130200524 0025041 0 ustar 00root root 0000000 0000000 from typing import Any, Dict, Optional, Set, Type, Union, cast
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from ormar.queryset.utils import translate_list_to_dict
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config, FastAPI app and default JSON headers for requests.
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))

headers = {"content-type": "application/json"}
class Department(ormar.Model):
    """Root of the nested-save tree: department -> courses -> students."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    department_name: str = ormar.String(max_length=100)
class Course(ormar.Model):
    """Course belonging to an optional department."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    course_name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean()
    department: Optional[Department] = ormar.ForeignKey(Department)
class Student(ormar.Model):
    """Student with an m2m to courses (auto-generated "studentcourse" through table)."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    courses = ormar.ManyToMany(Course)
# Exclusion map in pydantic style: list relations use "__all__" per element.
to_exclude = {
    "id": ...,
    "courses": {
        "__all__": {"id": ..., "students": {"__all__": {"id", "studentcourse"}}}
    },
}

# Excludes the pk and the whole courses relation.
exclude_all = {"id": ..., "courses": {"__all__"}}

# Same exclusion expressed in ormar's flattened style (no "__all__").
to_exclude_ormar = {
    "id": ...,
    "courses": {"id": ..., "students": {"id", "studentcourse"}},
}

create_test_database = init_tests(base_ormar_config)
def auto_exclude_id_field(to_exclude: Any) -> Union[Dict, Set]:
    """Recursively mark every ``id`` field for exclusion.

    Dict nodes are mutated in place: each value is processed recursively and an
    ``id: Ellipsis`` entry is added. Any non-dict leaf is replaced by ``{"id"}``.
    """
    if not isinstance(to_exclude, dict):
        return {"id"}
    for key, value in to_exclude.items():
        to_exclude[key] = auto_exclude_id_field(value)
    to_exclude["id"] = Ellipsis
    return to_exclude
def generate_exclude_for_ids(model: Type[ormar.Model]) -> Dict:
    """Build an exclude-dict stripping ``id`` from a model and all related models."""
    related_tree = translate_list_to_dict(model._iterate_related_models())
    return cast(Dict, auto_exclude_id_field(to_exclude=related_tree))
to_exclude_auto = generate_exclude_for_ids(model=Department)
@app.post("/departments/", response_model=Department)
async def create_department(department: Department):
    """Save the department plus every nested related model from the payload."""
    await department.save_related(follow=True, save_all=True)
    return department
@app.get("/departments/{department_name}")
async def get_department(department_name: str):
    """Return the full department tree minus ids (pydantic-style exclude)."""
    department = await Department.objects.select_all(follow=True).get(
        department_name=department_name
    )
    return department.model_dump(exclude=to_exclude)
@app.get("/departments/{department_name}/second")
async def get_department_exclude(department_name: str):
    """Same as above but using ormar's flattened exclusion style."""
    department = await Department.objects.select_all(follow=True).get(
        department_name=department_name
    )
    return department.model_dump(exclude=to_exclude_ormar)
@app.get("/departments/{department_name}/exclude")
async def get_department_exclude_all(department_name: str):
    """Return the department with its pk and whole courses relation excluded."""
    department = await Department.objects.select_all(follow=True).get(
        department_name=department_name
    )
    return department.model_dump(exclude=exclude_all)
@pytest.mark.asyncio
async def test_saving_related_in_fastapi():
    """Nested payload is saved recursively; both exclusion styles agree."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        payload = {
            "department_name": "Ormar",
            "courses": [
                {
                    "course_name": "basic1",
                    "completed": True,
                    "students": [{"name": "Jack"}, {"name": "Abi"}],
                },
                {
                    "course_name": "basic2",
                    "completed": True,
                    "students": [{"name": "Kate"}, {"name": "Miranda"}],
                },
            ],
        }
        response = await client.post("/departments/", json=payload, headers=headers)
        department = Department(**response.json())

        assert department.id is not None
        assert len(department.courses) == 2
        assert department.department_name == "Ormar"
        assert department.courses[0].course_name == "basic1"
        assert department.courses[0].completed
        assert department.courses[1].course_name == "basic2"
        assert department.courses[1].completed

        # Pydantic-style and ormar-style excludes must both strip the ids and
        # reproduce the original payload exactly.
        response = await client.get("/departments/Ormar")
        response2 = await client.get("/departments/Ormar/second")
        assert response.json() == response2.json() == payload

        # Excluding the whole relation leaves only the department name.
        response3 = await client.get("/departments/Ormar/exclude")
        assert response3.json() == {"department_name": "Ormar"}
collerek-ormar-c09209a/tests/test_fastapi/test_recursion_error.py 0000664 0000000 0000000 00000007151 15130200524 0025440 0 ustar 00root root 0000000 0000000 import uuid
from datetime import datetime
from typing import List
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import Depends, FastAPI
from httpx import ASGITransport, AsyncClient
from pydantic import BaseModel, Json
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config, the FastAPI app and default JSON headers.
base_ormar_config = create_config()
router = FastAPI(lifespan=lifespan(base_ormar_config))

headers = {"content-type": "application/json"}
class User(ormar.Model):
    """
    The user model
    """

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    email: str = ormar.String(unique=True, max_length=100)
    username: str = ormar.String(unique=True, max_length=100)
    password: str = ormar.String(unique=True, max_length=100)
    verified: bool = ormar.Boolean(default=False)
    verify_key: str = ormar.String(unique=True, max_length=100, nullable=True)
    # Fix: pass the callable (like default=uuid.uuid4 above) instead of
    # datetime.now(), which would freeze the default to module-import time
    # for every row ever created.
    created_at: datetime = ormar.DateTime(default=datetime.now)

    ormar_config = base_ormar_config.copy(tablename="users")
class UserSession(ormar.Model):
    """
    The user session model
    """

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    user: User = ormar.ForeignKey(User)
    session_key: str = ormar.String(unique=True, max_length=64)
    # Fix: pass the callable so each row gets its own timestamp; datetime.now()
    # would evaluate once at import time.
    created_at: datetime = ormar.DateTime(default=datetime.now)

    ormar_config = base_ormar_config.copy(tablename="user_sessions")
class QuizAnswer(BaseModel):
    """Plain pydantic input model for one quiz answer."""

    right: bool
    answer: str
class QuizQuestion(BaseModel):
    """Plain pydantic input model for one question with its answers."""

    question: str
    answers: List[QuizAnswer]
class QuizInput(BaseModel):
    """Request body for quiz creation; questions end up in a JSON column."""

    title: str
    description: str
    questions: List[QuizQuestion]
class Quiz(ormar.Model):
    """Quiz owned by a user; questions are stored as a JSON blob."""

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    title: str = ormar.String(max_length=100)
    description: str = ormar.String(max_length=300, nullable=True)
    # Fix: pass the callables (like default=uuid.uuid4 above) instead of
    # datetime.now(), which would freeze both timestamps at import time.
    created_at: datetime = ormar.DateTime(default=datetime.now)
    updated_at: datetime = ormar.DateTime(default=datetime.now)
    user_id: uuid.UUID = ormar.UUID(foreign_key=User.id)
    questions: Json = ormar.JSON(nullable=False)

    ormar_config = base_ormar_config.copy(tablename="quiz")
create_test_database = init_tests(base_ormar_config)
async def get_current_user():
    # Test stand-in for authentication: creates and returns a fresh user.
    # NOTE(review): email/username/password are unique, so a second call against
    # the same database would raise a uniqueness violation — fine for one test.
    return await User(email="mail@example.com", username="aa", password="pass").save()
@router.post("/create", response_model=Quiz)
async def create_quiz_lol(
    quiz_input: QuizInput, user: User = Depends(get_current_user)
):
    """Create a quiz owned by the current user from the validated input."""
    new_quiz = Quiz(**quiz_input.model_dump(), user_id=user.id)
    return await new_quiz.save()
@pytest.mark.asyncio
async def test_quiz_creation():
    """Posting a nested quiz payload must serialize to JSON without recursion errors."""
    transport = ASGITransport(app=router)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(router):
        payload = {
            "title": "Some test question",
            "description": "A description",
            "questions": [
                {
                    "question": "Is ClassQuiz cool?",
                    "answers": [
                        {"right": True, "answer": "Yes"},
                        {"right": False, "answer": "No"},
                    ],
                },
                {
                    "question": "Do you like open source?",
                    "answers": [
                        {"right": True, "answer": "Yes"},
                        {"right": False, "answer": "No"},
                        {"right": False, "answer": "Maybe"},
                    ],
                },
            ],
        }
        response = await client.post("/create", json=payload)
        assert response.status_code == 200
collerek-ormar-c09209a/tests/test_fastapi/test_relations_with_nested_defaults.py 0000664 0000000 0000000 00000007643 15130200524 0030510 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
import pytest_asyncio
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config for this module and the FastAPI app bound to it.
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class Country(ormar.Model):
    """Country with a non-trivial default value (the subject of this regression test)."""

    ormar_config = base_ormar_config.copy(tablename="countries")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="Poland")
class Author(ormar.Model):
    """Author with a defaulted rating and an optional country FK."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    rating: int = ormar.Integer(default=0)
    country: Optional[Country] = ormar.ForeignKey(Country)
class Book(ormar.Model):
    """Book linked to an optional author."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture
async def sample_data():
    # Seed one country -> author -> book chain with explicit ids.
    async with base_ormar_config.database:
        country = await Country(id=1, name="USA").save()
        author = await Author(id=1, name="bug", rating=5, country=country).save()
        await Book(
            id=1, author=author, title="Bug caused by default value", year=2021
        ).save()
@app.get("/books/{book_id}", response_model=Book)
async def get_book_by_id(book_id: int):
    """Fetch a book without joining the author relation."""
    return await Book.objects.get(id=book_id)
@app.get("/books_with_author/{book_id}", response_model=Book)
async def get_book_with_author_by_id(book_id: int):
    """Fetch a book with its author joined in."""
    return await Book.objects.select_related("author").get(id=book_id)
@pytest.mark.asyncio
async def test_related_with_defaults(sample_data):
    """Default field values must not leak into serialized related models."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        # Without select_related the author side carries only pks, and the
        # defaulted Country name must not appear anywhere.
        response = await client.get("/books/1")
        assert response.json() == {
            "author": {
                "books": [
                    {
                        "author": {"id": 1},
                        "id": 1,
                        "title": "Bug caused by default value",
                        "year": 2021,
                    }
                ],
                "id": 1,
            },
            "id": 1,
            "title": "Bug caused by default value",
            "year": 2021,
        }

        # With select_related("author") the country tree is included too.
        response = await client.get("/books_with_author/1")
        assert response.json() == {
            "author": {
                "books": [
                    {
                        "author": {"id": 1},
                        "id": 1,
                        "title": "Bug caused by default value",
                        "year": 2021,
                    }
                ],
                "country": {
                    "authors": [
                        {
                            "books": [
                                {
                                    "author": {"id": 1},
                                    "id": 1,
                                    "title": "Bug caused by " "default value",
                                    "year": 2021,
                                }
                            ],
                            "country": {"id": 1},
                            "id": 1,
                            "name": "bug",
                            "rating": 5,
                        }
                    ],
                    "id": 1,
                },
                "id": 1,
                "name": "bug",
                "rating": 5,
            },
            "id": 1,
            "title": "Bug caused by default value",
            "year": 2021,
        }
collerek-ormar-c09209a/tests/test_fastapi/test_schema_not_allowed_params.py 0000664 0000000 0000000 00000001221 15130200524 0027400 0 ustar 00root root 0000000 0000000 import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Model used only to inspect the generated JSON schema."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    contents: str = ormar.Text()
create_test_database = init_tests(base_ormar_config)
def test_schema_not_allowed():
    """Generated JSON schema must not expose underscored (internal) keys."""
    schema = Author.model_json_schema()
    # Index directly so a missing "properties" fails with a clear KeyError
    # instead of the original AttributeError on None from .get().
    for field_schema in schema["properties"].values():
        for key in field_schema.keys():
            assert "_" not in key, f"Found illegal field in openapi schema: {key}"
collerek-ormar-c09209a/tests/test_fastapi/test_skip_reverse_models.py 0000664 0000000 0000000 00000010662 15130200524 0026263 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config, FastAPI app and default JSON headers for requests.
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))

headers = {"content-type": "application/json"}
class Author(ormar.Model):
    """Author referenced from Post via a reverse-skipped FK."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)
class Category(ormar.Model):
    """Category referenced from Post via a reverse-skipped m2m."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
class Category2(Category):
    """Same model but rejecting unknown payload fields (extra='forbid')."""

    model_config = dict(extra="forbid")
class Post(ormar.Model):
    """Post whose relations skip the reverse side (no posts field on Category/Author)."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories = ormar.ManyToMany(Category, skip_reverse=True)
    author: Optional[Author] = ormar.ForeignKey(Author, skip_reverse=True)
create_test_database = init_tests(base_ormar_config)
@app.post("/categories/forbid/", response_model=Category2)
async def create_category_forbid(category: Category2):  # pragma: no cover
    # Body intentionally empty: payloads carrying extra fields fail Category2's
    # extra="forbid" validation (422) before this handler ever runs.
    pass
@app.post("/categories/", response_model=Category)
async def create_category(category: Category):
    """Save the category together with any related models present in the payload."""
    await category.save()
    await category.save_related(follow=True, save_all=True)
    return category
@app.post("/posts/", response_model=Post)
async def create_post(post: Post):
    """Persist a post, its author and its m2m categories."""
    if post.author:
        await post.author.save()
    await post.save()
    await post.save_related(follow=True, save_all=True)
    # Iterate over a snapshot — presumably because add() touches the relation
    # list while we loop (same intent as the original copy-comprehension).
    for category in list(post.categories):
        await post.categories.add(category)
    return post
@app.get("/categories/", response_model=List[Category])
async def get_categories():
    """List categories; the reverse posts relation is joined but skipped on output."""
    return await Category.objects.select_related("posts").all()
@app.get("/posts/", response_model=List[Post])
async def get_posts():
    """List posts with categories and author preloaded."""
    return await Post.objects.select_related(["categories", "author"]).all()
@pytest.mark.asyncio
async def test_queries():
    """skip_reverse fields are ignored on input and hidden on output."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client as client, LifespanManager(app):
        right_category = {"name": "Test category"}
        wrong_category = {"name": "Test category2", "posts": [{"title": "Test Post"}]}

        # cannot add posts if skipped, will be ignored (with extra=ignore by default)
        response = await client.post(
            "/categories/", json=wrong_category, headers=headers
        )
        assert response.status_code == 200
        response = await client.get("/categories/")
        assert response.status_code == 200
        assert "posts" not in response.json()
        categories = [Category(**x) for x in response.json()]
        assert categories[0] is not None
        assert categories[0].name == "Test category2"

        response = await client.post(
            "/categories/", json=right_category, headers=headers
        )
        assert response.status_code == 200
        response = await client.get("/categories/")
        assert response.status_code == 200
        categories = [Category(**x) for x in response.json()]
        assert categories[1] is not None
        assert categories[1].name == "Test category"

        right_post = {
            "title": "ok post",
            "author": {"first_name": "John", "last_name": "Smith"},
            "categories": [{"name": "New cat"}],
        }
        response = await client.post("/posts/", json=right_post, headers=headers)
        assert response.status_code == 200

        # NOTE(review): mutates the shared model config in-place, presumably so
        # the response below can be re-parsed into Category — confirm intent.
        Category.model_config["extra"] = "allow"
        response = await client.get("/posts/")
        assert response.status_code == 200
        posts = [Post(**x) for x in response.json()]
        assert posts[0].title == "ok post"
        assert posts[0].author.first_name == "John"
        assert posts[0].categories[0].name == "New cat"

        wrong_category = {"name": "Test category3", "posts": [{"title": "Test Post"}]}
        # cannot add posts if skipped, will be error with extra forbid
        assert Category2.model_config["extra"] == "forbid"
        response = await client.post("/categories/forbid/", json=wrong_category)
        assert response.status_code == 422
collerek-ormar-c09209a/tests/test_fastapi/test_wekref_exclusion.py 0000664 0000000 0000000 00000010152 15130200524 0025565 0 ustar 00root root 0000000 0000000 from typing import List, Optional
from uuid import UUID, uuid4
import ormar
import pydantic
import pytest
from asgi_lifespan import LifespanManager
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from tests.lifespan import init_tests, lifespan
from tests.settings import create_config
# Shared database config for this module and the FastAPI app bound to it.
base_ormar_config = create_config()
app = FastAPI(lifespan=lifespan(base_ormar_config))
class OtherThing(ormar.Model):
    """Parent side of the relation; Things point back at it."""

    ormar_config = base_ormar_config.copy(tablename="other_things")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    ot_contents: str = ormar.Text(default="")
class Thing(ormar.Model):
    """Child model with an optional JSON column and an FK to OtherThing."""

    ormar_config = base_ormar_config.copy(tablename="things")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    js: pydantic.Json = ormar.JSON(nullable=True)
    other_thing: Optional[OtherThing] = ormar.ForeignKey(OtherThing, nullable=True)
create_test_database = init_tests(base_ormar_config)
@app.post("/test/1")
async def post_test_1():
    """Seed one OtherThing with three Things attached to it."""
    # don't split initialization and attribute assignment
    ot = await OtherThing(ot_contents="otc").save()
    await Thing(other_thing=ot, name="t1").save()
    await Thing(other_thing=ot, name="t2").save()
    await Thing(other_thing=ot, name="t3").save()
    # if you do not care about returned object you can even go with bulk_create
    # all of them are created in one transaction
    # things = [Thing(other_thing=ot, name='t1'),
    #           Thing(other_thing=ot, name="t2"),
    #           Thing(other_thing=ot, name="t3")]
    # await Thing.objects.bulk_create(things)
@app.get("/test/2", response_model=List[Thing])
async def get_test_2():
    """Return the Things of the single OtherThing, detaching the back-reference."""
    # if you only query for one use get or first
    ot = await OtherThing.objects.get()
    ts = await ot.things.all()
    # specifically null out the relation on things before return
    for t in ts:
        t.remove(ot, name="other_thing")
    return ts
@app.get("/test/3", response_model=List[Thing])
async def get_test_3():
    """Serialize the things without the relation while the parent is in scope."""
    other = await OtherThing.objects.select_related("things").get()
    # exclude the unwanted field here so it is never handed to fastapi
    return [thing.model_dump(exclude={"other_thing"}) for thing in other.things]
@app.get("/test/4", response_model=List[Thing], response_model_exclude={"other_thing"})
async def get_test_4():
    """Query from the Thing side; fastapi drops other_thing via response_model_exclude."""
    parent = await OtherThing.objects.get()
    return await Thing.objects.all(other_thing=parent)
@app.get("/get_ot/", response_model=OtherThing)
async def get_ot():
    """Return the single stored OtherThing."""
    return await OtherThing.objects.get()
# more real life (usually) is not getting some random OT and get it's Things
# but query for a specific one by some kind of id
@app.get(
    "/test/5/{thing_id}",
    response_model=List[Thing],
    response_model_exclude={"other_thing"},
)
async def get_test_5(thing_id: UUID):
    """Fetch the things belonging to a specific OtherThing id."""
    return await Thing.objects.all(other_thing__id=thing_id)
@app.get(
    "/test/error", response_model=List[Thing], response_model_exclude={"other_thing"}
)
async def get_weakref():
    """Regression path: query through a relation of an element taken from a list."""
    # Keep the full result list alive (as the original did) while the relation
    # of its first element is queried.
    all_others = await OtherThing.objects.all()
    first = all_others[0]
    return await first.things.all()
@pytest.mark.asyncio
async def test_endpoints():
    """Every exclusion strategy above must return the three seeded things."""
    transport = ASGITransport(app=app)
    client = AsyncClient(transport=transport, base_url="http://testserver")
    async with client, LifespanManager(app):
        resp = await client.post("/test/1")
        assert resp.status_code == 200

        resp2 = await client.get("/test/2")
        assert resp2.status_code == 200
        assert len(resp2.json()) == 3

        resp3 = await client.get("/test/3")
        assert resp3.status_code == 200
        assert len(resp3.json()) == 3

        resp4 = await client.get("/test/4")
        assert resp4.status_code == 200
        assert len(resp4.json()) == 3

        ot = OtherThing(**(await client.get("/get_ot/")).json())
        resp5 = await client.get(f"/test/5/{ot.id}")
        assert resp5.status_code == 200
        assert len(resp5.json()) == 3

        # Regression endpoint: must not crash on the weakly-referenced parent.
        resp6 = await client.get("/test/error")
        assert resp6.status_code == 200
        assert len(resp6.json()) == 3
collerek-ormar-c09209a/tests/test_hashes/ 0000775 0000000 0000000 00000000000 15130200524 0020425 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_hashes/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022524 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_hashes/test_many_to_many.py 0000664 0000000 0000000 00000005675 15130200524 0024545 0 ustar 00root root 0000000 0000000 import sqlite3
from typing import ForwardRef, List, Optional
import asyncpg
import ormar
import pymysql
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class Category(ormar.Model):
    """Target of Post's default (auto-through) many-to-many."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
class Author(ormar.Model):
    """Target of Post's custom-through many-to-many."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
class Post(ormar.Model):
    """Post with a default m2m (categories) and a custom-through m2m (authors)."""

    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories: Optional[List[Category]] = ormar.ManyToMany(Category)
    # Through model referenced via ForwardRef; resolved below.
    authors: Optional[List[Author]] = ormar.ManyToMany(
        Author, through=ForwardRef("AuthorXPosts")
    )
class AuthorXPosts(ormar.Model):
    """Custom through model with a uniqueness constraint on (author, post)."""

    ormar_config = base_ormar_config.copy(
        tablename="authors_x_posts", constraints=[ormar.UniqueColumns("author", "post")]
    )

    id: int = ormar.Integer(primary_key=True)
    author: Optional[int] = ormar.Integer(default=None)
    post: Optional[int] = ormar.Integer(default=None)
# Resolve the ForwardRef("AuthorXPosts") used in Post.authors.
Post.update_forward_refs()

create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function", autouse=True)
async def cleanup():
    """Wipe all rows created by a test: through tables first, then the models."""
    yield
    async with base_ormar_config.database:
        await Post.ormar_config.model_fields["categories"].through.objects.delete(
            each=True
        )
        await Post.ormar_config.model_fields["authors"].through.objects.delete(
            each=True
        )
        await Post.objects.delete(each=True)
        await Category.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_adding_same_m2m_model_twice():
    """Adding the same related model twice on the default through table."""
    # NOTE(review): the doubled `async with` looks redundant — confirm whether
    # the nested scope is intentional with force_rollback.
    async with base_ormar_config.database:
        async with base_ormar_config.database:
            post = await Post.objects.create(title="Hello, M2M")
            news = await Category(name="News").save()
            await post.categories.add(news)
            # A second add of the same instance must not duplicate the result.
            await post.categories.add(news)
            categories = await post.categories.all()
            assert categories == [news]
@pytest.mark.asyncio
async def test_adding_same_m2m_model_twice_with_unique():
    """With a unique (author, post) constraint the second add must fail."""
    async with base_ormar_config.database:
        async with base_ormar_config.database:
            post = await Post.objects.create(title="Hello, M2M")
            redactor = await Author(name="News").save()
            await post.authors.add(redactor)
            # Each supported backend raises its own integrity-error type.
            with pytest.raises(
                (
                    sqlite3.IntegrityError,
                    pymysql.IntegrityError,
                    asyncpg.exceptions.UniqueViolationError,
                )
            ):
                await post.authors.add(redactor)
collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/ 0000775 0000000 0000000 00000000000 15130200524 0026373 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0030472 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/test_geting_pydantic_models.py0000664 0000000 0000000 00000025566 15130200524 0034535 0 ustar 00root root 0000000 0000000 from typing import ForwardRef, List, Optional
import ormar
import pydantic
from pydantic_core import PydanticUndefined
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class SelfRef(ormar.Model):
    """Self-referencing model (parent/children) declared via ForwardRef."""

    ormar_config = base_ormar_config.copy(tablename="self_refs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="selfref")
    parent = ormar.ForeignKey(ForwardRef("SelfRef"), related_name="children")


# Resolve the self-reference now that the class exists.
SelfRef.update_forward_refs()
class Category(ormar.Model):
    """Parent model whose get_pydantic() output is inspected below."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Item(ormar.Model):
    """Item with a defaulted name and an optional category FK."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="test")
    category: Optional[Category] = ormar.ForeignKey(Category, nullable=True)
class MutualA(ormar.Model):
    """First half of a mutually-referencing pair (FK declared via ForwardRef)."""

    ormar_config = base_ormar_config.copy(tablename="mutual_a")

    id: int = ormar.Integer(primary_key=True)
    mutual_b = ormar.ForeignKey(ForwardRef("MutualB"), related_name="mutuals_a")
class MutualB(ormar.Model):
    """Second half of the pair; can reference MutualA directly."""

    ormar_config = base_ormar_config.copy(tablename="mutual_b")

    id: int = ormar.Integer(primary_key=True)
    name = ormar.String(max_length=100, default="test")
    mutual_a = ormar.ForeignKey(MutualA, related_name="mutuals_b")
# Resolve MutualA's forward reference now that MutualB exists.
MutualA.update_forward_refs()

create_test_database = init_tests(base_ormar_config)
def test_getting_pydantic_model():
    """get_pydantic() builds a plain pydantic model mirroring Category."""
    PydanticCategory = Category.get_pydantic()
    assert issubclass(PydanticCategory, pydantic.BaseModel)
    assert {*PydanticCategory.model_fields.keys()} == {"items", "id", "name"}

    # pk field: optional with default None, typed int in the core schema.
    assert not PydanticCategory.model_fields["id"].is_required()
    assert (
        PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["id"]["schema"][
            "schema"
        ]["schema"]["type"]
        == "int"
    )
    assert PydanticCategory.model_fields["id"].default is None

    # Plain column: required str with no default.
    assert PydanticCategory.model_fields["name"].is_required()
    assert (
        PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["name"]["schema"][
            "type"
        ]
        == "str"
    )
    assert PydanticCategory.model_fields["name"].default == PydanticUndefined

    # Dig the nested, auto-generated Item model out of the core schema.
    PydanticItem = PydanticCategory.__pydantic_core_schema__["schema"]["fields"][
        "items"
    ]["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert (
        PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["items"][
            "schema"
        ]["schema"]["schema"]["type"]
        == "list"
    )
    assert (
        PydanticCategory.model_fields["items"].annotation
        == Optional[List[PydanticItem]]
    )
    assert issubclass(PydanticItem, pydantic.BaseModel)
    assert not PydanticItem.model_fields["name"].is_required()
    assert PydanticItem.model_fields["name"].default == "test"
    assert PydanticItem.model_fields["name"].annotation == Optional[str]
    # The back-reference to category is not included in the nested model.
    assert "category" not in PydanticItem.model_fields
def test_initializing_pydantic_model():
    """The generated pydantic model validates the same payloads as ormar."""
    data = {
        "id": 1,
        "name": "test",
        "items": [{"id": 1, "name": "test_i1"}, {"id": 2, "name": "test_i2"}],
    }
    PydanticCategory = Category.get_pydantic()
    ormar_cat = Category(**data)
    assert ormar_cat.model_dump() == data
    cat = PydanticCategory(**data)
    assert cat.model_dump() == data

    # A missing relation defaults to None on the pydantic copy.
    data = {"id": 1, "name": "test"}
    cat = PydanticCategory(**data)
    assert cat.model_dump() == {**data, "items": None}
def test_getting_pydantic_model_include():
    """A plain include set keeps only the listed top-level fields."""
    PydanticCategory = Category.get_pydantic(include={"id", "name"})
    assert len(PydanticCategory.model_fields) == 2
    assert "items" not in PydanticCategory.model_fields
def test_getting_pydantic_model_nested_include_set():
    """Double-underscore include paths select nested fields to keep."""
    PydanticCategory = Category.get_pydantic(include={"id", "items__id"})
    assert len(PydanticCategory.model_fields) == 2
    assert "name" not in PydanticCategory.model_fields
    # Dig the nested auto-generated Item model out of the core schema.
    PydanticItem = PydanticCategory.__pydantic_core_schema__["schema"]["fields"][
        "items"
    ]["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert len(PydanticItem.model_fields) == 1
    assert "id" in PydanticItem.model_fields
def test_getting_pydantic_model_nested_include_dict():
    """Include given as field -> subset dict behaves like the set form."""
    PydanticCategory = Category.get_pydantic(include={"id": ..., "items": {"id"}})
    assert len(PydanticCategory.model_fields) == 2
    assert "name" not in PydanticCategory.model_fields
    # dig the generated item class out of the pydantic core schema
    items_field = PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["items"]
    PydanticItem = items_field["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert set(PydanticItem.model_fields) == {"id"}
def test_getting_pydantic_model_nested_include_nested_dict():
    """Fully dict-shaped include (ellipsis leaves) works the same as the set form."""
    PydanticCategory = Category.get_pydantic(include={"id": ..., "items": {"id": ...}})
    assert len(PydanticCategory.model_fields) == 2
    assert "name" not in PydanticCategory.model_fields
    # dig the generated item class out of the pydantic core schema
    items_field = PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["items"]
    PydanticItem = items_field["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert set(PydanticItem.model_fields) == {"id"}
def test_getting_pydantic_model_include_exclude():
    """When the same nested field is both included and excluded, exclude wins."""
    PydanticCategory = Category.get_pydantic(
        include={"id": ..., "items": {"id", "name"}}, exclude={"items__name"}
    )
    assert len(PydanticCategory.model_fields) == 2
    assert "name" not in PydanticCategory.model_fields
    # dig the generated item class out of the pydantic core schema
    items_field = PydanticCategory.__pydantic_core_schema__["schema"]["fields"]["items"]
    PydanticItem = items_field["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert set(PydanticItem.model_fields) == {"id"}
def test_getting_pydantic_model_exclude():
    """Nested exclude removes the child field but keeps the relation itself."""
    PydanticItem = Item.get_pydantic(exclude={"category__name"})
    assert len(PydanticItem.model_fields) == 3
    assert "category" in PydanticItem.model_fields
    # dig the generated category class out of the pydantic core schema
    category_field = PydanticItem.__pydantic_core_schema__["schema"]["fields"]["category"]
    PydanticCategory = category_field["schema"]["schema"]["schema"]["cls"]
    assert len(PydanticCategory.model_fields) == 1
    assert "name" not in PydanticCategory.model_fields
def test_getting_pydantic_model_exclude_dict():
    """Dict-form exclude drops a top-level field and a nested child field at once."""
    PydanticItem = Item.get_pydantic(exclude={"id": ..., "category": {"name"}})
    assert len(PydanticItem.model_fields) == 2
    assert "category" in PydanticItem.model_fields
    assert "id" not in PydanticItem.model_fields
    # dig the generated category class out of the pydantic core schema
    category_field = PydanticItem.__pydantic_core_schema__["schema"]["fields"]["category"]
    PydanticCategory = category_field["schema"]["schema"]["schema"]["cls"]
    assert len(PydanticCategory.model_fields) == 1
    assert "name" not in PydanticCategory.model_fields
def test_getting_pydantic_model_self_ref():
    # Self-referencing model: both relation fields must be generated, with the
    # nested copies stripped of their own relations to break the cycle.
    PydanticSelfRef = SelfRef.get_pydantic()
    assert len(PydanticSelfRef.model_fields) == 4
    assert set(PydanticSelfRef.model_fields.keys()) == {
        "id",
        "name",
        "parent",
        "children",
    }
    # "parent" is stored as a definition reference in the core schema;
    # resolve it against the schema's "definitions" list to get the class.
    inner_self_ref_id = PydanticSelfRef.__pydantic_core_schema__["schema"]["schema"][
        "fields"
    ]["parent"]["schema"]["schema"]["schema"]["schema_ref"]
    InnerSelf = next(
        (
            x
            for x in PydanticSelfRef.__pydantic_core_schema__["definitions"]
            if x["ref"] == inner_self_ref_id
        )
    )["cls"]
    # nested copy keeps only plain columns, no relations
    assert len(InnerSelf.model_fields) == 2
    assert set(InnerSelf.model_fields.keys()) == {"id", "name"}
    # "children" items resolve the same way (list relation -> items_schema)
    inner_self_ref_id2 = PydanticSelfRef.__pydantic_core_schema__["schema"]["schema"][
        "fields"
    ]["children"]["schema"]["schema"]["schema"]["items_schema"]["schema_ref"]
    InnerSelf2 = next(
        (
            x
            for x in PydanticSelfRef.__pydantic_core_schema__["definitions"]
            if x["ref"] == inner_self_ref_id2
        )
    )["cls"]
    assert len(InnerSelf2.model_fields) == 2
    assert set(InnerSelf2.model_fields.keys()) == {"id", "name"}
def test_getting_pydantic_model_self_ref_exclude():
    # Exclude applied to one branch of a self-reference must not leak to the
    # other branch, and must yield distinct generated classes per branch.
    PydanticSelfRef = SelfRef.get_pydantic(exclude={"children": {"name"}})
    assert len(PydanticSelfRef.model_fields) == 4
    assert set(PydanticSelfRef.model_fields.keys()) == {
        "id",
        "name",
        "parent",
        "children",
    }
    # the untouched "parent" copy still carries both columns
    InnerSelf = PydanticSelfRef.__pydantic_core_schema__["schema"]["fields"]["parent"][
        "schema"
    ]["schema"]["schema"]["cls"]
    assert len(InnerSelf.model_fields) == 2
    assert set(InnerSelf.model_fields.keys()) == {"id", "name"}
    # PydanticSelfRef = PydanticSelfRef.model_fields["children"].type_
    # the excluded "children" copy lost "name"
    PydanticSelfRefChildren = PydanticSelfRef.__pydantic_core_schema__["schema"][
        "fields"
    ]["children"]["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert len(PydanticSelfRefChildren.model_fields) == 1
    assert set(PydanticSelfRefChildren.model_fields.keys()) == {"id"}
    # three distinct generated classes overall
    assert PydanticSelfRef != PydanticSelfRefChildren
    assert InnerSelf != PydanticSelfRefChildren
def test_getting_pydantic_model_mutual_rels():
    # Mutually-related models: both relation fields resolve, via definition
    # refs, to the SAME trimmed copy of the related class.
    MutualAPydantic = MutualA.get_pydantic()
    assert len(MutualAPydantic.model_fields) == 3
    assert set(MutualAPydantic.model_fields.keys()) == {"id", "mutual_b", "mutuals_b"}
    # resolve the FK relation ("mutual_b") through its schema_ref
    mutual_ref_1 = MutualAPydantic.__pydantic_core_schema__["schema"]["schema"][
        "fields"
    ]["mutual_b"]["schema"]["schema"]["schema"]["schema_ref"]
    MutualB1 = next(
        (
            x
            for x in MutualAPydantic.__pydantic_core_schema__["definitions"]
            if x["ref"] == mutual_ref_1
        )
    )["cls"]
    # resolve the list relation ("mutuals_b") through its items_schema ref
    mutual_ref_2 = MutualAPydantic.__pydantic_core_schema__["schema"]["schema"][
        "fields"
    ]["mutuals_b"]["schema"]["schema"]["schema"]["items_schema"]["schema_ref"]
    MutualB2 = next(
        (
            x
            for x in MutualAPydantic.__pydantic_core_schema__["definitions"]
            if x["ref"] == mutual_ref_2
        )
    )["cls"]
    # both copies are trimmed to plain columns and are the same class
    assert len(MutualB1.model_fields) == 2
    assert set(MutualB1.model_fields.keys()) == {"id", "name"}
    assert len(MutualB2.model_fields) == 2
    assert set(MutualB2.model_fields.keys()) == {"id", "name"}
    assert MutualB1 == MutualB2
def test_getting_pydantic_model_mutual_rels_exclude():
    """Exclude applies only to the relation it names, not to the sibling one."""
    MutualAPydantic = MutualA.get_pydantic(exclude={"mutual_b": {"name"}})
    assert len(MutualAPydantic.model_fields) == 3
    assert set(MutualAPydantic.model_fields.keys()) == {"id", "mutual_b", "mutuals_b"}
    fields = MutualAPydantic.__pydantic_core_schema__["schema"]["fields"]
    # FK copy lost "name"; the list copy kept it -> two distinct classes
    MutualB1 = fields["mutual_b"]["schema"]["schema"]["schema"]["cls"]
    MutualB2 = fields["mutuals_b"]["schema"]["schema"]["schema"]["items_schema"]["cls"]
    assert set(MutualB1.model_fields.keys()) == {"id"}
    assert set(MutualB2.model_fields.keys()) == {"id", "name"}
    assert MutualB1 != MutualB2
collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/test_inheritance_concrete.py 0000664 0000000 0000000 00000045137 15130200524 0034171 0 ustar 00root root 0000000 0000000 import datetime
from collections import Counter
from typing import Optional
import ormar
import ormar.fields.constraints
import pydantic
import pytest
import sqlalchemy as sa
from ormar import ModelDefinitionError
from ormar.exceptions import ModelError
from ormar.models.metaclass import get_constraint_copy
from ormar.relations.relation_proxy import RelationProxy
from pydantic import computed_field
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class AuditModel(ormar.Model):
    """Abstract base adding audit columns plus a computed ``audit`` field."""

    ormar_config = base_ormar_config.copy(abstract=True)

    # who created / last modified the row
    created_by: str = ormar.String(max_length=100)
    updated_by: str = ormar.String(max_length=100, default="Sam")

    @computed_field
    def audit(self) -> str:  # pragma: no cover
        return f"{self.created_by} {self.updated_by}"
class DateFieldsModelNoSubclass(ormar.Model):
    """Concrete (non-abstract) date model; subclassing it is expected to fail."""

    ormar_config = base_ormar_config.copy(tablename="test_date_models")

    date_id: int = ormar.Integer(primary_key=True)
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
    updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class DateFieldsModel(ormar.Model):
    """Abstract base with aliased date columns and table-level constraints."""

    ormar_config = base_ormar_config.copy(
        abstract=True,
        constraints=[
            # constraints reference the DB aliases, not the python field names
            ormar.fields.constraints.UniqueColumns(
                "creation_date",
                "modification_date",
            ),
            ormar.fields.constraints.CheckColumns(
                "creation_date <= modification_date",
            ),
        ],
    )

    # python names differ from DB column names via ``name=`` aliases
    created_date: datetime.datetime = ormar.DateTime(
        default=datetime.datetime.now, name="creation_date"
    )
    updated_date: datetime.datetime = ormar.DateTime(
        default=datetime.datetime.now, name="modification_date"
    )
class Category(DateFieldsModel, AuditModel):
    """Concrete model combining both abstract bases; overrides ``audit``."""

    ormar_config = base_ormar_config.copy(
        tablename="categories",
        constraints=[ormar.fields.constraints.UniqueColumns("name", "code")],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    code: int = ormar.Integer()

    @computed_field
    def code_name(self) -> str:
        return f"{self.code}:{self.name}"

    # overrides the inherited computed field from AuditModel
    @computed_field
    def audit(self) -> str:
        return f"{self.created_by} {self.updated_by}"
class Subject(DateFieldsModel):
    """Concrete child of the date base with an FK to Category."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    category: Optional[Category] = ormar.ForeignKey(Category)
class Person(ormar.Model):
    """Plain concrete model used as the FK / M2M target in the vehicle tests."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Car(ormar.Model):
    """Abstract base declaring two FK relations inherited by Truck and Bus."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)
    owner: Person = ormar.ForeignKey(Person)
    co_owner: Person = ormar.ForeignKey(Person, related_name="coowned")
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Truck(Car):
    # NOTE(review): uses a bare OrmarConfig() instead of base_ormar_config.copy()
    # like the sibling models — presumably metadata/database/tablename are filled
    # in from the abstract parent; confirm this is intentional.
    ormar_config = ormar.OrmarConfig()

    max_capacity: int = ormar.Integer()
class Bus(Car):
    """Concrete Car child overriding the ``owner`` relation's related_name."""

    ormar_config = base_ormar_config.copy(tablename="buses")

    owner: Person = ormar.ForeignKey(Person, related_name="buses")
    max_persons: int = ormar.Integer()
class Car2(ormar.Model):
    """Abstract base declaring an FK plus a ManyToMany relation."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)
    owner: Person = ormar.ForeignKey(Person, related_name="owned")
    co_owners: RelationProxy[Person] = ormar.ManyToMany(Person, related_name="coowned")
    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Truck2(Car2):
    """Concrete Car2 child; inherits the FK and M2M relations unchanged."""

    ormar_config = base_ormar_config.copy(tablename="trucks2")

    max_capacity: int = ormar.Integer()
class Bus2(Car2):
    """Concrete Car2 child; inherits the FK and M2M relations unchanged."""

    ormar_config = base_ormar_config.copy(tablename="buses2")

    max_persons: int = ormar.Integer()
class ImmutablePerson(Person):
    # frozen=True makes instances immutable; validate_assignment is turned off
    # to check that a custom model_config merges with ormar's defaults.
    model_config = dict(frozen=True, validate_assignment=False)
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
def test_init_of_abstract_model() -> None:
    # Abstract models cannot be instantiated directly.
    with pytest.raises(ModelError):
        DateFieldsModel()
def test_duplicated_related_name_on_different_model() -> None:
    # related_name="buses" is already taken by Bus on the same Person model,
    # so declaring it again on another model must be rejected.
    with pytest.raises(ModelDefinitionError):

        class Bus3(Car2):  # pragma: no cover
            ormar_config = ormar.OrmarConfig(tablename="buses3")

            owner: Person = ormar.ForeignKey(Person, related_name="buses")
            max_persons: int = ormar.Integer()
def test_field_redefining_in_concrete_models() -> None:
    # A concrete child of the abstract base may re-declare an inherited column
    # with a different python type, as long as the DB alias is preserved.
    class RedefinedField(DateFieldsModel):
        ormar_config = base_ormar_config.copy(tablename="redefines")

        id: int = ormar.Integer(primary_key=True)
        created_date: str = ormar.String(
            max_length=200,
            name="creation_date",
        )  # type: ignore

    changed_field = RedefinedField.ormar_config.model_fields["created_date"]
    # the parent's default is dropped, the alias is kept
    assert changed_field.ormar_default is None
    assert changed_field.get_alias() == "creation_date"
    assert any(
        x.name == "creation_date" for x in RedefinedField.ormar_config.table.columns
    )
    # the column type follows the new declaration (String, not DateTime)
    assert isinstance(
        RedefinedField.ormar_config.table.columns["creation_date"].type,
        sa.sql.sqltypes.String,
    )
def test_model_subclassing_that_redefines_constraints_column_names() -> None:
    # Redefining the column WITHOUT keeping the "creation_date" alias breaks
    # the parent's constraints that reference that alias -> definition error.
    with pytest.raises(ModelDefinitionError):

        class WrongField2(DateFieldsModel):  # pragma: no cover
            ormar_config = base_ormar_config.copy(tablename="wrongs")

            id: int = ormar.Integer(primary_key=True)
            created_date: str = ormar.String(max_length=200)  # type: ignore
def test_model_subclassing_non_abstract_raises_error() -> None:
    # Only abstract models may be subclassed; inheriting from a concrete
    # model raises a definition error.
    with pytest.raises(ModelDefinitionError):

        class WrongField2(DateFieldsModelNoSubclass):  # pragma: no cover
            ormar_config = base_ormar_config.copy(tablename="wrongs")

            id: int = ormar.Integer(primary_key=True)
def test_params_are_inherited() -> None:
    """Metadata, database, property fields and constraints all flow down."""
    config = Category.ormar_config
    assert config.metadata == base_ormar_config.metadata
    assert config.database == base_ormar_config.database
    assert len(config.property_fields) == 2
    # own UniqueColumns + inherited UniqueColumns/CheckColumns, no IndexColumns
    constraint_types = Counter(type(c) for c in config.constraints)
    assert constraint_types[ormar.fields.constraints.UniqueColumns] == 2
    assert constraint_types[ormar.fields.constraints.IndexColumns] == 0
    assert constraint_types[ormar.fields.constraints.CheckColumns] == 1
def round_date_to_seconds(
    date: datetime.datetime,
) -> datetime.datetime:  # pragma: no cover
    """Round a datetime to whole seconds (>= half a second rounds up)."""
    if date.microsecond >= 500000:
        date += datetime.timedelta(seconds=1)
    return date.replace(microsecond=0)
@pytest.mark.asyncio
async def test_fields_inherited_from_mixin() -> None:
    """Columns from both abstract bases land on children, DB aliases included."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            cat = await Category(
                name="Foo", code=123, created_by="Sam", updated_by="Max"
            ).save()
            sub = await Subject(name="Bar", category=cat).save()
            # python-side names vs DB aliases of the date base's columns
            mixin_columns = ["created_date", "updated_date"]
            mixin_db_columns = ["creation_date", "modification_date"]
            mixin2_columns = ["created_by", "updated_by"]
            # date columns exist on both children and got their defaults
            assert all(
                field in Category.ormar_config.model_fields for field in mixin_columns
            )
            assert cat.created_date is not None
            assert cat.updated_date is not None
            assert all(
                field in Subject.ormar_config.model_fields for field in mixin_columns
            )
            # computed fields (own + overridden) work on the instance
            assert cat.code_name == "123:Foo"
            assert cat.audit == "Sam Max"
            assert sub.created_date is not None
            assert sub.updated_date is not None
            # audit columns only exist where AuditModel is a base
            assert all(
                field in Category.ormar_config.model_fields for field in mixin2_columns
            )
            assert all(
                field not in Subject.ormar_config.model_fields
                for field in mixin2_columns
            )
            # the physical tables use the DB aliases, not the python names
            inspector = sa.inspect(base_ormar_config.engine)
            assert "categories" in inspector.get_table_names()
            table_columns = [x.get("name") for x in inspector.get_columns("categories")]
            assert all(
                col in table_columns for col in mixin_db_columns
            )  # + mixin2_columns)
            assert "subjects" in inspector.get_table_names()
            table_columns = [x.get("name") for x in inspector.get_columns("subjects")]
            assert all(col in table_columns for col in mixin_db_columns)
            # ordering / excluding by inherited (aliased) fields works in queries
            sub2 = (
                await Subject.objects.select_related("category")
                .order_by("-created_date")
                .exclude_fields("updated_date")
                .get()
            )
            assert round_date_to_seconds(sub2.created_date) == round_date_to_seconds(
                sub.created_date
            )
            assert sub2.category is not None
            assert sub2.category.updated_date is not None
            assert round_date_to_seconds(
                sub2.category.created_date
            ) == round_date_to_seconds(cat.created_date)
            assert sub2.updated_date is None
            assert sub2.category.created_by == "Sam"
            assert sub2.category.updated_by == cat.updated_by
            # the same works with prefetch_related and a nested exclude dict
            sub3 = (
                await Subject.objects.prefetch_related("category")
                .order_by("-created_date")
                .exclude_fields({"updated_date": ..., "category": {"updated_date"}})
                .get()
            )
            assert round_date_to_seconds(sub3.created_date) == round_date_to_seconds(
                sub.created_date
            )
            assert sub3.category.updated_date is None
            assert round_date_to_seconds(
                sub3.category.created_date
            ) == round_date_to_seconds(cat.created_date)
            assert sub3.updated_date is None
            assert sub3.category.created_by == "Sam"
            assert sub3.category.updated_by == cat.updated_by
@pytest.mark.asyncio
async def test_inheritance_with_relation() -> None:
    """FKs declared on the abstract Car work independently on Truck and Bus."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sam = await Person(name="Sam").save()
            joe = await Person(name="Joe").save()
            await Truck(
                name="Shelby wanna be", max_capacity=1400, owner=sam, co_owner=joe
            ).save()
            await Bus(name="Unicorn", max_persons=50, owner=sam, co_owner=joe).save()
            # each subclass resolves its own owner / co_owner relations
            shelby = await Truck.objects.select_related(["owner", "co_owner"]).get()
            assert shelby.name == "Shelby wanna be"
            assert shelby.owner.name == "Sam"
            assert shelby.co_owner.name == "Joe"
            assert shelby.max_capacity == 1400
            unicorn = await Bus.objects.select_related(["owner", "co_owner"]).get()
            assert unicorn.name == "Unicorn"
            assert unicorn.owner.name == "Sam"
            assert unicorn.co_owner.name == "Joe"
            assert unicorn.max_persons == 50
            # reverse relations get distinct names per concrete subclass
            joe_check = await Person.objects.select_related(
                ["coowned_trucks", "coowned_buses"]
            ).get(name="Joe")
            assert joe_check.pk == joe.pk
            assert joe_check.coowned_trucks[0] == shelby
            assert joe_check.coowned_trucks[0].created_date is not None
            assert joe_check.coowned_buses[0] == unicorn
            assert joe_check.coowned_buses[0].created_date is not None
            # exclude_fields works through the inherited reverse relations
            joe_check = (
                await Person.objects.exclude_fields(
                    {
                        "coowned_trucks": {"created_date"},
                        "coowned_buses": {"created_date"},
                    }
                )
                .prefetch_related(["coowned_trucks", "coowned_buses"])
                .get(name="Joe")
            )
            assert joe_check.pk == joe.pk
            assert joe_check.coowned_trucks[0] == shelby
            assert joe_check.coowned_trucks[0].created_date is None
            assert joe_check.coowned_buses[0] == unicorn
            assert joe_check.coowned_buses[0].created_date is None
@pytest.mark.asyncio
async def test_inheritance_with_multi_relation() -> None:
    """The M2M relation declared on abstract Car2 works on both subclasses."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sam = await Person(name="Sam").save()
            joe = await Person(name="Joe").save()
            alex = await Person(name="Alex").save()
            truck = await Truck2(
                name="Shelby wanna be 2", max_capacity=1400, owner=sam
            ).save()
            await truck.co_owners.add(joe)
            await truck.co_owners.add(alex)
            bus3 = await Bus2(name="Unicorn 3", max_persons=30, owner=joe).save()
            await bus3.co_owners.add(sam)
            bus = await Bus2(name="Unicorn 2", max_persons=50, owner=sam).save()
            await bus.co_owners.add(joe)
            await bus.co_owners.add(alex)
            # inherited M2M resolves on Truck2
            shelby = await Truck2.objects.select_related(["owner", "co_owners"]).get()
            assert shelby.name == "Shelby wanna be 2"
            assert shelby.owner.name == "Sam"
            assert shelby.co_owners[0].name == "Joe"
            assert len(shelby.co_owners) == 2
            assert shelby.max_capacity == 1400
            # ...and on Bus2
            unicorn = await Bus2.objects.select_related(["owner", "co_owners"]).get(
                name="Unicorn 2"
            )
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name == "Sam"
            assert unicorn.co_owners[0].name == "Joe"
            assert len(unicorn.co_owners) == 2
            assert unicorn.max_persons == 50
            # ordering by the M2M relation (desc then asc) changes item order
            unicorn = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .order_by("-co_owners__name")
                .get()
            )
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name == "Sam"
            assert len(unicorn.co_owners) == 2
            assert unicorn.co_owners[0].name == "Joe"
            unicorn = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .order_by("co_owners__name")
                .get()
            )
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name == "Sam"
            assert len(unicorn.co_owners) == 2
            assert unicorn.co_owners[0].name == "Alex"
            # reverse M2M names are distinct per concrete subclass
            joe_check = await Person.objects.select_related(
                ["coowned_trucks2", "coowned_buses2"]
            ).get(name="Joe")
            assert joe_check.pk == joe.pk
            assert joe_check.coowned_trucks2[0] == shelby
            assert joe_check.coowned_trucks2[0].created_date is not None
            assert joe_check.coowned_buses2[0] == unicorn
            assert joe_check.coowned_buses2[0].created_date is not None
            # exclude_fields works through the inherited reverse M2M relations
            joe_check = (
                await Person.objects.exclude_fields(
                    {
                        "coowned_trucks2": {"created_date"},
                        "coowned_buses2": {"created_date"},
                    }
                )
                .prefetch_related(["coowned_trucks2", "coowned_buses2"])
                .get(name="Joe")
            )
            assert joe_check.pk == joe.pk
            assert joe_check.coowned_trucks2[0] == shelby
            assert joe_check.coowned_trucks2[0].created_date is None
            assert joe_check.coowned_buses2[0] == unicorn
            assert joe_check.coowned_buses2[0].created_date is None
            # removing M2M links and deleting the truck leaves the buses intact
            await shelby.co_owners.remove(joe)
            await shelby.co_owners.remove(alex)
            await Truck2.objects.delete(name="Shelby wanna be 2")
            # filter / exclude across the inherited M2M relation
            unicorn = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .filter(co_owners__name="Joe")
                .get()
            )
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name == "Sam"
            assert unicorn.co_owners[0].name == "Joe"
            assert len(unicorn.co_owners) == 1
            assert unicorn.max_persons == 50
            unicorn = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .exclude(co_owners__name="Joe")
                .get()
            )
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name == "Sam"
            assert unicorn.co_owners[0].name == "Alex"
            assert len(unicorn.co_owners) == 1
            assert unicorn.max_persons == 50
            # without select_related the relations are lazy until loaded
            unicorn = await Bus2.objects.get()
            assert unicorn.name == "Unicorn 2"
            assert unicorn.owner.name is None
            assert len(unicorn.co_owners) == 0
            await unicorn.co_owners.all()
            assert len(unicorn.co_owners) == 2
            assert unicorn.co_owners[0].name == "Joe"
            await unicorn.owner.load()
            assert unicorn.owner.name == "Sam"
            # combined filter + order over the inherited relations
            unicorns = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .filter(name__contains="Unicorn")
                .order_by("-name")
                .all()
            )
            assert unicorns[0].name == "Unicorn 3"
            assert unicorns[0].owner.name == "Joe"
            assert len(unicorns[0].co_owners) == 1
            assert unicorns[0].co_owners[0].name == "Sam"
            assert unicorns[1].name == "Unicorn 2"
            assert unicorns[1].owner.name == "Sam"
            assert len(unicorns[1].co_owners) == 2
            assert unicorns[1].co_owners[0].name == "Joe"
            # raw-SQL limit truncates the joined rows, cutting off co_owners
            unicorns = (
                await Bus2.objects.select_related(["owner", "co_owners"])
                .filter(name__contains="Unicorn")
                .order_by("-name")
                .limit(2, limit_raw_sql=True)
                .all()
            )
            assert len(unicorns) == 2
            assert unicorns[1].name == "Unicorn 2"
            assert len(unicorns[1].co_owners) == 1
def test_custom_config() -> None:
    """A custom model_config merges with ormar defaults instead of replacing them."""
    # inherited default survives...
    assert ImmutablePerson.model_config["from_attributes"] is True
    # ...while the explicit override wins
    assert ImmutablePerson.model_config["validate_assignment"] is False
    frozen_sam = ImmutablePerson(name="Sam")
    with pytest.raises(pydantic.ValidationError):
        frozen_sam.name = "Not Sam"
def test_get_constraint_copy() -> None:
    # Unknown constraint objects are rejected instead of being silently copied.
    with pytest.raises(ValueError):
        get_constraint_copy("INVALID CONSTRAINT")  # type: ignore
collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/test_inheritance_mixins.py 0000664 0000000 0000000 00000012354 15130200524 0033671 0 ustar 00root root 0000000 0000000 import datetime
from typing import Optional
import ormar
import pytest
import sqlalchemy as sa
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class AuditMixin:
    """Plain mixin (not an ormar.Model) contributing audit columns."""

    created_by: str = ormar.String(max_length=100)
    updated_by: str = ormar.String(max_length=100, default="Sam")
class DateFieldsMixins:
    """Plain mixin (not an ormar.Model) contributing timestamp columns."""

    created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
    updated_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
class Category(ormar.Model, DateFieldsMixins, AuditMixin):
    """Concrete model pulling columns from both plain mixins."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    code: int = ormar.Integer()
class Subject(ormar.Model, DateFieldsMixins):
    """Concrete model using only the date mixin, with an FK to Category."""

    ormar_config = base_ormar_config.copy(tablename="subjects")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    category: Optional[Category] = ormar.ForeignKey(Category)
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
def test_field_redefining() -> None:
    """A subclass may re-declare a mixin column under a new DB alias."""

    class RedefinedField(ormar.Model, DateFieldsMixins):
        ormar_config = base_ormar_config.copy(tablename="redefined")

        id: int = ormar.Integer(primary_key=True)
        created_date: datetime.datetime = ormar.DateTime(name="creation_date")

    redefined = RedefinedField.ormar_config.model_fields["created_date"]
    # the mixin's default is gone and the alias is in effect
    assert redefined.ormar_default is None
    assert redefined.get_alias() == "creation_date"
    table_columns = RedefinedField.ormar_config.table.columns
    assert any(column.name == "creation_date" for column in table_columns)
def test_field_redefining_in_second() -> None:
    """Re-declaring a mixin column with a new type swaps the column type too."""

    class RedefinedField2(ormar.Model, DateFieldsMixins):
        ormar_config = base_ormar_config.copy(tablename="redefines2")

        id: int = ormar.Integer(primary_key=True)
        created_date: str = ormar.String(
            max_length=200,
            name="creation_date",
        )  # type: ignore

    redefined = RedefinedField2.ormar_config.model_fields["created_date"]
    # the mixin's default is gone and the alias is in effect
    assert redefined.ormar_default is None
    assert redefined.get_alias() == "creation_date"
    table_columns = RedefinedField2.ormar_config.table.columns
    assert any(column.name == "creation_date" for column in table_columns)
    # the physical column follows the new declaration (String, not DateTime)
    assert isinstance(table_columns["creation_date"].type, sa.sql.sqltypes.String)
def round_date_to_seconds(
    date: datetime.datetime,
) -> datetime.datetime:  # pragma: no cover
    """Round a datetime to whole seconds (>= half a second rounds up)."""
    if date.microsecond >= 500000:
        date += datetime.timedelta(seconds=1)
    return date.replace(microsecond=0)
@pytest.mark.asyncio
async def test_fields_inherited_from_mixin() -> None:
    """Columns contributed by plain mixins behave like declared ormar columns."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            cat = await Category(
                name="Foo", code=123, created_by="Sam", updated_by="Max"
            ).save()
            sub = await Subject(name="Bar", category=cat).save()
            mixin_columns = ["created_date", "updated_date"]
            mixin2_columns = ["created_by", "updated_by"]
            # date columns exist on both models and got their defaults
            assert all(
                field in Category.ormar_config.model_fields for field in mixin_columns
            )
            assert cat.created_date is not None
            assert cat.updated_date is not None
            assert all(
                field in Subject.ormar_config.model_fields for field in mixin_columns
            )
            assert sub.created_date is not None
            assert sub.updated_date is not None
            # audit columns only exist where AuditMixin is a base
            assert all(
                field in Category.ormar_config.model_fields for field in mixin2_columns
            )
            assert all(
                field not in Subject.ormar_config.model_fields
                for field in mixin2_columns
            )
            # the mixin columns are materialized in the physical tables
            inspector = sa.inspect(base_ormar_config.engine)
            assert "categories" in inspector.get_table_names()
            table_columns = [x.get("name") for x in inspector.get_columns("categories")]
            assert all(col in table_columns for col in mixin_columns + mixin2_columns)
            assert "subjects" in inspector.get_table_names()
            table_columns = [x.get("name") for x in inspector.get_columns("subjects")]
            assert all(col in table_columns for col in mixin_columns)
            # ordering / excluding by mixin fields works in queries
            sub2 = (
                await Subject.objects.select_related("category")
                .order_by("-created_date")
                .exclude_fields("updated_date")
                .get()
            )
            assert round_date_to_seconds(sub2.created_date) == round_date_to_seconds(
                sub.created_date
            )
            assert sub2.category is not None
            assert sub2.category.updated_date is not None
            assert round_date_to_seconds(
                sub2.category.created_date
            ) == round_date_to_seconds(cat.created_date)
            assert sub2.updated_date is None
            assert sub2.category.created_by == "Sam"
            assert sub2.category.updated_by == cat.updated_by
test_inheritance_of_property_fields.py 0000664 0000000 0000000 00000002432 15130200524 0036175 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation import ormar
from pydantic import computed_field
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class BaseFoo(ormar.Model):
    """Abstract base with a computed field that children inherit."""

    ormar_config = base_ormar_config.copy(abstract=True)

    name: str = ormar.String(max_length=100)

    @computed_field
    def prefixed_name(self) -> str:
        return "prefix_" + self.name
class Foo(BaseFoo):
    """Inherits ``prefixed_name`` and adds a second computed field."""

    ormar_config = base_ormar_config.copy()

    @computed_field
    def double_prefixed_name(self) -> str:
        return "prefix2_" + self.name

    id: int = ormar.Integer(primary_key=True)
class Bar(BaseFoo):
    """Overrides the inherited ``prefixed_name`` computed field."""

    ormar_config = base_ormar_config.copy()

    @computed_field
    def prefixed_name(self) -> str:
        return "baz_" + self.name

    id: int = ormar.Integer(primary_key=True)
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
def test_property_fields_are_inherited():
    """Computed fields are inherited, extended and overridable by subclasses."""
    foo = Foo(name="foo")
    assert foo.prefixed_name == "prefix_foo"
    # Foo dumps both the inherited and its own computed field
    expected_foo = {
        "name": "foo",
        "id": None,
        "double_prefixed_name": "prefix2_foo",
        "prefixed_name": "prefix_foo",
    }
    assert foo.model_dump() == expected_foo
    # Bar's override replaces the inherited implementation
    bar = Bar(name="bar")
    assert bar.prefixed_name == "baz_bar"
    assert bar.model_dump() == {"name": "bar", "id": None, "prefixed_name": "baz_bar"}
test_inheritance_with_default.py 0000664 0000000 0000000 00000002756 15130200524 0034767 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation import datetime
import uuid
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class BaseModel(ormar.Model):
    """Abstract base providing a UUID primary key and created/updated timestamps."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
    # BUG FIX: the defaults were previously ``datetime.datetime.utcnow()`` —
    # called once at import time, so every row shared that single frozen
    # timestamp.  Passing the callable instead makes the value be computed per
    # instance, matching the callable default already used for ``id``.
    created_at: datetime.datetime = ormar.DateTime(default=datetime.datetime.utcnow)
    updated_at: datetime.datetime = ormar.DateTime(default=datetime.datetime.utcnow)
class Member(BaseModel):
    """Concrete model; pk and timestamp defaults come from BaseModel."""

    ormar_config = base_ormar_config.copy(tablename="members")

    first_name: str = ormar.String(max_length=50)
    last_name: str = ormar.String(max_length=50)
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
def test_model_structure():
    """The pk with its callable default is present on both base and subclass."""
    for model in (BaseModel, Member):
        assert "id" in model.model_fields
        assert "id" in model.ormar_config.model_fields
        assert model.ormar_config.model_fields["id"].has_default()
        # uuid.uuid4 is registered as a factory, not a static default
        assert model.model_fields["id"].default_factory is not None
@pytest.mark.asyncio
async def test_fields_inherited_with_default():
    # Both save() and objects.create() succeed with the inherited defaults
    # filling id / created_at / updated_at.
    async with base_ormar_config.database:
        await Member(first_name="foo", last_name="bar").save()
        await Member.objects.create(first_name="foo", last_name="bar")
test_inherited_class_is_not_abstract_by_default.py 0000664 0000000 0000000 00000002472 15130200524 0040526 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation import datetime
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class TableBase(ormar.Model):
    """Abstract audit base: pk, creator/modifier info and tz-aware timestamps."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    created_by: str = ormar.String(max_length=20, default="test")
    created_at: datetime.datetime = ormar.DateTime(
        timezone=True, default=datetime.datetime.now
    )
    # modification info is optional until the row is first updated
    last_modified_by: str = ormar.String(max_length=20, nullable=True)
    last_modified_at: datetime.datetime = ormar.DateTime(timezone=True, nullable=True)
class NationBase(ormar.Model):
    """Abstract base carrying the domain columns of a nation."""

    ormar_config = base_ormar_config.copy(abstract=True)

    name: str = ormar.String(max_length=50)
    alpha2_code: str = ormar.String(max_length=2)
    region: str = ormar.String(max_length=30)
    subregion: str = ormar.String(max_length=30)
class Nation(NationBase, TableBase):
    """Child of two abstract bases; becomes concrete without opting in."""

    ormar_config = base_ormar_config.copy()
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_model_is_not_abstract_by_default():
    """A child of two abstract bases is concrete and can be persisted."""
    async with base_ormar_config.database:
        nation = await Nation(
            name="Sweden", alpha2_code="SE", region="Europe", subregion="Scandinavia"
        ).save()
        # the autoincrement pk was assigned on save
        assert nation.id is not None
collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/test_nested_models_pydantic.py0000664 0000000 0000000 00000004020 15130200524 0034520 0 ustar 00root root 0000000 0000000 import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Library(ormar.Model):
    """Innermost model of the Ticket -> Package -> Library chain."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Package(ormar.Model):
    """Middle model; belongs to a Library."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    library: Library = ormar.ForeignKey(Library, related_name="packages")
    version: str = ormar.String(max_length=100)
class Ticket(ormar.Model):
    """Standalone model referenced by TicketPackage."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    number: int = ormar.Integer()
    status: str = ormar.String(max_length=100)
class TicketPackage(ormar.Model):
    """Join-style model linking a Ticket with a Package."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    status: str = ormar.String(max_length=100)
    ticket: Ticket = ormar.ForeignKey(Ticket, related_name="packages")
    package: Package = ormar.ForeignKey(Package, related_name="tickets")
# database schema setup/teardown hook from tests.lifespan.init_tests
create_test_database = init_tests(base_ormar_config)
def test_have_proper_children():
    """Excluding one relation keeps the other, with its own nested relations."""
    TicketPackageOut = TicketPackage.get_pydantic(exclude={"ticket"})
    assert "package" in TicketPackageOut.model_fields
    # dig the generated package class out of the pydantic core schema
    package_field = TicketPackageOut.__pydantic_core_schema__["schema"]["fields"][
        "package"
    ]
    PydanticPackage = package_field["schema"]["schema"]["schema"]["cls"]
    assert "library" in PydanticPackage.model_fields
def test_casts_properly():
    """A full TicketPackage payload round-trips through the generated pydantic
    model, with the excluded "ticket" relation stripped from the output."""
    raw = {
        "id": 0,
        "status": "string",
        "ticket": {"id": 0, "number": 0, "status": "string"},
        "package": {
            "version": "string",
            "id": 0,
            "library": {"id": 0, "name": "string"},
        },
    }
    OutModel = TicketPackage.get_pydantic(exclude={"ticket"})
    dumped = OutModel(**TicketPackage(**raw).model_dump()).model_dump()
    assert "ticket" not in dumped
    assert "package" in dumped
    assert "library" in dumped.get("package")
collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation/test_pydantic_fields_order.py 0000664 0000000 0000000 00000001245 15130200524 0034342 0 ustar 00root root 0000000 0000000 import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class NewTestModel(ormar.Model):
    """Model with alphabetically ordered fields to check pydantic field order."""

    ormar_config = base_ormar_config.copy()

    a: int = ormar.Integer(primary_key=True)
    b: str = ormar.String(max_length=1)
    c: str = ormar.String(max_length=1)
    d: str = ormar.String(max_length=1)
    e: str = ormar.String(max_length=1)
    f: str = ormar.String(max_length=1)
create_test_database = init_tests(base_ormar_config)
def test_model_field_order():
    """Generated pydantic model preserves declaration order, minus the
    excluded primary key."""
    generated = NewTestModel.get_pydantic(exclude={"a"})
    expected = ["b", "c", "d", "e", "f"]
    assert list(generated.model_fields.keys()) == expected
test_validators_are_inherited.py 0000664 0000000 0000000 00000003345 15130200524 0034764 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation import enum
import ormar
import pytest
from pydantic import ValidationError, field_validator
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class BaseModel(ormar.Model):
    """Abstract base whose fields and validator are inherited by subclasses."""

    ormar_config = base_ormar_config.copy(abstract=True)

    id: int = ormar.Integer(primary_key=True)
    str_field: str = ormar.String(min_length=5, max_length=10, nullable=False)

    @field_validator("str_field")
    def validate_str_field(cls, v):
        # custom rule on top of the length constraints above
        if " " not in v:
            raise ValueError("must contain a space")
        return v
class EnumExample(str, enum.Enum):
    """String enum used for the ``ormar.Enum`` field below."""

    A = "A"
    B = "B"
    C = "C"
class ModelExample(BaseModel):
    # Concrete subclass: inherits str_field and its validator from BaseModel.
    ormar_config = base_ormar_config.copy(tablename="examples")

    enum_field: str = ormar.Enum(enum_class=EnumExample, nullable=False)
ModelExampleCreate = ModelExample.get_pydantic(exclude={"id"})
create_test_database = init_tests(base_ormar_config)
def test_ormar_validator():
    """The inherited custom validator and the enum check both run on the
    ormar model itself."""
    # a valid instance validates cleanly
    ModelExample(str_field="a aaaaaa", enum_field="A")
    failing_cases = [
        ({"str_field": "aaaaaaa", "enum_field": "A"}, "must contain a space"),
        ({"str_field": "a aaaaaaa", "enum_field": "Z"}, "Input should be 'A', 'B' or 'C'"),
    ]
    for kwargs, message in failing_cases:
        with pytest.raises(ValidationError) as err:
            ModelExample(**kwargs)
        assert message in str(err)
def test_pydantic_validator():
    """The inherited validator and enum check survive ``get_pydantic``."""
    # a valid payload validates cleanly
    ModelExampleCreate(str_field="a aaaaaa", enum_field="A")
    failing_cases = [
        ({"str_field": "aaaaaaa", "enum_field": "A"}, "must contain a space"),
        ({"str_field": "a aaaaaaa", "enum_field": "Z"}, "Input should be 'A', 'B' or 'C'"),
    ]
    for kwargs, message in failing_cases:
        with pytest.raises(ValidationError) as err:
            ModelExampleCreate(**kwargs)
        assert message in str(err)
test_validators_in_generated_pydantic.py 0000664 0000000 0000000 00000003215 15130200524 0036475 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_inheritance_and_pydantic_generation import enum
import ormar
import pytest
from pydantic import ValidationError, field_validator
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class EnumExample(str, enum.Enum):
    """String enum used for the ``ormar.Enum`` field below."""

    A = "A"
    B = "B"
    C = "C"
class ModelExample(ormar.Model):
    """Model with a custom field validator, to be carried into get_pydantic."""

    ormar_config = base_ormar_config.copy(tablename="examples")

    id: int = ormar.Integer(primary_key=True)
    str_field: str = ormar.String(min_length=5, max_length=10, nullable=False)
    enum_field: str = ormar.Enum(nullable=False, enum_class=EnumExample)

    @field_validator("str_field")
    def validate_str_field(cls, v):
        # custom rule on top of the length constraints above
        if " " not in v:
            raise ValueError("must contain a space")
        return v
ModelExampleCreate = ModelExample.get_pydantic(exclude={"id"})
create_test_database = init_tests(base_ormar_config)
def test_ormar_validator():
    """Custom validator and enum check both fire on the ormar model."""
    ModelExample(str_field="a aaaaaa", enum_field="A")
    for kwargs, message in (
        ({"str_field": "aaaaaaa", "enum_field": "A"}, "must contain a space"),
        ({"str_field": "a aaaaaaa", "enum_field": "Z"}, "Input should be 'A', 'B' or 'C'"),
    ):
        with pytest.raises(ValidationError) as err:
            ModelExample(**kwargs)
        assert message in str(err)
def test_pydantic_validator():
    """Custom validator and enum check survive into the generated model."""
    ModelExampleCreate(str_field="a aaaaaa", enum_field="A")
    for kwargs, message in (
        ({"str_field": "aaaaaaa", "enum_field": "A"}, "must contain a space"),
        ({"str_field": "a aaaaaaa", "enum_field": "Z"}, "Input should be 'A', 'B' or 'C'"),
    ):
        with pytest.raises(ValidationError) as err:
            ModelExampleCreate(**kwargs)
        assert message in str(err)
collerek-ormar-c09209a/tests/test_meta_constraints/ 0000775 0000000 0000000 00000000000 15130200524 0022527 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_meta_constraints/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0024626 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_meta_constraints/test_check_constraints.py 0000664 0000000 0000000 00000002720 15130200524 0027645 0 ustar 00root root 0000000 0000000 import sqlite3
import asyncpg # type: ignore
import ormar.fields.constraints
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Product(ormar.Model):
    """Product guarded by a table-level CHECK constraint (inventory > buffer)."""

    ormar_config = base_ormar_config.copy(
        tablename="products",
        constraints=[
            ormar.fields.constraints.CheckColumns("inventory > buffer"),
        ],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    company: str = ormar.String(max_length=200)
    inventory: int = ormar.Integer()
    buffer: int = ormar.Integer()
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_check_columns_exclude_mysql():
    """Insert violating the CHECK constraint raises the backend's integrity
    error.  Skipped on MySQL — NOTE(review): presumably the CHECK clause is
    not emitted/enforced there; confirm against ormar's constraint docs."""
    if Product.ormar_config.database._backend._dialect.name != "mysql":
        async with base_ormar_config.database:  # pragma: no cover
            async with base_ormar_config.database.transaction(force_rollback=True):
                # inventory (100) > buffer (10) -> satisfies the constraint
                await Product.objects.create(
                    name="Mars", company="Nestle", inventory=100, buffer=10
                )
                with pytest.raises(
                    (
                        sqlite3.IntegrityError,
                        asyncpg.exceptions.CheckViolationError,
                    )
                ):
                    # inventory (1) <= buffer (10) -> rejected by the DB
                    await Product.objects.create(
                        name="Cookies", company="Nestle", inventory=1, buffer=10
                    )
collerek-ormar-c09209a/tests/test_meta_constraints/test_index_constraints.py 0000664 0000000 0000000 00000003730 15130200524 0027701 0 ustar 00root root 0000000 0000000 import ormar.fields.constraints
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Product(ormar.Model):
    """Product with two composite indexes: one explicitly named, one auto-named."""

    ormar_config = base_ormar_config.copy(
        tablename="products",
        constraints=[
            ormar.fields.constraints.IndexColumns("company", "name", name="my_index"),
            ormar.fields.constraints.IndexColumns("location", "company_type"),
        ],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    company: str = ormar.String(max_length=200)
    location: str = ormar.String(max_length=200)
    company_type: str = ormar.String(max_length=200)
create_test_database = init_tests(base_ormar_config)
def test_table_structure():
    """Both declared composite indexes end up on the sqlalchemy table: the
    explicitly named one and the auto-named one."""
    all_indexes = Product.ormar_config.table.indexes
    assert len(all_indexes) > 0
    # reverse-sort by name so "my_index" comes before "ix_products_..."
    by_name_desc = sorted(all_indexes, key=lambda idx: idx.name, reverse=True)
    named_index, auto_index = by_name_desc[0], by_name_desc[1]
    assert named_index.name == "my_index"
    assert [column.name for column in named_index.columns] == ["company", "name"]
    assert auto_index.name == "ix_products_location_company_type"
    assert [column.name for column in auto_index.columns] == ["location", "company_type"]
@pytest.mark.asyncio
async def test_index_is_not_unique():
    """IndexColumns creates plain (non-unique) indexes: rows with duplicate
    (company, name) pairs insert without error."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Product.objects.create(
                name="Cookies", company="Nestle", location="A", company_type="B"
            )
            await Product.objects.create(
                name="Mars", company="Mars", location="B", company_type="Z"
            )
            await Product.objects.create(
                name="Mars", company="Nestle", location="C", company_type="X"
            )
            # duplicate ("Mars", "Mars") pair — must still succeed
            await Product.objects.create(
                name="Mars", company="Mars", location="D", company_type="Y"
            )
collerek-ormar-c09209a/tests/test_meta_constraints/test_unique_constraints.py 0000664 0000000 0000000 00000002441 15130200524 0030076 0 ustar 00root root 0000000 0000000 import sqlite3
import asyncpg # type: ignore
import ormar.fields.constraints
import pymysql
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Product(ormar.Model):
    """Product with a composite UNIQUE constraint on (name, company)."""

    ormar_config = base_ormar_config.copy(
        tablename="products",
        constraints=[ormar.fields.constraints.UniqueColumns("name", "company")],
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    company: str = ormar.String(max_length=200)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_unique_columns():
    """Composite UNIQUE(name, company): differing pairs insert fine, an exact
    duplicate raises the backend-specific integrity error."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Product.objects.create(name="Cookies", company="Nestle")
            await Product.objects.create(name="Mars", company="Mars")
            # same name, different company — allowed
            await Product.objects.create(name="Mars", company="Nestle")
            with pytest.raises(
                (
                    sqlite3.IntegrityError,
                    pymysql.IntegrityError,
                    asyncpg.exceptions.UniqueViolationError,
                )
            ):
                # exact duplicate of ("Mars", "Mars") — rejected
                await Product.objects.create(name="Mars", company="Mars")
collerek-ormar-c09209a/tests/test_model_definition/ 0000775 0000000 0000000 00000000000 15130200524 0022462 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_definition/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0024561 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_definition/pks_and_fks/ 0000775 0000000 0000000 00000000000 15130200524 0024744 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_definition/pks_and_fks/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0027043 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_definition/pks_and_fks/test_non_integer_pkey.py 0000664 0000000 0000000 00000002140 15130200524 0031711 0 ustar 00root root 0000000 0000000 import random
import databases
import ormar
import pytest
import sqlalchemy
from tests.settings import DATABASE_URL
database = databases.Database(DATABASE_URL, force_rollback=True)
metadata = sqlalchemy.MetaData()
def key():
    """Random 8-character primary key drawn from a fixed 14-char alphabet."""
    alphabet = "abcdefgh123456"
    chars = [random.choice(alphabet) for _ in range(8)]
    return "".join(chars)
class Model(ormar.Model):
    """Model with a random 8-char string primary key generated by ``key``."""

    ormar_config = ormar.OrmarConfig(
        tablename="models",
        metadata=metadata,
        database=database,
    )

    id: str = ormar.String(primary_key=True, default=key, max_length=8)
    name: str = ormar.String(max_length=32)
@pytest.fixture(autouse=True, scope="function")
def create_test_database():
    """Create all tables before each test and drop them afterwards."""
    engine = sqlalchemy.create_engine(DATABASE_URL)
    metadata.create_all(engine)
    yield
    metadata.drop_all(engine)
@pytest.mark.asyncio
async def test_pk_1():
    """The string-pk default is applied on create and stays a str."""
    async with database:
        model = await Model.objects.create(name="NAME")
        assert isinstance(model.id, str)
async def test_pk_2():
async with database:
model = await Model.objects.create(name="NAME")
assert await Model.objects.all() == [model]
collerek-ormar-c09209a/tests/test_model_definition/pks_and_fks/test_saving_string_pks.py 0000664 0000000 0000000 00000004060 15130200524 0032107 0 ustar 00root root 0000000 0000000 from random import choice
from string import ascii_uppercase
import databases
import ormar
import pytest
import pytest_asyncio
import sqlalchemy
from ormar import Float, String
from sqlalchemy import create_engine
from tests.settings import DATABASE_URL
database = databases.Database(DATABASE_URL, force_rollback=True)
metadata = sqlalchemy.MetaData()
def get_id() -> str:
    """Random 12-character uppercase ASCII identifier (used as a pk default)."""
    letters = [choice(ascii_uppercase) for _ in range(12)]
    return "".join(letters)
base_ormar_config = ormar.OrmarConfig(
metadata=metadata,
database=database,
)
class PositionOrm(ormar.Model):
    """Position with an explicit (caller-supplied) string primary key."""

    ormar_config = base_ormar_config.copy()

    name: str = String(primary_key=True, max_length=50)
    x: float = Float()
    y: float = Float()
    degrees: float = Float()
class PositionOrmDef(ormar.Model):
    """Position whose string primary key defaults to a generated ``get_id``."""

    ormar_config = base_ormar_config.copy()

    name: str = String(primary_key=True, max_length=50, default=get_id)
    x: float = Float()
    y: float = Float()
    degrees: float = Float()
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
    """Create all tables once per module and drop them afterwards."""
    engine = create_engine(DATABASE_URL)
    metadata.create_all(engine)
    yield
    metadata.drop_all(engine)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Remove all rows created by a test after it finishes."""
    yield
    async with database:
        await PositionOrm.objects.delete(each=True)
        await PositionOrmDef.objects.delete(each=True)
@pytest.mark.asyncio
async def test_creating_a_position(cleanup):
    """String pks: an explicit value persists as-is; the callable default
    generates distinct 12-char ids per instance."""
    async with database:
        instance = PositionOrm(name="my_pos", x=1.0, y=2.0, degrees=3.0)
        await instance.save()
        assert instance.saved
        assert instance.name == "my_pos"
        instance2 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
        await instance2.save()
        assert instance2.saved
        assert instance2.name is not None
        assert len(instance2.name) == 12
        instance3 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
        await instance3.save()
        assert instance3.saved
        assert instance3.name is not None
        assert len(instance3.name) == 12
        # defaults must be re-evaluated per row, not shared
        assert instance2.name != instance3.name
collerek-ormar-c09209a/tests/test_model_definition/pks_and_fks/test_uuid_fks.py 0000664 0000000 0000000 00000003544 15130200524 0030174 0 ustar 00root root 0000000 0000000 import uuid
import databases
import ormar
import pytest
import sqlalchemy
from sqlalchemy import create_engine
from tests.settings import DATABASE_URL
metadata = sqlalchemy.MetaData()
db = databases.Database(DATABASE_URL)
class User(ormar.Model):
    """User table with a string-formatted UUID primary key."""

    ormar_config = ormar.OrmarConfig(
        tablename="user",
        metadata=metadata,
        database=db,
    )

    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
    # FIX: the original used Django-style ``null=False`` on username and
    # hashed_password while using ``nullable=False`` elsewhere; ormar's
    # keyword is ``nullable`` (``null`` is presumably ignored — TODO confirm),
    # so the intent "NOT NULL" is now expressed consistently.
    username = ormar.String(index=True, unique=True, nullable=False, max_length=255)
    email = ormar.String(index=True, unique=True, nullable=False, max_length=255)
    hashed_password = ormar.String(nullable=False, max_length=255)
    is_active = ormar.Boolean(default=True, nullable=False)
    is_superuser = ormar.Boolean(default=False, nullable=False)
class Token(ormar.Model):
    """Token with an FK to the UUID-keyed User and a server-side timestamp."""

    ormar_config = ormar.OrmarConfig(
        tablename="token",
        metadata=metadata,
        database=db,
    )

    id = ormar.Integer(primary_key=True)
    text = ormar.String(max_length=4, unique=True)
    user = ormar.ForeignKey(User, related_name="tokens")
    # timestamp filled in by the database, not by python
    created_at = ormar.DateTime(server_default=sqlalchemy.func.now())
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
    """Create all tables once per module and drop them afterwards."""
    engine = create_engine(DATABASE_URL)
    metadata.create_all(engine)
    yield
    metadata.drop_all(engine)
@pytest.mark.asyncio
async def test_uuid_fk():
    """An FK pointing at a UUID primary key saves correctly, and rows can be
    ordered by the server-default timestamp."""
    async with db:
        async with db.transaction(force_rollback=True):
            user = await User.objects.create(
                username="User1",
                email="email@example.com",
                hashed_password="^$EDACVS(&A&Y@2131aa",
                is_active=True,
                is_superuser=False,
            )
            await Token.objects.create(text="AAAA", user=user)
            # must not raise — created_at exists even though set server-side
            await Token.objects.order_by("-created_at").all()
collerek-ormar-c09209a/tests/test_model_definition/test_aliases.py 0000664 0000000 0000000 00000013407 15130200524 0025521 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Child(ormar.Model):
    """Model whose every column has a DB alias differing from the attribute name."""

    ormar_config = base_ormar_config.copy(tablename="children")

    id: int = ormar.Integer(name="child_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year_born", nullable=True)
class Artist(ormar.Model):
    """Aliased-column model with an M2M relation to Child."""

    ormar_config = base_ormar_config.copy(tablename="artists")

    id: int = ormar.Integer(name="artist_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
    born_year: int = ormar.Integer(name="year")
    children: Optional[List[Child]] = ormar.ManyToMany(Child)
class Album(ormar.Model):
    """Aliased-column model with an aliased FK column to Artist."""

    ormar_config = base_ormar_config.copy(tablename="music_albums")

    id: int = ormar.Integer(name="album_id", primary_key=True)
    name: str = ormar.String(name="album_name", max_length=100)
    artist: Optional[Artist] = ormar.ForeignKey(Artist, name="artist_id")
create_test_database = init_tests(base_ormar_config)
def test_table_structure():
    """Database column names come from the ``name=`` aliases, not from the
    python attribute names."""
    album_columns = [column.name for column in Album.ormar_config.table.columns]
    artist_columns = [column.name for column in Artist.ormar_config.table.columns]
    assert "album_id" in album_columns
    assert "album_name" in album_columns
    for alias in ("fname", "lname", "year"):
        assert alias in artist_columns
@pytest.mark.asyncio
async def test_working_with_aliases():
    """CRUD, relations and partial field loading all translate the aliased
    (``name=``) database columns back to python attribute names."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            artist = await Artist.objects.create(
                first_name="Ted", last_name="Mosbey", born_year=1975
            )
            await Album.objects.create(name="Aunt Robin", artist=artist)
            await artist.children.create(
                first_name="Son", last_name="1", born_year=1990
            )
            await artist.children.create(
                first_name="Son", last_name="2", born_year=1995
            )
            await artist.children.create(
                first_name="Son", last_name="3", born_year=1998
            )
            album = await Album.objects.select_related("artist").first()
            assert album.artist.last_name == "Mosbey"
            assert album.artist.id is not None
            assert album.artist.first_name == "Ted"
            assert album.artist.born_year == 1975
            assert album.name == "Aunt Robin"
            artist = await Artist.objects.select_related("children").get()
            assert len(artist.children) == 3
            assert artist.children[0].first_name == "Son"
            assert artist.children[1].last_name == "2"
            assert artist.children[2].last_name == "3"
            # both instance-level and queryset-level updates use the aliases
            await artist.update(last_name="Bundy")
            await Artist.objects.filter(pk=artist.pk).update(born_year=1974)
            artist = await Artist.objects.select_related("children").get()
            assert artist.last_name == "Bundy"
            assert artist.born_year == 1974
            artist = (
                await Artist.objects.select_related("children")
                .fields(
                    [
                        "first_name",
                        "last_name",
                        "born_year",
                        "children__first_name",
                        "children__last_name",
                    ]
                )
                .get()
            )
            # children's born_year was not selected -> stays unloaded (None)
            assert artist.children[0].born_year is None
@pytest.mark.asyncio
async def test_bulk_operations_and_fields():
    """bulk_create / bulk_update and ``fields()`` projections work with
    aliased columns."""
    async with base_ormar_config.database:
        d1 = Child(first_name="Daughter", last_name="1", born_year=1990)
        d2 = Child(first_name="Daughter", last_name="2", born_year=1991)
        await Child.objects.bulk_create([d1, d2])
        children = await Child.objects.filter(first_name="Daughter").all()
        assert len(children) == 2
        assert children[0].last_name == "1"
        for child in children:
            child.born_year = child.born_year - 100
        await Child.objects.bulk_update(children)
        children = await Child.objects.filter(first_name="Daughter").all()
        assert len(children) == 2
        assert children[0].born_year == 1890
        children = await Child.objects.fields(["first_name", "last_name"]).all()
        assert len(children) == 2
        for child in children:
            # born_year excluded from the projection -> not populated
            assert child.born_year is None
        await children[0].load()
        await children[0].delete()
        children = await Child.objects.all()
        assert len(children) == 1
@pytest.mark.asyncio
async def test_working_with_aliases_get_or_create():
    """get_or_create / update_or_create resolve aliased columns correctly."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            artist, created = await Artist.objects.get_or_create(
                first_name="Teddy", last_name="Bear", born_year=2020
            )
            assert artist.pk is not None
            assert created is True
            # identical lookup -> returns the existing row, created is False
            artist2, created = await Artist.objects.get_or_create(
                first_name="Teddy", last_name="Bear", born_year=2020
            )
            assert artist == artist2
            assert created is False
            art3 = artist2.model_dump()
            art3["born_year"] = 2019
            await Artist.objects.update_or_create(**art3)
            artist3 = await Artist.objects.get(last_name="Bear")
            assert artist3.born_year == 2019
            # still a single row — update_or_create updated, did not insert
            artists = await Artist.objects.all()
            assert len(artists) == 1
collerek-ormar-c09209a/tests/test_model_definition/test_columns.py 0000664 0000000 0000000 00000007337 15130200524 0025565 0 ustar 00root root 0000000 0000000 import datetime
from enum import Enum
import ormar
import pydantic
import pytest
from ormar import ModelDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
def time():
    """Current wall-clock time as a ``datetime.time`` (used as column default)."""
    now = datetime.datetime.now()
    return now.time()
class MyEnum(Enum):
    """Plain (non-str) enum stored via ``ormar.Enum``."""

    SMALL = 1
    BIG = 2
class Example(ormar.Model):
    """Model exercising column defaults: static values, callables and enums."""

    ormar_config = base_ormar_config.copy(tablename="example")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200, default="aaa")
    # callable defaults — evaluated per insert
    created: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
    created_day: datetime.date = ormar.Date(default=datetime.date.today)
    created_time: datetime.time = ormar.Time(default=time)
    description: str = ormar.Text(nullable=True)
    value: float = ormar.Float(nullable=True)
    data: pydantic.Json = ormar.JSON(default={})
    size: MyEnum = ormar.Enum(enum_class=MyEnum, default=MyEnum.SMALL)
class EnumExample(ormar.Model):
    """Minimal model for enum bulk/filter tests."""

    ormar_config = base_ormar_config.copy(tablename="enum_example")

    id: int = ormar.Integer(primary_key=True)
    size: MyEnum = ormar.Enum(enum_class=MyEnum, default=MyEnum.SMALL)
create_test_database = init_tests(base_ormar_config)
def test_proper_enum_column_type():
    """The generated field keeps MyEnum as its declared python type."""
    assert Example.model_fields["size"].__type__ == MyEnum
def test_accepts_only_proper_enums():
    """Members of a different Enum class are rejected at validation time."""

    class WrongEnum(Enum):
        A = 1
        B = 2

    with pytest.raises(pydantic.ValidationError):
        Example(size=WrongEnum.A)
@pytest.mark.asyncio
async def test_enum_bulk_operations():
    """Enum values survive bulk_create and bulk_update round-trips."""
    async with base_ormar_config.database:
        examples = [EnumExample(), EnumExample()]
        await EnumExample.objects.bulk_create(examples)
        check = await EnumExample.objects.all()
        assert all(x.size == MyEnum.SMALL for x in check)
        for x in check:
            x.size = MyEnum.BIG
        await EnumExample.objects.bulk_update(check)
        check2 = await EnumExample.objects.all()
        assert all(x.size == MyEnum.BIG for x in check2)
@pytest.mark.asyncio
async def test_enum_filter():
    """Filtering by an enum member matches only rows with that value."""
    async with base_ormar_config.database:
        examples = [EnumExample(), EnumExample(size=MyEnum.BIG)]
        await EnumExample.objects.bulk_create(examples)
        check = await EnumExample.objects.all(size=MyEnum.SMALL)
        assert len(check) == 1
        check = await EnumExample.objects.all(size=MyEnum.BIG)
        assert len(check) == 1
@pytest.mark.asyncio
async def test_model_crud():
    """Defaults (static, callable and enum) apply on save and round-trip
    correctly through update/load/delete."""
    async with base_ormar_config.database:
        example = Example()
        await example.save()
        await example.load()
        assert example.created.year == datetime.datetime.now().year
        assert example.created_day == datetime.date.today()
        assert example.description is None
        assert example.value is None
        assert example.data == {}
        assert example.size == MyEnum.SMALL
        await example.update(data={"foo": 123}, value=123.456, size=MyEnum.BIG)
        await example.load()
        assert example.value == 123.456
        assert example.data == {"foo": 123}
        assert example.size == MyEnum.BIG
        # update without size must leave the previous enum value untouched
        await example.update(data={"foo": 123}, value=123.456)
        await example.load()
        assert example.value == 123.456
        assert example.data == {"foo": 123}
        await example.delete()
@pytest.mark.asyncio
async def test_invalid_enum_field() -> None:
    """Passing a non-Enum ``enum_class`` raises ModelDefinitionError."""
    async with base_ormar_config.database:
        with pytest.raises(ModelDefinitionError):

            class Example2(ormar.Model):
                ormar_config = base_ormar_config.copy(tablename="example2")

                id: int = ormar.Integer(primary_key=True)
                size: MyEnum = ormar.Enum(enum_class=[])  # type: ignore
collerek-ormar-c09209a/tests/test_model_definition/test_create_uses_init_for_consistency.py 0000664 0000000 0000000 00000002653 15130200524 0032715 0 ustar 00root root 0000000 0000000 import uuid
from typing import ClassVar
import ormar
import pytest
from pydantic import model_validator
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Mol(ormar.Model):
    """Molecule whose pk is a uuid5 derived deterministically from its smiles.

    __init__ seeds a placeholder id so validation can run, and the "before"
    model validator then overwrites it with ``uuid5(namespace, smiles)``.
    """

    # fixed namespace to generate always unique uuid from the smiles
    _UUID_NAMESPACE: ClassVar[uuid.UUID] = uuid.UUID(
        "12345678-abcd-1234-abcd-123456789abc"
    )

    ormar_config = base_ormar_config.copy(tablename="mols")

    id: uuid.UUID = ormar.UUID(primary_key=True, index=True, uuid_format="hex")
    smiles: str = ormar.String(nullable=False, unique=True, max_length=256)

    def __init__(self, **kwargs):
        # this is required to generate id from smiles in init, if id is not given
        if "id" not in kwargs:
            kwargs["id"] = self._UUID_NAMESPACE
        super().__init__(**kwargs)

    @model_validator(mode="before")
    def make_canonical_smiles_and_uuid(cls, values):
        # replaces the placeholder id with the smiles-derived uuid
        values["id"], values["smiles"] = cls.uuid(values["smiles"])
        return values

    @classmethod
    def uuid(cls, smiles):
        # deterministic: same smiles always yields the same id
        id_ = uuid.uuid5(cls._UUID_NAMESPACE, smiles)
        return id_, smiles
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_json_column():
    # NOTE(review): name is a misnomer — this exercises Mol creation via the
    # uuid-from-smiles init/validator flow, not a JSON column.
    async with base_ormar_config.database:
        await Mol.objects.create(smiles="Cc1ccccc1")
        count = await Mol.objects.count()
        assert count == 1
collerek-ormar-c09209a/tests/test_model_definition/test_dates_with_timezone.py 0000664 0000000 0000000 00000007401 15130200524 0030142 0 ustar 00root root 0000000 0000000 from datetime import date, datetime, time, timedelta, timezone
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class DateFieldsModel(ormar.Model):
    """Model with timezone-aware datetime columns defaulting to "now" (UTC+3)."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # FIX: the defaults were ``datetime.now(...)`` *called once at class
    # definition time*, so every row shared the same stale timestamp.
    # Passing a callable makes the default be evaluated per insert
    # (consistent with ``default=datetime.datetime.now`` used elsewhere
    # in these tests).
    created_date: datetime = ormar.DateTime(
        default=lambda: datetime.now(tz=timezone(timedelta(hours=3))), timezone=True
    )
    updated_date: datetime = ormar.DateTime(
        default=lambda: datetime.now(tz=timezone(timedelta(hours=3))),
        name="modification_date",
        timezone=True,
    )
class SampleModel(ormar.Model):
    """Naive-datetime column for datetime-filter tests."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    updated_at: datetime = ormar.DateTime()
class TimeModel(ormar.Model):
    """Time-of-day column for time-filter tests."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    elapsed: time = ormar.Time()
class DateModel(ormar.Model):
    """Date column for date-filter tests."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    creation_date: date = ormar.Date()
class MyModel(ormar.Model):
    """Timezone-aware, non-nullable datetime for timedelta-filter tests."""

    id: int = ormar.Integer(primary_key=True)
    created_at: datetime = ormar.DateTime(timezone=True, nullable=False)

    ormar_config = base_ormar_config.copy(tablename="mymodels")
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_model_crud_with_timezone():
    """Timezone-aware defaults are populated on save."""
    async with base_ormar_config.database:
        datemodel = await DateFieldsModel().save()
        assert datemodel.created_date is not None
        assert datemodel.updated_date is not None
@pytest.mark.asyncio
async def test_query_with_datetime_in_filter():
    """``__lt`` filtering on a naive datetime column matches older rows."""
    async with base_ormar_config.database:
        creation_dt = datetime(2021, 5, 18, 0, 0, 0, 0)
        sample = await SampleModel.objects.create(updated_at=creation_dt)
        current_dt = datetime(2021, 5, 19, 0, 0, 0, 0)
        outdated_samples = await SampleModel.objects.filter(
            updated_at__lt=current_dt
        ).all()
        assert outdated_samples[0] == sample
@pytest.mark.asyncio
async def test_query_with_date_in_filter():
    # NOTE(review): name appears swapped with test_query_with_time_in_filter
    # below — this one filters TimeModel by *time* values.
    async with base_ormar_config.database:
        sample = await TimeModel.objects.create(elapsed=time(0, 20, 20))
        await TimeModel.objects.create(elapsed=time(0, 12, 0))
        await TimeModel.objects.create(elapsed=time(0, 19, 55))
        sample4 = await TimeModel.objects.create(elapsed=time(0, 21, 15))
        threshold = time(0, 20, 0)
        samples = await TimeModel.objects.filter(TimeModel.elapsed >= threshold).all()
        assert len(samples) == 2
        assert samples[0] == sample
        assert samples[1] == sample4
@pytest.mark.asyncio
async def test_query_with_time_in_filter():
    # NOTE(review): name appears swapped with test_query_with_date_in_filter
    # above — this one filters DateModel by *date* values (``__in``).
    async with base_ormar_config.database:
        await DateModel.objects.create(creation_date=date(2021, 5, 18))
        sample2 = await DateModel.objects.create(creation_date=date(2021, 5, 19))
        sample3 = await DateModel.objects.create(creation_date=date(2021, 5, 20))
        outdated_samples = await DateModel.objects.filter(
            creation_date__in=[date(2021, 5, 19), date(2021, 5, 20)]
        ).all()
        assert len(outdated_samples) == 2
        assert outdated_samples[0] == sample2
        assert outdated_samples[1] == sample3
@pytest.mark.asyncio
async def test_filtering_by_timezone_with_timedelta():
async with base_ormar_config.database:
now_utc = datetime.now(timezone.utc)
object = MyModel(created_at=now_utc)
await object.save()
one_hour_ago = datetime.now(timezone.utc) - timedelta(hours=1)
created_since_one_hour_ago = await MyModel.objects.filter(
created_at__gte=one_hour_ago
).all()
assert len(created_since_one_hour_ago) == 1
collerek-ormar-c09209a/tests/test_model_definition/test_equality_and_hash.py 0000664 0000000 0000000 00000002564 15130200524 0027564 0 ustar 00root root 0000000 0000000 # type: ignore
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Song(ormar.Model):
    """Minimal model for equality and hashing semantics tests."""

    ormar_config = base_ormar_config.copy(tablename="songs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_equality():
    """Saved rows with different pks differ; unsaved instances with the same
    field values compare equal."""
    async with base_ormar_config.database:
        song1 = await Song.objects.create(name="Song")
        song2 = await Song.objects.create(name="Song")
        song3 = Song(name="Song")
        song4 = Song(name="Song")
        assert song1 == song1
        assert song3 == song4
        assert song1 != song2
        assert song1 != song3
        assert song3 != song1
        assert song1 is not None
@pytest.mark.asyncio
async def test_hash_doesnt_change_with_fields_if_pk():
    """With a pk set, the hash is stable even when other fields change."""
    async with base_ormar_config.database:
        song1 = await Song.objects.create(name="Song")
        prev_hash = hash(song1)
        await song1.update(name="Song 2")
        assert hash(song1) == prev_hash
@pytest.mark.asyncio
async def test_hash_changes_with_fields_if_no_pk():
    """Without a pk, the hash is derived from field values and so changes."""
    async with base_ormar_config.database:
        song1 = Song(name="Song")
        prev_hash = hash(song1)
        song1.name = "Song 2"
        assert hash(song1) != prev_hash
collerek-ormar-c09209a/tests/test_model_definition/test_extra_ignore_parameter.py 0000664 0000000 0000000 00000001431 15130200524 0030620 0 ustar 00root root 0000000 0000000 import ormar
from ormar import Extra
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class Child(ormar.Model):
    """Model configured with ``extra=Extra.ignore``: unknown init kwargs are
    silently dropped instead of raising."""

    ormar_config = base_ormar_config.copy(
        tablename="children",
        extra=Extra.ignore,
    )

    id: int = ormar.Integer(name="child_id", primary_key=True)
    first_name: str = ormar.String(name="fname", max_length=100)
    last_name: str = ormar.String(name="lname", max_length=100)
create_test_database = init_tests(base_ormar_config)
def test_allow_extra_parameter():
    """With ``extra=Extra.ignore`` an unknown init kwarg is dropped rather
    than raised or stored on the instance."""
    instance = Child(first_name="Test", last_name="Name", extra_param="Unexpected")
    assert instance.first_name == "Test"
    assert instance.last_name == "Name"
    assert not hasattr(instance, "extra_param")
collerek-ormar-c09209a/tests/test_model_definition/test_field_quoting.py 0000664 0000000 0000000 00000006230 15130200524 0026725 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class SchoolClass(ormar.Model):
    """Table whose name contains a dot, forcing identifier quoting in SQL."""

    ormar_config = base_ormar_config.copy(tablename="app.schoolclasses")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Category(ormar.Model):
    """Second dotted-name table used in the join tests."""

    ormar_config = base_ormar_config.copy(tablename="app.categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Student(ormar.Model):
    """Student with two FKs, exercising joins across dotted table names."""

    ormar_config = base_ormar_config.copy(tablename="app.students")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    gpa: float = ormar.Float()
    schoolclass: Optional[SchoolClass] = ormar.ForeignKey(
        SchoolClass, related_name="students"
    )
    category: Optional[Category] = ormar.ForeignKey(
        Category, nullable=True, related_name="students"
    )
create_test_database = init_tests(base_ormar_config)
async def create_data():
    """Populate two classes, two categories and three students for the tests."""
    math_class = await SchoolClass.objects.create(name="Math")
    logic_class = await SchoolClass.objects.create(name="Logic")
    foreign = await Category.objects.create(name="Foreign")
    domestic = await Category.objects.create(name="Domestic")
    rows = [
        ("Jane", foreign, math_class, 3.2),
        ("Judy", domestic, math_class, 2.6),
        ("Jack", domestic, logic_class, 3.8),
    ]
    for name, category, schoolclass, gpa in rows:
        await Student.objects.create(
            name=name, category=category, schoolclass=schoolclass, gpa=gpa
        )
@pytest.mark.asyncio
async def test_quotes_left_join():
    """OR filter across two FK joins works with quoted (dotted) table names."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            condition = (Student.schoolclass.name == "Math") | (
                Student.category.name == "Foreign"
            )
            for student in await Student.objects.filter(condition).all():
                in_math = student.schoolclass.name == "Math"
                is_foreign = student.category.name == "Foreign"
                assert in_math or is_foreign
@pytest.mark.asyncio
async def test_quotes_reverse_join():
    """Reverse FK join (class -> students) honours quoting of dotted names."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            classes = await SchoolClass.objects.filter(students__gpa__gt=3).all()
            assert all(
                student.gpa > 3
                for schoolclass in classes
                for student in schoolclass.students
            )
@pytest.mark.asyncio
async def test_quotes_deep_join():
    """Two-level reverse join (class -> student -> category) stays quoted."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await create_data()
            classes = await SchoolClass.objects.filter(
                students__category__name="Domestic"
            ).all()
            assert all(
                student.category.name == "Domestic"
                for schoolclass in classes
                for student in schoolclass.students
            )
collerek-ormar-c09209a/tests/test_model_definition/test_fields_access.py 0000664 0000000 0000000 00000017075 15130200524 0026674 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from ormar import BaseField
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class PriceList(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="price_lists")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # m2m relation used below to test deep and reverse field-access chains
    price_lists = ormar.ManyToMany(PriceList, related_name="categories")


class Product(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="product")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    rating: float = ormar.Float(minimum=1, maximum=5)
    category = ormar.ForeignKey(Category)
create_test_database = init_tests(base_ormar_config)
def test_fields_access():
    """Field accessors expose the underlying BaseField plus access metadata."""
    # direct access on the model itself
    assert Product.id._field == Product.ormar_config.model_fields["id"]
    assert Product.id.id == Product.ormar_config.model_fields["id"]
    assert Product.pk.id == Product.id.id
    assert isinstance(Product.id._field, BaseField)
    assert Product.id._access_chain == "id"
    assert Product.id._source_model == Product

    # traversing relations builds a double-underscore access chain
    cases = [
        (Product.category.name, Category, "name", "category__name", Product),
        (
            Product.category.price_lists.name,
            PriceList,
            "name",
            "category__price_lists__name",
            Product,
        ),
        (
            PriceList.categories.products.rating,
            Product,
            "rating",
            "categories__products__rating",
            PriceList,
        ),
    ]
    for proxy, owner, field_name, chain, source in cases:
        assert proxy._field == owner.ormar_config.model_fields[field_name]
        assert proxy._access_chain == chain
        assert proxy._source_model == source

    # comparing a relation proxy itself is not supported
    with pytest.raises(AttributeError):
        assert Product.category >= 3
@pytest.mark.parametrize(
    "method, expected, expected_value",
    [
        ("__eq__", "exact", "Test"),
        ("__lt__", "lt", "Test"),
        ("__le__", "lte", "Test"),
        ("__ge__", "gte", "Test"),
        ("__gt__", "gt", "Test"),
        ("iexact", "iexact", "Test"),
        ("contains", "contains", "Test"),
        ("icontains", "icontains", "Test"),
        ("startswith", "startswith", "Test"),
        ("istartswith", "istartswith", "Test"),
        ("endswith", "endswith", "Test"),
        ("iendswith", "iendswith", "Test"),
        ("isnull", "isnull", "Test"),
        ("in_", "in", "Test"),
        ("__lshift__", "in", "Test"),
        ("__rshift__", "isnull", True),
        ("__mod__", "contains", "Test"),
    ],
)
def test_operator_return_proper_filter_action(method, expected, expected_value):
    """Each operator/method maps to the right ``__<suffix>`` filter kwarg."""
    accessors = [
        (Product.name, "name"),
        (Product.category.name, "category__name"),
        (PriceList.categories.products.rating, "categories__products__rating"),
    ]
    for accessor, chain in accessors:
        group_ = getattr(accessor, method)("Test")
        assert group_._kwargs_dict == {f"{chain}__{expected}": expected_value}
@pytest.mark.parametrize("method, expected_direction", [("asc", ""), ("desc", "desc")])
def test_operator_return_proper_order_action(method, expected_direction):
    """asc()/desc() produce order actions with correct source/target models."""
    cases = [
        # (accessor, source model, target model, orders by the source model?)
        (Product.name, Product, Product, True),
        (Product.category.name, Product, Category, False),
        (PriceList.categories.products.rating, PriceList, Product, False),
    ]
    for accessor, source, target, is_source_order in cases:
        action = getattr(accessor, method)()
        assert action.source_model == source
        assert action.target_model == target
        assert action.direction == expected_direction
        assert bool(action.is_source_model_order) is is_source_order
def test_combining_groups_together():
    """Filter groups combined with &, | and ~ compile to the expected SQL.

    The exact rendered SQL strings are asserted, so statement order and
    grouping below are deliberate - table prefixes for joined tables are
    generated at resolve time and read back from the group's actions.
    """
    # AND of two conditions on the source model
    group = (Product.name == "Test") & (Product.rating >= 3.0)
    group.resolve(model_cls=Product)
    assert len(group._nested_groups) == 2
    assert str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    ) == ("((product.name = 'Test') AND (product.rating >= 3.0))")

    # ~ negates the whole AND group
    group = ~((Product.name == "Test") & (Product.rating >= 3.0))
    group.resolve(model_cls=Product)
    assert len(group._nested_groups) == 2
    assert str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    ) == ("NOT ((product.name = 'Test') AND" " (product.rating >= 3.0))")

    # OR between an AND group and an IN (<<) condition on a joined table
    group = ((Product.name == "Test") & (Product.rating >= 3.0)) | (
        Product.category.name << (["Toys", "Books"])
    )
    group.resolve(model_cls=Product)
    assert len(group._nested_groups) == 2
    assert len(group._nested_groups[0]._nested_groups) == 2
    group_str = str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    # prefix for the joined categories table is assigned during resolve()
    category_prefix = group._nested_groups[1].actions[0].table_prefix
    assert group_str == (
        "(((product.name = 'Test') AND (product.rating >= 3.0)) "
        f"OR ({category_prefix}_categories.name IN ('Toys', 'Books')))"
    )

    # OR chains flatten: a | (b | c) renders as one three-way OR
    group = (Product.name % "Test") | (
        (Product.category.price_lists.name.startswith("Aa"))
        | (Product.category.name << (["Toys", "Books"]))
    )
    group.resolve(model_cls=Product)
    assert len(group._nested_groups) == 2
    assert len(group._nested_groups[1]._nested_groups) == 2
    group_str = str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    price_list_prefix = (
        group._nested_groups[1]._nested_groups[0].actions[0].table_prefix
    )
    category_prefix = group._nested_groups[1]._nested_groups[1].actions[0].table_prefix
    assert group_str == (
        f"((product.name LIKE '%Test%') "
        f"OR ({price_list_prefix}_price_lists.name LIKE 'Aa%') "
        f"OR ({category_prefix}_categories.name IN ('Toys', 'Books')))"
    )

    # AND of a condition with a nested OR keeps the inner parentheses
    group = (Product.name % "Test") & (
        (Product.category.price_lists.name.startswith("Aa"))
        | (Product.category.name << (["Toys", "Books"]))
    )
    group.resolve(model_cls=Product)
    assert len(group._nested_groups) == 2
    assert len(group._nested_groups[1]._nested_groups) == 2
    group_str = str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    price_list_prefix = (
        group._nested_groups[1]._nested_groups[0].actions[0].table_prefix
    )
    category_prefix = group._nested_groups[1]._nested_groups[1].actions[0].table_prefix
    assert group_str == (
        f"((product.name LIKE '%Test%') "
        f"AND (({price_list_prefix}_price_lists.name LIKE 'Aa%') "
        f"OR ({category_prefix}_categories.name IN ('Toys', 'Books'))))"
    )
@pytest.mark.asyncio
async def test_filtering_by_field_access():
    """Field-accessor equality can be passed straight to objects.get()."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            category = await Category(name="Toys").save()
            pony = await Product(
                name="My Little Pony", rating=3.8, category=category
            ).save()
            fetched = await Product.objects.get(Product.name == "My Little Pony")
            assert fetched == pony
collerek-ormar-c09209a/tests/test_model_definition/test_foreign_key_value_used_for_related_model.py 0000664 0000000 0000000 00000003736 15130200524 0034347 0 ustar 00root root 0000000 0000000 import uuid
from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class PageLink(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="pagelinks")

    id: int = ormar.Integer(primary_key=True)
    value: str = ormar.String(max_length=2048)
    country: str = ormar.String(max_length=1000)


class Post(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=500)
    # deleting a PageLink cascades to its posts
    link: PageLink = ormar.ForeignKey(
        PageLink, related_name="posts", ondelete="CASCADE"
    )


class Department(ormar.Model):
    ormar_config = base_ormar_config.copy()

    # Fix: pass the callable itself, not uuid.uuid4() - calling it at
    # class-definition time froze a single UUID as the default for every
    # instance. Sibling models (e.g. UUIDSample) already pass the callable.
    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    name: str = ormar.String(max_length=100)


class Course(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    department: Optional[Department] = ormar.ForeignKey(Department)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_pass_int_values_as_fk():
    """A raw integer pk can be passed in place of a related model instance."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            link = await PageLink(id=1, value="test", country="USA").save()
            await Post.objects.create(title="My post", link=link.id)
            fetched = await Post.objects.select_related("link").get()
            assert fetched.link == link
@pytest.mark.asyncio
async def test_pass_uuid_value_as_fk():
    """A raw UUID pk is accepted as the FK value for a related model."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            dept = await Department(name="Department test").save()
            # passing dept.id (a UUID) instead of the Department instance
            await Course(name="Test course", department=dept.id).save()
collerek-ormar-c09209a/tests/test_model_definition/test_iterate.py 0000664 0000000 0000000 00000021326 15130200524 0025534 0 ustar 00root root 0000000 0000000 import uuid
import ormar
import pytest
from ormar.exceptions import QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class User(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users3")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="")


class User2(ormar.Model):
    # Same shape as User but with a string-formatted UUID primary key, so
    # iterate() is exercised with non-integer pks as well.
    ormar_config = base_ormar_config.copy(tablename="users4")

    id: uuid.UUID = ormar.UUID(
        uuid_format="string", primary_key=True, default=uuid.uuid4
    )
    name: str = ormar.String(max_length=100, default="")


class Task(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tasks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="")
    user: User = ormar.ForeignKey(to=User)


class Task2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tasks2")

    id: uuid.UUID = ormar.UUID(
        uuid_format="string", primary_key=True, default=uuid.uuid4
    )
    name: str = ormar.String(max_length=100, default="")
    user: User2 = ormar.ForeignKey(to=User2)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_empty_result():
    """Iterating an empty table yields nothing - the loop body never runs."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            async for _user in User.objects.iterate():
                pass  # pragma: no cover
@pytest.mark.asyncio
async def test_model_iterator():
    """iterate() yields every created row."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            created = [
                await User.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            async for user in User.objects.iterate():
                assert user in created
@pytest.mark.asyncio
async def test_model_iterator_filter():
    """iterate() accepts filter kwargs and only yields matching rows."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            for name in ("Tom", "Jane", "Lucy"):
                await User.objects.create(name=name)
            async for user in User.objects.iterate(name="Tom"):
                assert user.name == "Tom"
@pytest.mark.asyncio
async def test_model_iterator_relations():
    """iterate() with select_related groups child rows under each parent."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            users = [
                await User.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            for user in users:
                await Task.objects.create(name="task1", user=user)
                await Task.objects.create(name="task2", user=user)
            seen = 0
            async for user in User.objects.select_related(User.tasks).iterate():
                assert len(user.tasks) == 2
                seen += 1
            assert seen == 3
@pytest.mark.asyncio
async def test_model_iterator_relations_queryset_proxy():
    """iterate() also works on the related queryset proxy (user.tasks)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            users = [
                await User.objects.create(name=name) for name in ("Tom", "Jane")
            ]
            for user in users:
                await Task.objects.create(name="task1", user=user)
                await Task.objects.create(name="task2", user=user)
            for owner in users:
                owner_tasks = []
                async for task in owner.tasks.iterate():
                    assert task.name in ("task1", "task2")
                    owner_tasks.append(task)
                assert len(owner_tasks) == 2
@pytest.mark.asyncio
async def test_model_iterator_uneven_number_of_relations():
    """Parents with different child counts are each yielded exactly once."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            tom, jane, lucy = [
                await User.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            for user in (tom, jane):
                await Task.objects.create(name="task1", user=user)
                await Task.objects.create(name="task2", user=user)
            await Task.objects.create(name="task3", user=lucy)
            expected_counts = {"Tom": 2, "Jane": 2, "Lucy": 1}
            seen = 0
            async for user in User.objects.select_related(User.tasks).iterate():
                assert len(user.tasks) == expected_counts[user.name]
                seen += 1
            assert seen == 3
@pytest.mark.asyncio
async def test_model_iterator_uuid_pk():
    """iterate() yields every created row for a UUID-pk model."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            created = [
                await User2.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            async for user in User2.objects.iterate():
                assert user in created
@pytest.mark.asyncio
async def test_model_iterator_filter_uuid_pk():
    """Filtered iterate() works with a UUID primary key."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            for name in ("Tom", "Jane", "Lucy"):
                await User2.objects.create(name=name)
            async for user in User2.objects.iterate(name="Tom"):
                assert user.name == "Tom"
@pytest.mark.asyncio
async def test_model_iterator_relations_uuid_pk():
    """select_related + iterate() groups children correctly for UUID pks."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            users = [
                await User2.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            for user in users:
                await Task2.objects.create(name="task1", user=user)
                await Task2.objects.create(name="task2", user=user)
            seen = 0
            async for user in User2.objects.select_related(User2.task2s).iterate():
                assert len(user.task2s) == 2
                seen += 1
            assert seen == 3
@pytest.mark.asyncio
async def test_model_iterator_relations_queryset_proxy_uuid_pk():
    """Queryset-proxy iterate() works when the related model has a UUID pk."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            users = [
                await User2.objects.create(name=name) for name in ("Tom", "Jane")
            ]
            for user in users:
                await Task2.objects.create(name="task1", user=user)
                await Task2.objects.create(name="task2", user=user)
            for owner in users:
                owner_tasks = []
                async for task in owner.task2s.iterate():
                    assert task.name in ("task1", "task2")
                    owner_tasks.append(task)
                assert len(owner_tasks) == 2
@pytest.mark.asyncio
async def test_model_iterator_uneven_number_of_relations_uuid_pk():
    """Uneven child counts are handled correctly with UUID pks."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            tom, jane, lucy = [
                await User2.objects.create(name=name)
                for name in ("Tom", "Jane", "Lucy")
            ]
            for user in (tom, jane):
                await Task2.objects.create(name="task1", user=user)
                await Task2.objects.create(name="task2", user=user)
            await Task2.objects.create(name="task3", user=lucy)
            expected_counts = {"Tom": 2, "Jane": 2, "Lucy": 1}
            seen = 0
            async for user in User2.objects.select_related(User2.task2s).iterate():
                assert len(user.task2s) == expected_counts[user.name]
                seen += 1
            assert seen == 3
@pytest.mark.asyncio
async def test_model_iterator_with_prefetch_raises_error():
    """iterate() is incompatible with prefetch_related and must raise."""
    async with base_ormar_config.database:
        with pytest.raises(QueryDefinitionError):
            async for _user in User.objects.prefetch_related(User.tasks).iterate():
                pass  # pragma: no cover
collerek-ormar-c09209a/tests/test_model_definition/test_model_construct.py 0000664 0000000 0000000 00000005601 15130200524 0027301 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class NickNames(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    # column name intentionally differs from the attribute name
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")


class NicksHq(ormar.Model):
    # explicit through model for the HQ <-> NickNames m2m relation
    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")


class HQ(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickNames] = ormar.ManyToMany(NickNames, through=NicksHq)


class Company(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_construct_with_empty_relation():
    """model_construct accepts None for a FK just like __init__ does."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await HQ.objects.create(name="Main")
            data = dict(name="Banzai", hq=None, founded=1988)
            via_init = Company(**data)
            via_construct = Company.model_construct(**data)
            assert via_init.model_dump() == via_construct.model_dump()
@pytest.mark.asyncio
async def test_init_and_construct_has_same_effect():
    """model_construct matches __init__ whether the FK is a model or a dict."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            reference = Company(name="Banzai", hq=hq, founded=1988).model_dump()
            from_model = Company.model_construct(
                **dict(name="Banzai", hq=hq, founded=1988)
            )
            assert from_model.model_dump() == reference
            from_dict = Company.model_construct(
                **dict(name="Banzai", hq=hq.model_dump(), founded=1988)
            )
            assert from_dict.model_dump() == reference
@pytest.mark.asyncio
async def test_init_and_construct_has_same_effect_with_m2m():
    """model_construct matches __init__ for m2m lists of models or dicts."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            n1 = await NickNames(name="test").save()
            n2 = await NickNames(name="test2").save()
            reference = HQ(name="Main", nicks=[n1, n2]).model_dump()
            from_models = HQ.model_construct(**dict(name="Main", nicks=[n1, n2]))
            assert from_models.model_dump() == reference
            from_dicts = HQ.model_construct(
                **dict(name="Main", nicks=[n1.model_dump(), n2.model_dump()])
            )
            assert from_dicts.model_dump() == reference
collerek-ormar-c09209a/tests/test_model_definition/test_model_definition.py 0000664 0000000 0000000 00000015320 15130200524 0027404 0 ustar 00root root 0000000 0000000 # type: ignore
import datetime
import decimal
import typing
import ormar
import pydantic
import pytest
import sqlalchemy
from ormar.exceptions import ModelDefinitionError
from ormar.models import Model
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class ExampleModel(Model):
    # One field of (almost) every ormar type, used by the definition tests.
    ormar_config = base_ormar_config.copy(tablename="example")

    test: int = ormar.Integer(primary_key=True)
    test_string: str = ormar.String(max_length=250)
    test_text: str = ormar.Text(default="")
    test_bool: bool = ormar.Boolean(nullable=False)
    test_float = ormar.Float(nullable=True)
    # callables passed as defaults are evaluated per instance
    test_datetime = ormar.DateTime(default=datetime.datetime.now)
    test_date = ormar.Date(default=datetime.date.today)
    test_time = ormar.Time(default=datetime.time)
    test_json = ormar.JSON(default={})
    test_bigint: int = ormar.BigInteger(default=0)
    test_smallint: int = ormar.SmallInteger(default=0)
    # both spellings of decimal precision are accepted by ormar.Decimal
    test_decimal = ormar.Decimal(scale=2, precision=10)
    test_decimal2 = ormar.Decimal(max_digits=10, decimal_places=2)
# Columns asserted to exist on both the pydantic model and the SQLAlchemy
# table. Fix: "test_text" was previously listed twice; the duplicate is
# removed (membership checks are unaffected).
fields_to_check = [
    "test",
    "test_text",
    "test_string",
    "test_datetime",
    "test_date",
    "test_float",
    "test_bigint",
    "test_json",
]
class ExampleModel2(Model):
    ormar_config = base_ormar_config.copy(tablename="example2")

    test: int = ormar.Integer(primary_key=True)
    test_string: str = ormar.String(max_length=250)


class User(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)


class Account(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="accounts")

    id: int = ormar.Integer(primary_key=True)
    # index explicitly disabled - verified in test_foreign_key_index
    user: User = ormar.ForeignKey(User, index=False)


class Purchase(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="purchases")

    id: int = ormar.Integer(primary_key=True)
    # index explicitly enabled - verified in test_foreign_key_index
    user: User = ormar.ForeignKey(User, index=True)
create_test_database = init_tests(base_ormar_config)
@pytest.fixture()
def example():
    """A fully-populated ExampleModel instance shared by several tests."""
    return ExampleModel(
        pk=1,
        test_string="test",
        test_bool=True,
        test_decimal=decimal.Decimal(3.5),
        test_decimal2=decimal.Decimal(5.5),
    )
def test_not_nullable_field_is_required():
    """Omitting the non-nullable test_bool field must fail validation."""
    with pytest.raises(pydantic.ValidationError):
        ExampleModel(test=1, test_string="test")
def test_model_attribute_access(example):
    """Passed values and field defaults are readable; attributes are writable."""
    expected = {
        "test": 1,
        "test_string": "test",
        "test_date": datetime.date.today(),
        "test_text": "",
        "test_float": None,
        "test_bigint": 0,
        "test_json": {},
        "test_decimal": 3.5,
        "test_decimal2": 5.5,
    }
    for attr, value in expected.items():
        assert getattr(example, attr) == value
    assert example.test_datetime.year == datetime.datetime.now().year
    example.test = 12
    assert example.test == 12
    example._orm_saved = True
    assert example._orm_saved
def test_model_attribute_json_access(example):
    """Assigning a dict to a JSON field stores and returns it unchanged."""
    payload = {"aa": 12}
    example.test_json = payload
    assert example.test_json == payload
def test_missing_metadata():
    """OrmarConfig built without metadata must raise ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        class JsonSample2(ormar.Model):
            # database given, metadata missing
            ormar_config = ormar.OrmarConfig(
                tablename="jsons2",
                database=base_ormar_config.database,
            )

            id: int = ormar.Integer(primary_key=True)
            test_json = ormar.JSON(nullable=True)
def test_missing_database():
    """OrmarConfig built without a database must raise ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        class JsonSample3(ormar.Model):
            ormar_config = ormar.OrmarConfig(tablename="jsons3")

            id: int = ormar.Integer(primary_key=True)
            test_json = ormar.JSON(nullable=True)
def test_wrong_pydantic_config():
    """A model_config of the wrong type (a list) raises ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        class ErrorSample(ormar.Model):
            model_config = ["test"]
            ormar_config = ormar.OrmarConfig(tablename="jsons3")

            id: int = ormar.Integer(primary_key=True)
            test_json = ormar.JSON(nullable=True)
def test_non_existing_attr(example):
    """Setting an attribute that is not a declared field raises ValueError."""
    with pytest.raises(ValueError):
        example.new_attr = 12
def test_primary_key_access_and_setting(example):
    """pk aliases the declared primary-key column in both directions."""
    assert example.pk == 1
    example.pk = 2
    assert (example.pk, example.test) == (2, 2)
def test_pydantic_model_is_created(example):
    """The ormar model is a pydantic model exposing all declared fields."""
    model_cls = example.__class__
    assert issubclass(model_cls, pydantic.BaseModel)
    assert all(field in model_cls.model_fields for field in fields_to_check)
    assert example.test == 1
def test_sqlalchemy_table_is_created(example):
    """A SQLAlchemy table with all declared columns is generated."""
    table = example.ormar_config.table
    assert issubclass(table.__class__, sqlalchemy.Table)
    assert all(field in table.columns for field in fields_to_check)
@typing.no_type_check
def test_no_pk_in_model_definition():
    """A model with no primary-key column raises ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):  # type: ignore

        class ExampleModel2(Model):  # type: ignore
            ormar_config = base_ormar_config.copy(tablename="example2")

            test_string: str = ormar.String(max_length=250)  # type: ignore
@typing.no_type_check
def test_two_pks_in_model_definition():
    """Declaring two primary keys raises ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        @typing.no_type_check
        class ExampleModel2(Model):
            ormar_config = base_ormar_config.copy(tablename="example3")

            id: int = ormar.Integer(primary_key=True)
            test_string: str = ormar.String(max_length=250, primary_key=True)
@typing.no_type_check
def test_decimal_error_in_model_definition():
    """Decimal declared without precision/scale raises ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        class ExampleModel2(Model):
            ormar_config = base_ormar_config.copy(tablename="example5")

            test: decimal.Decimal = ormar.Decimal(primary_key=True)
@typing.no_type_check
def test_binary_error_without_length_model_definition():
    """LargeBinary with an invalid (negative) max_length is rejected."""
    with pytest.raises(ModelDefinitionError):

        class ExampleModel2(Model):
            ormar_config = base_ormar_config.copy(tablename="example6")

            test: bytes = ormar.LargeBinary(primary_key=True, max_length=-1)
@typing.no_type_check
def test_string_error_in_model_definition():
    """String with a max_length of 0 raises ModelDefinitionError."""
    with pytest.raises(ModelDefinitionError):

        class ExampleModel2(Model):
            ormar_config = base_ormar_config.copy(tablename="example6")

            test: str = ormar.String(primary_key=True, max_length=0)
@typing.no_type_check
def test_json_conversion_in_model():
    """A datetime is not valid JSON input and must fail validation."""
    with pytest.raises(pydantic.ValidationError):
        ExampleModel(
            test=1,
            test_string="test",
            test_bool=True,
            test_json=datetime.datetime.now(),
        )
def test_foreign_key_index():
    """index=False / index=True on a ForeignKey propagates to the column."""
    assert Account.ormar_config.table.columns.user.index is False
    assert Purchase.ormar_config.table.columns.user.index is True
collerek-ormar-c09209a/tests/test_model_definition/test_models.py 0000664 0000000 0000000 00000050641 15130200524 0025364 0 ustar 00root root 0000000 0000000 import asyncio
import base64
import datetime
import os
import uuid
from enum import Enum
import ormar
import pydantic
import pytest
import sqlalchemy
from ormar.exceptions import ModelError, NoMatch, QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class JsonSample(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="jsons")

    id: int = ormar.Integer(primary_key=True)
    test_json = ormar.JSON(nullable=True)


# fixed payloads shared by the binary-column tests
blob = b"test"
blob2 = b"test2icac89uc98"


class LargeBinarySample(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="my_bolbs")

    id: int = ormar.Integer(primary_key=True)
    test_binary: bytes = ormar.LargeBinary(max_length=100000)


# random payloads used by the base64-represented binary tests
blob3 = os.urandom(64)
blob4 = os.urandom(100)


class LargeBinaryStr(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="my_str_bolbs")

    id: int = ormar.Integer(primary_key=True)
    # stored as bytes but exposed to Python as a base64-encoded string
    test_binary: str = ormar.LargeBinary(
        max_length=100000, represent_as_base64_str=True
    )


class LargeBinaryNullableStr(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="my_str_bolbs2")

    id: int = ormar.Integer(primary_key=True)
    test_binary: str = ormar.LargeBinary(
        max_length=100000,
        represent_as_base64_str=True,
        nullable=True,
    )


class UUIDSample(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="uuids")

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    test_text: str = ormar.Text()


class UUIDSample2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="uuids2")

    # same as UUIDSample, but the UUID is stored in string format
    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
    test_text: str = ormar.Text()
class User(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, default="")


class User2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="users2")

    # string primary key instead of an autoincrement integer
    id: str = ormar.String(primary_key=True, max_length=100)
    name: str = ormar.String(max_length=100, default="")


class Product(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="product")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    rating: int = ormar.Integer(minimum=1, maximum=5)
    in_stock: bool = ormar.Boolean(default=False)
    last_delivery: datetime.date = ormar.Date(default=datetime.date.today)


class CountryNameEnum(Enum):
    CANADA = "Canada"
    ALGERIA = "Algeria"
    USA = "United States"
    BELIZE = "Belize"


class CountryCodeEnum(int, Enum):
    MINUS_TEN = -10
    ONE = 1
    TWO_HUNDRED_THIRTEEN = 213
    THOUSAND_TWO_HUNDRED = 1200


class Country(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="country")

    id: int = ormar.Integer(primary_key=True)
    # enum-backed columns; defaults given as raw enum values
    name: CountryNameEnum = ormar.Enum(enum_class=CountryNameEnum, default="Canada")
    taxed: bool = ormar.Boolean(default=True)
    country_code: int = ormar.Enum(enum_class=CountryCodeEnum, default=1)


class NullableCountry(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="country2")

    id: int = ormar.Integer(primary_key=True)
    name: CountryNameEnum = ormar.Enum(enum_class=CountryNameEnum, nullable=True)


class NotNullableCountry(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="country3")

    id: int = ormar.Integer(primary_key=True)
    name: CountryNameEnum = ormar.Enum(enum_class=CountryNameEnum, nullable=False)
create_test_database = init_tests(base_ormar_config)
def test_model_class():
    """Declared fields are registered as FieldInfo objects with their options."""
    fields = User.ormar_config.model_fields
    assert list(fields.keys()) == ["id", "name"]
    assert issubclass(fields["id"].__class__, pydantic.fields.FieldInfo)
    assert fields["id"].primary_key is True
    assert isinstance(fields["name"], pydantic.fields.FieldInfo)
    assert fields["name"].max_length == 100
    assert isinstance(User.ormar_config.table, sqlalchemy.Table)
def test_wrong_field_name():
    """Passing an unknown field name to the constructor raises ModelError."""
    with pytest.raises(ModelError):
        User(non_existing_pk=1)
def test_model_pk():
    """pk is an alias for the declared primary-key column (id)."""
    user = User(pk=1)
    assert (user.pk, user.id) == (1, 1)
@pytest.mark.asyncio
async def test_json_column():
    """JSON fields accept dicts and JSON strings and round-trip as objects."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await JsonSample.objects.create(test_json={"aa": 12})
            await JsonSample.objects.create(test_json='{"aa": 12}')
            items = await JsonSample.objects.all()
            assert len(items) == 2
            assert [item.test_json for item in items] == [{"aa": 12}, {"aa": 12}]
            # assigning a JSON string parses it on access
            items[0].test_json = "[1, 2, 3]"
            assert items[0].test_json == [1, 2, 3]
@pytest.mark.asyncio
async def test_binary_column():
    """LargeBinary stores raw bytes; assigning a str coerces it to bytes."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            for payload in (blob, blob2):
                await LargeBinarySample.objects.create(test_binary=payload)
            items = await LargeBinarySample.objects.all()
            assert len(items) == 2
            assert [item.test_binary for item in items] == [blob, blob2]
            items[0].test_binary = "test2icac89uc98"
            assert items[0].test_binary == b"test2icac89uc98"
@pytest.mark.asyncio
async def test_binary_str_column():
async with base_ormar_config.database:
async with base_ormar_config.database.transaction(force_rollback=True):
await LargeBinaryStr(test_binary=blob3).save()
await LargeBinaryStr.objects.create(test_binary=blob4)
items = await LargeBinaryStr.objects.all()
assert len(items) == 2
assert items[0].test_binary == base64.b64encode(blob3).decode()
items[0].test_binary = base64.b64encode(blob4).decode()
assert items[0].test_binary == base64.b64encode(blob4).decode()
assert items[1].test_binary == base64.b64encode(blob4).decode()
assert items[1].__dict__["test_binary"] == blob4
@pytest.mark.asyncio
async def test_binary_nullable_str_column():
    """Nullable base64-str binary column works through save/create,
    bulk_update, bulk_create and instance update."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            # Rows can be created with the binary field left empty.
            await LargeBinaryNullableStr().save()
            await LargeBinaryNullableStr.objects.create()
            items = await LargeBinaryNullableStr.objects.all()
            assert len(items) == 2

            # Populate via bulk_update with raw bytes.
            items[0].test_binary = blob3
            items[1].test_binary = blob4
            await LargeBinaryNullableStr.objects.bulk_update(items)
            items = await LargeBinaryNullableStr.objects.all()
            assert len(items) == 2
            assert items[0].test_binary == base64.b64encode(blob3).decode()

            # Attribute access is base64 text; __dict__ keeps raw bytes.
            items[0].test_binary = base64.b64encode(blob4).decode()
            assert items[0].test_binary == base64.b64encode(blob4).decode()
            assert items[1].test_binary == base64.b64encode(blob4).decode()
            assert items[1].__dict__["test_binary"] == blob4

            # bulk_create mixes empty and populated instances.
            await LargeBinaryNullableStr.objects.bulk_create(
                [LargeBinaryNullableStr(), LargeBinaryNullableStr(test_binary=blob3)]
            )
            items = await LargeBinaryNullableStr.objects.all()
            assert len(items) == 4

            # Instance-level update persists and re-reads as base64 text.
            await items[0].update(test_binary=blob4)
            check_item = await LargeBinaryNullableStr.objects.get(id=items[0].id)
            assert check_item.test_binary == base64.b64encode(blob4).decode()
@pytest.mark.asyncio
async def test_uuid_column():
    """UUID primary keys round-trip through create, filter, get and copy."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            u1 = await UUIDSample.objects.create(test_text="aa")
            u2 = await UUIDSample.objects.create(test_text="bb")

            items = await UUIDSample.objects.all()
            assert len(items) == 2
            # Values come back as uuid.UUID instances, unique per row.
            assert isinstance(items[0].id, uuid.UUID)
            assert isinstance(items[1].id, uuid.UUID)
            assert items[0].id in (u1.id, u2.id)
            assert items[1].id in (u1.id, u2.id)
            assert items[0].id != items[1].id

            # UUIDs are usable as filter/get arguments.
            item = await UUIDSample.objects.filter(id=u1.id).get()
            assert item.id == u1.id
            item2 = await UUIDSample.objects.first()
            item3 = await UUIDSample.objects.get(pk=item2.id)
            assert item2.id == item3.id
            assert isinstance(item3.id, uuid.UUID)

            # The same pk can be saved into a second model (UUIDSample2,
            # defined earlier) and read back from both tables.
            u3 = await UUIDSample2(**u1.model_dump()).save()
            u1_2 = await UUIDSample.objects.get(pk=u3.id)
            assert u1_2 == u1
            u4 = await UUIDSample2.objects.get(pk=u3.id)
            assert u3 == u4
@pytest.mark.asyncio
async def test_model_crud():
    """Full create / read / update / delete cycle on a simple model."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            users = await User.objects.all()
            assert users == []

            user = await User.objects.create(name="Tom")
            users = await User.objects.all()
            assert user.name == "Tom"
            assert user.pk is not None
            assert users == [user]

            lookup = await User.objects.get()
            assert lookup == user

            await user.update(name="Jane")
            users = await User.objects.all()
            assert user.name == "Jane"
            assert user.pk is not None
            assert users == [user]

            await user.delete()
            users = await User.objects.all()
            assert users == []


@pytest.mark.asyncio
async def test_model_get():
    """get() raises NoMatch on empty, MultipleMatches on duplicates,
    and get_or_none() returns None instead of raising."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            with pytest.raises(ormar.NoMatch):
                await User.objects.get()
            assert await User.objects.get_or_none() is None

            user = await User.objects.create(name="Tom")
            lookup = await User.objects.get()
            assert lookup == user

            user2 = await User.objects.create(name="Jane")
            await User.objects.create(name="Jane")
            with pytest.raises(ormar.MultipleMatches):
                await User.objects.get(name="Jane")

            same_user = await User.objects.get(pk=user2.id)
            assert same_user.id == user2.id
            assert same_user.pk == user2.pk
            # order_by + get returns the first row of the ordered set.
            assert await User.objects.order_by("-name").get() == user
@pytest.mark.asyncio
async def test_model_filter():
    """Filter operators (iexact, gte, icontains, exclude) and escaping of
    the SQL LIKE wildcard ``%`` inside contains/iexact lookups."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Jane")
            await User.objects.create(name="Lucy")

            user = await User.objects.get(name="Lucy")
            assert user.name == "Lucy"
            with pytest.raises(ormar.NoMatch):
                await User.objects.get(name="Jim")

            await Product.objects.create(name="T-Shirt", rating=5, in_stock=True)
            await Product.objects.create(name="Dress", rating=4)
            await Product.objects.create(name="Coat", rating=3, in_stock=True)

            product = await Product.objects.get(name__iexact="t-shirt", rating=5)
            assert product.pk is not None
            assert product.name == "T-Shirt"
            assert product.rating == 5
            # last_delivery presumably defaults to today - model defined earlier.
            assert product.last_delivery == datetime.datetime.now().date()

            products = await Product.objects.all(rating__gte=2, in_stock=True)
            assert len(products) == 2
            products = await Product.objects.all(name__icontains="T")
            assert len(products) == 2
            products = await Product.objects.exclude(rating__gte=4).all()
            assert len(products) == 1
            # exclude with several kwargs negates their conjunction.
            products = await Product.objects.exclude(rating__gte=4, in_stock=True).all()
            assert len(products) == 2
            products = await Product.objects.exclude(in_stock=True).all()
            assert len(products) == 1
            products = await Product.objects.exclude(name__icontains="T").all()
            assert len(products) == 1

            # Test escaping % character from icontains, contains, and iexact
            await Product.objects.create(name="100%-Cotton", rating=3)
            await Product.objects.create(name="Cotton-100%-Egyptian", rating=3)
            await Product.objects.create(name="Cotton-100%", rating=3)
            products = Product.objects.filter(name__iexact="100%-cotton")
            assert await products.count() == 1
            products = Product.objects.filter(name__contains="%")
            assert await products.count() == 3
            products = Product.objects.filter(name__icontains="%")
            assert await products.count() == 3
@pytest.mark.asyncio
async def test_wrong_query_contains_model():
    """Using a Model instance as a ``__contains`` filter value is rejected
    with QueryDefinitionError.

    Fix: the instance is now built *outside* the raises-block so the test
    can only pass when the query itself raises - previously a validation
    error in the constructor would also have satisfied the assertion.
    """
    async with base_ormar_config.database:
        product = Product(name="90%-Cotton", rating=2)
        with pytest.raises(QueryDefinitionError):
            await Product.objects.filter(name__contains=product).count()
@pytest.mark.asyncio
async def test_model_exists():
    """exists() reflects whether the filtered queryset has any rows."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            assert await User.objects.filter(name="Tom").exists() is True
            assert await User.objects.filter(name="Jane").exists() is False


@pytest.mark.asyncio
async def test_model_count():
    """count() works both bare and combined with filters."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Jane")
            await User.objects.create(name="Lucy")
            assert await User.objects.count() == 3
            assert await User.objects.filter(name__icontains="T").count() == 1


@pytest.mark.asyncio
async def test_model_limit():
    """limit() caps the number of returned rows."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Jane")
            await User.objects.create(name="Lucy")
            assert len(await User.objects.limit(2).all()) == 2


@pytest.mark.asyncio
async def test_model_limit_with_filter():
    """limit() composes with a filter applied after it."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Tom")
            await User.objects.create(name="Tom")
            assert (
                len(await User.objects.limit(2).filter(name__iexact="Tom").all()) == 2
            )


@pytest.mark.asyncio
async def test_offset():
    """offset() skips rows before limit() is applied."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Jane")
            users = await User.objects.offset(1).limit(1).all()
            assert users[0].name == "Jane"
@pytest.mark.asyncio
async def test_model_first():
    """first() raises NoMatch on empty sets; first_or_none() returns None.
    Both accept filter kwargs and compose with filter()/order_by()."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            with pytest.raises(ormar.NoMatch):
                await User.objects.first()
            assert await User.objects.first_or_none() is None

            tom = await User.objects.create(name="Tom")
            jane = await User.objects.create(name="Jane")
            assert await User.objects.first() == tom
            assert await User.objects.first(name="Jane") == jane
            assert await User.objects.filter(name="Jane").first() == jane
            with pytest.raises(NoMatch):
                await User.objects.filter(name="Lucy").first()
            assert await User.objects.first_or_none(name="Lucy") is None
            assert await User.objects.filter(name="Lucy").first_or_none() is None
            assert await User.objects.order_by("name").first() == jane
@pytest.mark.asyncio
async def test_model_choices():
    """Test that enum validation works properly for various types of fields."""
    async with base_ormar_config.database:
        # Test valid enums values.
        await asyncio.gather(
            Country.objects.create(name="Canada", taxed=True, country_code=1),
            Country.objects.create(name="Algeria", taxed=True, country_code=213),
            Country.objects.create(name="Algeria"),
        )
        # An invalid name is rejected on create.
        with pytest.raises(ValueError):
            name, taxed, country_code = "Saudi Arabia", True, 1
            await Country.objects.create(
                name=name, taxed=taxed, country_code=country_code
            )
        # An invalid country_code is rejected on create.
        with pytest.raises(ValueError):
            name, taxed, country_code = "Algeria", True, 967
            await Country.objects.create(
                name=name, taxed=taxed, country_code=country_code
            )
        # test setting after init also triggers validation
        with pytest.raises(ValueError):
            country = Country()
            country.country_code = 967
        with pytest.raises(ValueError):
            country = Country()
            country.name = "Saudi Arabia"
        # An invalid value is rejected on save().
        with pytest.raises(ValueError):
            await Country(name="Belize").save()
        # BUG FIX: the queryset-level update check used to share the raises
        # block above, so it never executed (save() already raised). It now
        # runs in its own block against an existing, valid row.
        with pytest.raises(ValueError):
            await Country.objects.filter(name="Algeria").update(name="Vietnam")
@pytest.mark.asyncio
async def test_nullable_field_model_enum():
    """Test that enum fields behave according to their nullable setting."""
    async with base_ormar_config.database:
        c1 = await NullableCountry(name=None).save()
        assert c1.name is None
        with pytest.raises(ValueError):
            await NotNullableCountry(name=None).save()


@pytest.mark.asyncio
async def test_start_and_end_filters():
    """startswith/endswith lookups and their case-insensitive i-variants."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Markos Uj")
            await User.objects.create(name="Maqua Bigo")
            await User.objects.create(name="maqo quidid")
            await User.objects.create(name="Louis Figo")
            await User.objects.create(name="Loordi Kami")
            await User.objects.create(name="Yuuki Sami")

            users = await User.objects.filter(name__startswith="Mar").all()
            assert len(users) == 1
            users = await User.objects.filter(name__istartswith="ma").all()
            assert len(users) == 3
            users = await User.objects.filter(name__istartswith="Maq").all()
            assert len(users) == 2
            users = await User.objects.filter(name__iendswith="AMI").all()
            assert len(users) == 2
            users = await User.objects.filter(name__endswith="Uj").all()
            assert len(users) == 1
            users = await User.objects.filter(name__endswith="igo").all()
            assert len(users) == 2


@pytest.mark.asyncio
async def test_get_and_first():
    """Unfiltered get() returns the last row and first() the first row,
    ordered by primary key - integer pk for User, string pk for User2."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await User.objects.create(name="Tom")
            await User.objects.create(name="Jane")
            await User.objects.create(name="Lucy")
            await User.objects.create(name="Zack")
            await User.objects.create(name="Ula")

            user = await User.objects.get()
            assert user.name == "Ula"
            user = await User.objects.first()
            assert user.name == "Tom"

            # String pk sorts lexicographically, hence Zack/Jane below.
            await User2.objects.create(id="Tom", name="Tom")
            await User2.objects.create(id="Jane", name="Jane")
            await User2.objects.create(id="Lucy", name="Lucy")
            await User2.objects.create(id="Zack", name="Zack")
            await User2.objects.create(id="Ula", name="Ula")

            user = await User2.objects.get()
            assert user.name == "Zack"
            user = await User2.objects.first()
            assert user.name == "Jane"


def test_constraints():
    """Field constraints (rating le=5, presumably) raise ValidationError."""
    with pytest.raises(pydantic.ValidationError) as e:
        Product(name="T-Shirt", rating=50, in_stock=True)
    assert "Input should be less than or equal to 5 " in str(e.value)
collerek-ormar-c09209a/tests/test_model_definition/test_models_are_pickable.py 0000664 0000000 0000000 00000002610 15130200524 0030036 0 ustar 00root root 0000000 0000000 import pickle
from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class User(ormar.Model):
    # Simple model with a JSON field - used to check pickling of plain data.
    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    properties = ormar.JSON(nullable=True)


class Post(ormar.Model):
    # Related model - used to check pickling of relations (user.posts).
    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    created_by: Optional[User] = ormar.ForeignKey(User)


# Creates the schema for the models above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_dumping_and_loading_model_works():
    """A model with JSON data and a populated relation survives a
    pickle round-trip and remains usable (load/update) afterwards."""
    async with base_ormar_config.database:
        user = await User(name="Test", properties={"aa": "bb"}).save()
        post = Post(name="Test post")
        await user.posts.add(post)

        pickled_value = pickle.dumps(user)
        python_value = pickle.loads(pickled_value)

        # Scalar fields, JSON data and related objects are all restored.
        assert isinstance(python_value, User)
        assert python_value.name == "Test"
        assert python_value.properties == {"aa": "bb"}
        assert python_value.posts[0].name == "Test post"

        # The unpickled instance is still connected enough to hit the DB.
        await python_value.load()
        await python_value.update(name="Test2")
        check = await User.objects.get()
        assert check.name == "Test2"
collerek-ormar-c09209a/tests/test_model_definition/test_overwriting_pydantic_field_type.py 0000664 0000000 0000000 00000003647 15130200524 0032563 0 ustar 00root root 0000000 0000000 from typing import Dict, Optional
import ormar
import pydantic
import pytest
from pydantic import Json, PositiveInt, ValidationError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class OverwriteTest(ormar.Model):
    # Fields whose pydantic validation type is overridden via
    # ``overwrite_pydantic_type`` while the column type stays the same.
    ormar_config = base_ormar_config.copy(tablename="overwrites")

    id: int = ormar.Integer(primary_key=True)
    my_int: int = ormar.Integer(overwrite_pydantic_type=PositiveInt)
    constraint_dict: Json = ormar.JSON(
        overwrite_pydantic_type=Optional[Json[Dict[str, int]]]
    )  # type: ignore


class User(ormar.Model):
    # String column validated as an e-mail address on the pydantic side.
    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)
    email: str = ormar.String(
        max_length=255,
        unique=True,
        nullable=False,
        overwrite_pydantic_type=pydantic.EmailStr,
    )


# Creates the schema for the models above in the test database.
create_test_database = init_tests(base_ormar_config)
def test_constraints():
    """Overridden pydantic types enforce their own validation rules."""
    # PositiveInt rejects negative values.
    with pytest.raises(ValidationError, match="Input should be greater than 0"):
        OverwriteTest(my_int=-10)
    # Json[Dict[str, int]] rejects non-int dict values.
    with pytest.raises(
        ValidationError,
        match="Input should be a valid integer, unable to parse string as an integer",
    ):
        OverwriteTest(my_int=10, constraint_dict={"aa": "ab"})
    # EmailStr rejects malformed addresses (message differs across
    # email-validator versions, hence the alternation).
    with pytest.raises(
        ValidationError,
        match=(
            r"The email address is not valid. It must have exactly one @-sign|"
            r"An email address must have an @-sign"
        ),
    ):
        User(email="wrong")


@pytest.mark.asyncio
async def test_saving():
    """Valid values for overridden-type fields persist and read back."""
    async with base_ormar_config.database:
        await OverwriteTest(my_int=5, constraint_dict={"aa": 123}).save()
        test = await OverwriteTest.objects.get()
        assert test.my_int == 5
        assert test.constraint_dict == {"aa": 123}

        await User(email="test@as.eu").save()
        test = await User.objects.get()
        assert test.email == "test@as.eu"
collerek-ormar-c09209a/tests/test_model_definition/test_overwriting_sql_nullable.py 0000664 0000000 0000000 00000002476 15130200524 0031220 0 ustar 00root root 0000000 0000000 import sqlite3
from typing import Optional
import asyncpg
import ormar
import pymysql
import pytest
from sqlalchemy import text
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class PrimaryModel(ormar.Model):
    # Fields that are Optional on the pydantic side (nullable=True) but
    # NOT NULL in SQL (sql_nullable=False); the second one relies on a
    # server_default to satisfy the constraint.
    ormar_config = base_ormar_config.copy(tablename="primary_models")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, index=True)
    some_text: Optional[str] = ormar.Text(nullable=True, sql_nullable=False)
    some_other_text: Optional[str] = ormar.String(
        max_length=255, nullable=True, sql_nullable=False, server_default=text("''")
    )


# Creates the schema for the model above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_create_models():
    """sql_nullable=False is enforced by the database, not by pydantic:
    omitting the server-defaulted column is fine, omitting the plain
    NOT NULL column raises the backend's integrity error."""
    async with base_ormar_config.database:
        primary = await PrimaryModel(
            name="Foo", some_text="Bar", some_other_text="Baz"
        ).save()
        assert primary.id == 1

        # some_other_text omitted - covered by server_default(''), so OK.
        primary2 = await PrimaryModel(name="Foo2", some_text="Bar2").save()
        assert primary2.id == 2

        # some_text omitted - NOT NULL without default; each supported
        # backend raises its own integrity error type.
        with pytest.raises(
            (
                sqlite3.IntegrityError,
                pymysql.IntegrityError,
                asyncpg.exceptions.NotNullViolationError,
            )
        ):
            await PrimaryModel(name="Foo3").save()
collerek-ormar-c09209a/tests/test_model_definition/test_pk_field_is_always_not_null.py 0000664 0000000 0000000 00000001450 15130200524 0031635 0 ustar 00root root 0000000 0000000 import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class AutoincrementModel(ormar.Model):
    # Default autoincrement integer pk.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)


class NonAutoincrementModel(ormar.Model):
    # Explicitly non-autoincrement pk.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True, autoincrement=False)


class ExplicitNullableModel(ormar.Model):
    # nullable=True on a pk - expected to be overridden to NOT NULL.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True, nullable=True)


# Creates the schema for the models above in the test database.
create_test_database = init_tests(base_ormar_config)
def test_pk_field_is_not_null():
    """The pk column is always NOT NULL, whatever flags it was declared with."""
    models = (AutoincrementModel, NonAutoincrementModel, ExplicitNullableModel)
    for model_cls in models:
        id_column = model_cls.ormar_config.table.c.get("id")
        assert not id_column.nullable
collerek-ormar-c09209a/tests/test_model_definition/test_properties.py 0000664 0000000 0000000 00000004012 15130200524 0026264 0 ustar 00root root 0000000 0000000 # type: ignore
import ormar
import pytest
from pydantic import PydanticUserError, computed_field
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Song(ormar.Model):
    # Model mixing DB columns with pydantic computed fields, used to check
    # that computed fields appear in model_dump()/model_dump_json().
    ormar_config = base_ormar_config.copy(tablename="songs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    sort_order: int = ormar.Integer()

    @computed_field
    def sorted_name(self) -> str:
        # Derived value combining two stored columns.
        return f"{self.sort_order}: {self.name}"

    @computed_field
    def sample(self) -> str:
        return "sample"

    @computed_field
    def sample2(self) -> str:
        return "sample2"


# Creates the schema for the model above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_sort_order_on_main_model():
    """Computed fields show up in dumps and respect include/exclude sets."""
    async with base_ormar_config.database:
        await Song.objects.create(name="Song 3", sort_order=3)
        await Song.objects.create(name="Song 1", sort_order=1)
        await Song.objects.create(name="Song 2", sort_order=2)

        songs = await Song.objects.all()
        song_dict = [song.model_dump() for song in songs]
        assert all("sorted_name" in x for x in song_dict)
        assert all(
            x["sorted_name"] == f"{x['sort_order']}: {x['name']}" for x in song_dict
        )

        song_json = [song.model_dump_json() for song in songs]
        assert all("sorted_name" in x for x in song_json)

        # include={...} keeps only the listed computed field...
        check_include = songs[0].model_dump(include={"sample"})
        assert "sample" in check_include
        assert "sample2" not in check_include
        assert "sorted_name" not in check_include

        # ...while exclude={...} removes only the listed one.
        check_include = songs[0].model_dump(exclude={"sample"})
        assert "sample" not in check_include
        assert "sample2" in check_include
        assert "sorted_name" in check_include


def test_wrong_definition():
    """computed_field on a method with extra parameters is rejected."""
    with pytest.raises(PydanticUserError):

        class WrongModel(ormar.Model):  # pragma: no cover
            @computed_field
            def test(self, aa=10, bb=30):
                pass
collerek-ormar-c09209a/tests/test_model_definition/test_pydantic_fields.py 0000664 0000000 0000000 00000007061 15130200524 0027240 0 ustar 00root root 0000000 0000000 import random
from typing import Optional
import ormar
import pytest
from pydantic import BaseModel, Field, HttpUrl
from pydantic_extra_types.payment import PaymentCardNumber
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class ModelTest(ormar.Model):
    # ``url`` and ``number`` are pydantic-only fields (no ormar column) -
    # they are validated but not persisted to the database.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200)
    url: HttpUrl = "https://www.example.com"  # type: ignore
    number: Optional[PaymentCardNumber] = None
# Pool of test card numbers consumed by the PaymentCardNumber fields below.
CARD_NUMBERS = [
    "123456789007",
    "123456789015",
    "123456789023",
    "123456789031",
    "123456789049",
]


def get_number():
    """Pick one of the predefined test card numbers uniformly at random."""
    return CARD_NUMBERS[random.randrange(len(CARD_NUMBERS))]
class ModelTest2(ormar.Model):
    # Pydantic-only field with a default_factory - a fresh random number is
    # generated for every instance (including rows re-read from the DB).
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200)
    url: HttpUrl = "https://www.example2.com"  # type: ignore
    number: PaymentCardNumber = Field(default_factory=get_number)


class PydanticTest(BaseModel):
    # Plain pydantic model embedded as a pydantic-only field on ModelTest3.
    aa: str
    bb: int


class ModelTest3(ormar.Model):
    ormar_config = base_ormar_config.copy()

    def __init__(self, **kwargs):
        # Pydantic-only fields populated in __init__ instead of defaults.
        kwargs["number"] = get_number()
        kwargs["pydantic_test"] = PydanticTest(aa="random", bb=42)
        super().__init__(**kwargs)

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200)
    url: HttpUrl = "https://www.example3.com"  # type: ignore
    number: PaymentCardNumber
    pydantic_test: PydanticTest


# Creates the schema for the models above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_working_with_pydantic_fields():
    """Pydantic-only fields are usable in memory but are NOT persisted -
    a re-read row falls back to the field defaults."""
    async with base_ormar_config.database:
        test = ModelTest(name="Test")
        assert test.name == "Test"
        assert test.url == "https://www.example.com"
        assert test.number is None
        test.number = "123456789015"
        test.url = "https://www.sdta.ada.pt"
        assert test.url == "https://www.sdta.ada.pt"

        await test.save()
        test_check = await ModelTest.objects.get()
        # Only DB columns survived; url/number reverted to defaults.
        assert test_check.name == "Test"
        assert test_check.url == "https://www.example.com"
        assert test_check.number is None


@pytest.mark.asyncio
async def test_default_factory_for_pydantic_fields():
    """default_factory re-runs on every instantiation, so a re-read row
    gets a fresh value (the explicit 4000... number is never in the pool)."""
    async with base_ormar_config.database:
        test = ModelTest2(name="Test2", number="4000000000000002")
        assert test.name == "Test2"
        assert test.url == "https://www.example2.com"
        assert test.number == "4000000000000002"
        test.url = "http://www.sdta.ada.pt"
        assert test.url == "http://www.sdta.ada.pt"

        await test.save()
        test_check = await ModelTest2.objects.get()
        assert test_check.name == "Test2"
        assert test_check.url == "https://www.example2.com"
        assert test_check.number in CARD_NUMBERS
        assert test_check.number != test.number


@pytest.mark.asyncio
async def test_init_setting_for_pydantic_fields():
    """Fields injected in __init__ are regenerated on every instantiation,
    including instances built from fetched rows."""
    async with base_ormar_config.database:
        test = ModelTest3(name="Test3")
        assert test.name == "Test3"
        assert test.url == "https://www.example3.com"
        assert test.pydantic_test.bb == 42
        test.url = "http://www.sdta.ada.pt"
        assert test.url == "http://www.sdta.ada.pt"

        await test.save()
        test_check = await ModelTest3.objects.get()
        assert test_check.name == "Test3"
        assert test_check.url == "https://www.example3.com"
        assert test_check.number in CARD_NUMBERS
        assert test_check.pydantic_test.aa == "random"
collerek-ormar-c09209a/tests/test_model_definition/test_pydantic_only_fields.py 0000664 0000000 0000000 00000004027 15130200524 0030300 0 ustar 00root root 0000000 0000000 import datetime
import ormar
import pydantic
import pytest
from pydantic import computed_field
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Album(ormar.Model):
    # Mixes DB columns, a pydantic-only field (timestamp), computed fields
    # and a plain @property - to check what lands in model_dump().
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    timestamp: datetime.datetime = pydantic.Field(default=None)  # type: ignore

    @computed_field
    def name10(self) -> str:
        return self.name + "_10"

    @computed_field
    def name20(self) -> str:
        return self.name + "_20"

    @property
    def name30(self) -> str:
        # Plain property - accessible, but excluded from dumps.
        return self.name + "_30"

    @computed_field
    def name40(self) -> str:
        return self.name + "_40"


# Creates the schema for the model above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_pydantic_only_fields():
    """Pydantic-only fields stay None through DB round-trips, appear in
    dumps, and computed fields (but not @property) are dumped too."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = await Album.objects.create(name="Hitchcock")
            assert album.pk is not None
            assert album.saved
            assert album.timestamp is None

            # Field selection does not break pydantic-only fields.
            album = await Album.objects.exclude_fields("timestamp").get()
            assert album.timestamp is None
            album = await Album.objects.fields({"name", "timestamp"}).get()
            assert album.timestamp is None

            test_dict = album.model_dump()
            assert "timestamp" in test_dict
            assert test_dict["timestamp"] is None

            assert album.name30 == "Hitchcock_30"

            album.timestamp = datetime.datetime.now()
            test_dict = album.model_dump()
            assert "timestamp" in test_dict
            assert test_dict["timestamp"] is not None
            # Computed fields are included; the plain property is not.
            assert test_dict.get("name10") == "Hitchcock_10"
            assert test_dict.get("name20") == "Hitchcock_20"
            assert test_dict.get("name40") == "Hitchcock_40"
            assert "name30" not in test_dict
collerek-ormar-c09209a/tests/test_model_definition/test_pydantic_private_attributes.py 0000664 0000000 0000000 00000001353 15130200524 0031710 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
from pydantic import PrivateAttr
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Subscription(ormar.Model):
    # Model carrying a pydantic PrivateAttr next to regular columns.
    ormar_config = base_ormar_config.copy(tablename="subscriptions")

    id: int = ormar.Integer(primary_key=True)
    stripe_subscription_id: str = ormar.String(nullable=False, max_length=256)

    # Private, non-persisted state; each instance gets its own list.
    _add_payments: List[str] = PrivateAttr(default_factory=list)

    def add_payment(self, payment: str):
        self._add_payments.append(payment)


# Creates the schema for the model above in the test database.
create_test_database = init_tests(base_ormar_config)


def test_private_attribute():
    """Private attributes are writable on ormar model instances."""
    sub = Subscription(stripe_subscription_id="2312312sad231")
    sub.add_payment("test")
collerek-ormar-c09209a/tests/test_model_definition/test_save_status.py 0000664 0000000 0000000 00000016673 15130200524 0026451 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from ormar.exceptions import ModelPersistenceError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class NickNames(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    # NOTE(review): DB column is named "hq_name" here as well as on HQ.name
    # below - looks like a copy-paste; confirm the column name is intended.
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)


class NicksHq(ormar.Model):
    # Explicit through-model for the HQ <-> NickNames many-to-many.
    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")


class HQ(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickNames] = ormar.ManyToMany(NickNames, through=NicksHq)


class Company(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ)


# Creates the schema for the models above in the test database.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_instantiation_false_save_true():
    """A freshly constructed model reports saved=False until save()."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            comp = Company(name="Banzai", founded=1988)
            assert not comp.saved
            await comp.save()
            assert comp.saved


@pytest.mark.asyncio
async def test_saved_edited_not_saved():
    """Any field mutation (including pk) flips saved to False; update()
    - with or without kwargs - flips it back."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            comp = await Company.objects.create(name="Banzai", founded=1988)
            assert comp.saved
            comp.name = "Banzai2"
            assert not comp.saved
            await comp.update()
            assert comp.saved
            await comp.update(name="Banzai3")
            assert comp.saved
            comp.pk = 999
            assert not comp.saved
            await comp.update()
            assert comp.saved


@pytest.mark.asyncio
async def test_adding_related_gets_dirty():
    """Assigning a foreign key marks the parent dirty; mutating the related
    model marks only the related model dirty."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            comp = await Company.objects.create(name="Banzai", founded=1988)
            assert comp.saved

            comp.hq = hq
            assert not comp.saved
            await comp.update()
            assert comp.saved

            comp = await Company.objects.select_related("hq").get(name="Banzai")
            assert comp.saved
            assert comp.hq.pk == hq.pk
            assert comp.hq.saved

            # Dirtiness does not propagate from the related model upward.
            comp.hq.name = "Suburbs"
            assert not comp.hq.saved
            assert comp.saved
            await comp.hq.update()
            assert comp.hq.saved
@pytest.mark.asyncio
async def test_adding_many_to_many_does_not_gets_dirty():
    """add()/remove() on an m2m relation never dirties the owning model;
    only direct field mutation on a related instance dirties it."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            nick1 = await NickNames.objects.create(name="Bazinga", is_lame=False)
            nick2 = await NickNames.objects.create(name="Bazinga2", is_lame=True)
            hq = await HQ.objects.create(name="Main")
            assert hq.saved
            await hq.nicks.add(nick1)
            assert hq.saved
            await hq.nicks.add(nick2)
            assert hq.saved

            hq = await HQ.objects.select_related("nicks").get(name="Main")
            assert hq.saved
            assert hq.nicks[0].saved
            await hq.nicks.remove(nick1)
            assert hq.saved

            hq.nicks[0].name = "Kabucha"
            assert not hq.nicks[0].saved
            await hq.nicks[0].update()
            assert hq.nicks[0].saved
@pytest.mark.asyncio
async def test_delete():
    """delete() clears the saved flag; a subsequent update() re-saves."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            comp = await Company.objects.create(name="Banzai", founded=1988)
            assert comp.saved
            await comp.delete()
            assert not comp.saved
            await comp.update()
            assert comp.saved


@pytest.mark.asyncio
async def test_load():
    """load() refreshes fields from the DB and restores the saved flag."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            comp = await Company.objects.create(name="Banzai", founded=1988)
            assert comp.saved
            comp.name = "AA"
            assert not comp.saved
            await comp.load()
            assert comp.saved
            # Local edit was discarded in favor of the stored value.
            assert comp.name == "Banzai"
@pytest.mark.asyncio
async def test_queryset_methods():
    """Models returned by queryset methods (get/first/all/get_or_create/
    update_or_create) always come back with saved=True."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Company.objects.create(name="Banzai", founded=1988)
            await Company.objects.create(name="Yuhu", founded=1989)
            await Company.objects.create(name="Konono", founded=1990)
            await Company.objects.create(name="Sumaaa", founded=1991)

            comp = await Company.objects.get(name="Banzai")
            assert comp.saved
            comp = await Company.objects.first()
            assert comp.saved
            comps = await Company.objects.all()
            assert [comp.saved for comp in comps]

            # get_or_create reports whether a row was created.
            comp2, created = await Company.objects.get_or_create(
                name="Banzai_new", founded=2001
            )
            assert comp2.saved
            assert created is True
            comp3, created = await Company.objects.get_or_create(
                name="Banzai", founded=1988
            )
            assert comp3.saved
            assert comp3.pk == comp.pk
            assert created is False

            # update_or_create: with pk present it updates...
            update_dict = comp.model_dump()
            update_dict["founded"] = 2010
            comp = await Company.objects.update_or_create(**update_dict)
            assert comp.saved
            assert comp.founded == 2010

            # ...and without pk it creates.
            create_dict = {"name": "Yoko", "founded": 2005}
            comp = await Company.objects.update_or_create(**create_dict)
            assert comp.saved
            assert comp.founded == 2005
@pytest.mark.asyncio
async def test_bulk_methods():
    """bulk_create/bulk_update maintain the saved flag; update() on an
    unsaved model raises, while upsert() inserts or updates as needed."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            c1 = Company(name="Banzai", founded=1988)
            c2 = Company(name="Yuhu", founded=1989)
            await Company.objects.bulk_create([c1, c2])
            assert c1.saved
            assert c2.saved

            c1, c2 = await Company.objects.all()
            c1.name = "Banzai2"
            c2.name = "Yuhu2"
            assert not c1.saved
            assert not c2.saved
            await Company.objects.bulk_update([c1, c2])
            assert c1.saved
            assert c2.saved

            c3 = Company(name="Cobra", founded=2088)
            assert not c3.saved
            # update() requires a persisted model.
            with pytest.raises(ModelPersistenceError):
                await c3.update()
            # upsert() inserts when the model was never saved...
            await c3.upsert()
            assert c3.saved
            # ...and updates when it already exists.
            c3.name = "Python"
            assert not c3.saved
            await c3.upsert()
            assert c3.saved
            assert c3.name == "Python"
            await c3.upsert(founded=2077)
            assert c3.saved
            assert c3.founded == 2077
collerek-ormar-c09209a/tests/test_model_definition/test_saving_nullable_fields.py 0000664 0000000 0000000 00000003127 15130200524 0030571 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class PrimaryModel(ormar.Model):
    """Model with a nullable Text column used to reproduce the save bug."""

    ormar_config = base_ormar_config.copy(tablename="primary_models")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, index=True)
    some_text: str = ormar.Text()
    # NOTE: Removing nullable=True makes the test pass.
    some_other_text: Optional[str] = ormar.Text(nullable=True)
class SecondaryModel(ormar.Model):
    """Model with a FK to PrimaryModel (reverse name ``secondary_models``)."""

    ormar_config = base_ormar_config.copy(tablename="secondary_models")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    primary_model: PrimaryModel = ormar.ForeignKey(
        PrimaryModel, related_name="secondary_models"
    )


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_create_models():
    """Saving a model with a nullable field set, then updating a related
    model, works end to end."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            primary = await PrimaryModel(
                name="Foo", some_text="Bar", some_other_text="Baz"
            ).save()
            assert primary.id == 1
            secondary = await SecondaryModel(name="Foo", primary_model=primary).save()
            assert secondary.id == 1
            assert secondary.primary_model.id == 1
            secondary = await SecondaryModel.objects.get()
            assert secondary.name == "Foo"
            await secondary.update(name="Updated")
            assert secondary.name == "Updated"
collerek-ormar-c09209a/tests/test_model_definition/test_server_default.py 0000664 0000000 0000000 00000004446 15130200524 0027115 0 ustar 00root root 0000000 0000000 import time
from datetime import datetime
import ormar
import pytest
from sqlalchemy import func, text
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Product(ormar.Model):
    """Model exercising the three flavours of ``server_default``:
    a plain string, a ``text()`` clause and a SQL function (``func.now()``)."""

    ormar_config = base_ormar_config.copy(tablename="product")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    company: str = ormar.String(max_length=200, server_default="Acme")
    sort_order: int = ormar.Integer(server_default=text("10"))
    created: datetime = ormar.DateTime(server_default=func.now())


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
def test_table_defined_properly():
    """A server-default column is nullable, not required on the pydantic
    side, and keeps its SQL default on the sqlalchemy table."""
    created_field = Product.ormar_config.model_fields["created"]
    assert created_field.nullable
    assert not Product.model_fields["created"].is_required()
    created_column = Product.ormar_config.table.columns["created"]
    assert created_column.server_default.arg.name == "now"
@pytest.mark.asyncio
async def test_model_creation():
    """Server defaults are applied by the DB and visible after load();
    explicit values override them."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            p1 = Product(name="Test")
            # server-side defaults are unknown until the row is re-read
            assert p1.created is None
            await p1.save()
            await p1.load()
            assert p1.created is not None
            assert p1.company == "Acme"
            assert p1.sort_order == 10
            date = datetime.strptime("2020-10-27 11:30", "%Y-%m-%d %H:%M")
            p3 = await Product.objects.create(
                name="Test2", created=date, company="Roadrunner", sort_order=1
            )
            assert p3.created is not None
            assert p3.created == date
            assert p1.created != p3.created
            assert p3.company == "Roadrunner"
            assert p3.sort_order == 1
            p3 = await Product.objects.get(name="Test2")
            assert p3.company == "Roadrunner"
            assert p3.sort_order == 1
            # ensure a different timestamp for the next row
            time.sleep(1)
            p2 = await Product.objects.create(name="Test3")
            assert p2.created is not None
            assert p2.company == "Acme"
            assert p2.sort_order == 10
            if Product.db_backend_name() != "postgresql":
                # postgres uses the transaction timestamp so it would remain the same
                assert p1.created != p2.created  # pragma nocover
collerek-ormar-c09209a/tests/test_model_definition/test_setting_comments_in_db.py 0000664 0000000 0000000 00000001263 15130200524 0030612 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from ormar.models import Model
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Comment(Model):
    """Model whose fields declare DB column comments."""

    ormar_config = base_ormar_config.copy(tablename="comments")

    test: int = ormar.Integer(primary_key=True, comment="primary key of comments")
    test_string: str = ormar.String(max_length=250, comment="test that it works")


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_comments_are_set_in_db():
    """Every sqlalchemy column carries the comment declared on its ormar field."""
    model_fields = Comment.ormar_config.model_fields
    for column in Comment.ormar_config.table.c:
        assert column.comment == model_fields[column.name].comment
collerek-ormar-c09209a/tests/test_model_definition/test_through_model_relation_setup_on_clone.py 0000664 0000000 0000000 00000001422 15130200524 0033723 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Target side of the ManyToMany relation declared on Book."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id = ormar.Integer(primary_key=True)
    name = ormar.String(max_length=100)
class Book(ormar.Model):
    """Model with an implicit (auto-generated) through model for authors."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id = ormar.Integer(primary_key=True)
    title = ormar.String(max_length=100)
    author = ormar.ManyToMany(
        Author,
    )
    year = ormar.Integer(nullable=True)


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_tables_are_created():
    """Smoke test: querying the m2m model works after schema setup on a
    cloned config (no rows expected)."""
    async with base_ormar_config.database:
        assert await Book.objects.all() == []
collerek-ormar-c09209a/tests/test_model_methods/ 0000775 0000000 0000000 00000000000 15130200524 0021775 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_methods/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0024074 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_model_methods/test_excludes_in_load_all.py 0000664 0000000 0000000 00000003645 15130200524 0027547 0 ustar 00root root 0000000 0000000 import uuid
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class JimmyUser(ormar.Model):
    """User model keyed by a UUID primary key."""

    ormar_config = base_ormar_config.copy(tablename="jimmy_users")

    # FIX: pass the callable, not its result. ``default=uuid.uuid4()`` was
    # evaluated once at class-definition time, so every instance shared the
    # same default primary key (cf. test_save_related_uuid.py which uses the
    # callable form).
    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
class JimmyProfile(ormar.Model):
    """Profile with a FK to JimmyUser (reverse name ``jimmyprofiles``)."""

    ormar_config = base_ormar_config.copy(tablename="jimmy_profiles")

    # FIX: pass the callable, not its result. ``default=uuid.uuid4()`` was
    # evaluated once at class-definition time, so every instance shared the
    # same default primary key.
    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
    name = ormar.String(max_length=42, default="JimmyProfile")
    user: JimmyUser = ormar.ForeignKey(to=JimmyUser)
class JimmyAccount(ormar.Model):
    """Account with a FK to JimmyUser (reverse name ``jimmyaccounts``)."""

    ormar_config = base_ormar_config.copy(tablename="jimmy_accounts")

    # FIX: pass the callable, not its result. ``default=uuid.uuid4()`` was
    # evaluated once at class-definition time, so every instance shared the
    # same default primary key.
    id: uuid.UUID = ormar.UUID(
        primary_key=True, default=uuid.uuid4, uuid_format="string"
    )
    name = ormar.String(max_length=42, default="JimmyAccount")
    user: JimmyUser = ormar.ForeignKey(to=JimmyUser)


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_excluding_one_relation():
    """load_all(exclude=...) skips the excluded reverse relation but loads
    the others."""
    async with base_ormar_config.database:
        user = JimmyUser()
        await user.save()
        await JimmyAccount(user=user).save()
        await JimmyProfile(user=user).save()
        await user.load_all(exclude={"jimmyprofiles"})
        assert hasattr(user.jimmyaccounts[0], "name")
        # excluded relation stays empty on the instance
        assert len(user.jimmyprofiles) == 0
@pytest.mark.asyncio
async def test_excluding_other_relation():
    """Same as above with the other relation excluded; the row itself still
    exists in the DB."""
    async with base_ormar_config.database:
        user = JimmyUser()
        await user.save()
        await JimmyAccount(user=user).save()
        await JimmyProfile(user=user).save()
        await user.load_all(exclude={"jimmyaccounts"})
        # the excluded relation's row is still queryable directly
        assert await JimmyProfile.objects.get()
        assert hasattr(user.jimmyprofiles[0], "name")
        assert len(user.jimmyaccounts) == 0
collerek-ormar-c09209a/tests/test_model_methods/test_load_all.py 0000664 0000000 0000000 00000021120 15130200524 0025151 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Language(ormar.Model):
    """Deepest model in the nested-relation chain (HQ -> nicks -> level -> language)."""

    ormar_config = base_ormar_config.copy(tablename="languages")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    level: str = ormar.String(max_length=150, default="Beginner")
class CringeLevel(ormar.Model):
    """Level with a FK to Language; referenced from NickName."""

    ormar_config = base_ormar_config.copy(tablename="levels")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    language = ormar.ForeignKey(Language)
class NickName(ormar.Model):
    """M2M target of HQ; note the aliased DB column name ``hq_name``."""

    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)
    level: CringeLevel = ormar.ForeignKey(CringeLevel)
class HQ(ormar.Model):
    """Holds the ManyToMany relation to NickName exercised by load_all tests."""

    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickName] = ormar.ManyToMany(NickName)
class Company(ormar.Model):
    """FK to HQ with reverse name ``companies`` (used by reverse-relation tests)."""

    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ, related_name="companies")


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_load_all_fk_rel():
    """load_all() and select_all() both populate the reverse FK relation."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            company = await Company.objects.create(name="Banzai", founded=1988, hq=hq)
            hq = await HQ.objects.get(name="Main")
            await hq.load_all()
            assert hq.companies[0] == company
            assert hq.companies[0].name == "Banzai"
            assert hq.companies[0].founded == 1988
            # select_all() on the queryset yields the same populated relation
            hq2 = await HQ.objects.select_all().get(name="Main")
            assert hq2.companies[0] == company
            assert hq2.companies[0].name == "Banzai"
            assert hq2.companies[0].founded == 1988
@pytest.mark.asyncio
async def test_load_all_many_to_many():
    """load_all() and select_all() both populate m2m targets in insert order."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            nick1 = await NickName.objects.create(name="BazingaO", is_lame=False)
            nick2 = await NickName.objects.create(name="Bazinga20", is_lame=True)
            hq = await HQ.objects.create(name="Main")
            await hq.nicks.add(nick1)
            await hq.nicks.add(nick2)
            hq = await HQ.objects.get(name="Main")
            await hq.load_all()
            assert hq.nicks[0] == nick1
            assert hq.nicks[0].name == "BazingaO"
            assert hq.nicks[1] == nick2
            assert hq.nicks[1].name == "Bazinga20"
            hq2 = await HQ.objects.select_all().get(name="Main")
            assert hq2.nicks[0] == nick1
            assert hq2.nicks[0].name == "BazingaO"
            assert hq2.nicks[1] == nick2
            assert hq2.nicks[1].name == "Bazinga20"
@pytest.mark.asyncio
async def test_load_all_with_order():
    """order_by on a relation path reverses the m2m ordering; omitting it
    falls back to default order."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            nick1 = await NickName.objects.create(name="Barry", is_lame=False)
            nick2 = await NickName.objects.create(name="Joe", is_lame=True)
            hq = await HQ.objects.create(name="Main")
            await hq.nicks.add(nick1)
            await hq.nicks.add(nick2)
            hq = await HQ.objects.get(name="Main")
            # descending order by related name: Joe before Barry
            await hq.load_all(order_by="-nicks__name")
            assert hq.nicks[0] == nick2
            assert hq.nicks[0].name == "Joe"
            assert hq.nicks[1] == nick1
            assert hq.nicks[1].name == "Barry"
            # without order_by the default (insert) order is restored
            await hq.load_all()
            assert hq.nicks[0] == nick1
            assert hq.nicks[1] == nick2
            hq2 = (
                await HQ.objects.select_all().order_by("-nicks__name").get(name="Main")
            )
            assert hq2.nicks[0] == nick2
            assert hq2.nicks[1] == nick1
            hq3 = await HQ.objects.select_all().get(name="Main")
            assert hq3.nicks[0] == nick1
            assert hq3.nicks[1] == nick2
@pytest.mark.asyncio
async def test_loading_reversed_relation():
    """load_all()/select_all() resolve the forward FK (child -> parent)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            await Company.objects.create(name="Banzai", founded=1988, hq=hq)
            company = await Company.objects.get(name="Banzai")
            await company.load_all()
            assert company.hq == hq
            company2 = await Company.objects.select_all().get(name="Banzai")
            assert company2.hq == hq
@pytest.mark.asyncio
async def test_loading_nested():
    """follow=True loads the full relation chain; ``exclude`` prunes it at
    any depth (leaf field, mid-chain relation, or the whole m2m)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            language = await Language.objects.create(name="English")
            level = await CringeLevel.objects.create(name="High", language=language)
            level2 = await CringeLevel.objects.create(name="Low", language=language)
            nick1 = await NickName.objects.create(
                name="BazingaO", is_lame=False, level=level
            )
            nick2 = await NickName.objects.create(
                name="Bazinga20", is_lame=True, level=level2
            )
            hq = await HQ.objects.create(name="Main")
            await hq.nicks.add(nick1)
            await hq.nicks.add(nick2)
            hq = await HQ.objects.get(name="Main")
            # full chain: hq -> nicks -> level -> language
            await hq.load_all(follow=True)
            assert hq.nicks[0] == nick1
            assert hq.nicks[0].name == "BazingaO"
            assert hq.nicks[0].level.name == "High"
            assert hq.nicks[0].level.language.name == "English"
            assert hq.nicks[1] == nick2
            assert hq.nicks[1].name == "Bazinga20"
            assert hq.nicks[1].level.name == "Low"
            assert hq.nicks[1].level.language.name == "English"
            # queryset equivalent of load_all(follow=True)
            hq2 = await HQ.objects.select_all(follow=True).get(name="Main")
            assert hq2.nicks[0] == nick1
            assert hq2.nicks[0].name == "BazingaO"
            assert hq2.nicks[0].level.name == "High"
            assert hq2.nicks[0].level.language.name == "English"
            assert hq2.nicks[1] == nick2
            assert hq2.nicks[1].name == "Bazinga20"
            assert hq2.nicks[1].level.name == "Low"
            assert hq2.nicks[1].level.language.name == "English"
            # select_all(follow=True) on the relation proxy itself
            hq5 = await HQ.objects.select_all().get(name="Main")
            assert len(hq5.nicks) == 2
            await hq5.nicks.select_all(follow=True).all()
            assert hq5.nicks[0] == nick1
            assert hq5.nicks[0].name == "BazingaO"
            assert hq5.nicks[0].level.name == "High"
            assert hq5.nicks[0].level.language.name == "English"
            assert hq5.nicks[1] == nick2
            assert hq5.nicks[1].name == "Bazinga20"
            assert hq5.nicks[1].level.name == "Low"
            assert hq5.nicks[1].level.language.name == "English"
            # exclude the deepest relation
            await hq.load_all(follow=True, exclude="nicks__level__language")
            assert len(hq.nicks) == 2
            assert hq.nicks[0].level.language is None
            assert hq.nicks[1].level.language is None
            hq3 = (
                await HQ.objects.select_all(follow=True)
                .exclude_fields("nicks__level__language")
                .get(name="Main")
            )
            assert len(hq3.nicks) == 2
            assert hq3.nicks[0].level.language is None
            assert hq3.nicks[1].level.language is None
            # exclude only a scalar field on the deepest model
            await hq.load_all(follow=True, exclude="nicks__level__language__level")
            assert len(hq.nicks) == 2
            assert hq.nicks[0].level.language is not None
            assert hq.nicks[0].level.language.level is None
            assert hq.nicks[1].level.language is not None
            assert hq.nicks[1].level.language.level is None
            # exclude a mid-chain relation
            await hq.load_all(follow=True, exclude="nicks__level")
            assert len(hq.nicks) == 2
            assert hq.nicks[0].level is None
            assert hq.nicks[1].level is None
            # exclude the m2m entirely
            await hq.load_all(follow=True, exclude="nicks")
            assert len(hq.nicks) == 0
collerek-ormar-c09209a/tests/test_model_methods/test_populate_default_values.py 0000664 0000000 0000000 00000001645 15130200524 0030330 0 ustar 00root root 0000000 0000000 import ormar
from sqlalchemy import text
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Task(ormar.Model):
    """Model mixing client-side defaults with server-side defaults."""

    ormar_config = base_ormar_config.copy(tablename="tasks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(
        max_length=255, minimum=0, server_default=text("'Default Name'"), nullable=False
    )
    points: int = ormar.Integer(
        default=0, minimum=0, server_default=text("0"), nullable=False
    )
    # only a client-side default - should be filled by populate_default_values
    score: int = ormar.Integer(default=5)


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
def test_populate_default_values():
    """Keys already present are kept as given; the missing ``score`` key is
    filled with its declared default."""
    provided = {"id": None, "name": "", "points": 0}
    result = Task.populate_default_values(provided)
    assert result["id"] is None
    assert result["name"] == ""
    assert result["points"] == 0
    assert result["score"] == 5
collerek-ormar-c09209a/tests/test_model_methods/test_save_related.py 0000664 0000000 0000000 00000016521 15130200524 0026051 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class CringeLevel(ormar.Model):
    """Nested FK target of NickName (deepest level of save_related tests)."""

    ormar_config = base_ormar_config.copy(tablename="levels")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class NickName(ormar.Model):
    """M2M target of HQ; DB column for ``name`` is aliased to ``hq_name``."""

    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)
    level: CringeLevel = ormar.ForeignKey(CringeLevel)
class NicksHq(ormar.Model):
    """Explicit through model for the HQ <-> NickName ManyToMany relation."""

    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")
class HQ(ormar.Model):
    """ManyToMany to NickName via the explicit NicksHq through model."""

    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickName] = ormar.ManyToMany(NickName, through=NicksHq)
class Company(ormar.Model):
    """FK to HQ with reverse name ``companies``."""

    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ, related_name="companies")


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_saving_related_fk_rel():
    """save_related() persists only modified related models and honours
    ``exclude``; returns the number of rows saved."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            comp = await Company.objects.create(name="Banzai", founded=1988, hq=hq)
            assert comp.saved
            # nothing modified -> nothing saved
            count = await comp.save_related()
            assert count == 0
            comp.hq.name = "Suburbs"
            assert not comp.hq.saved
            assert comp.saved
            count = await comp.save_related()
            assert count == 1
            assert comp.hq.saved
            comp.hq.name = "Suburbs 2"
            assert not comp.hq.saved
            assert comp.saved
            # excluded relation is skipped even though it is dirty
            count = await comp.save_related(exclude={"hq"})
            assert count == 0
            assert not comp.hq.saved
@pytest.mark.asyncio
async def test_saving_many_to_many():
    """save_related() over an m2m: save_all forces a full save, otherwise
    only dirty targets are written; ``exclude`` skips the relation."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            nick1 = await NickName.objects.create(name="BazingaO", is_lame=False)
            nick2 = await NickName.objects.create(name="Bazinga20", is_lame=True)
            hq = await HQ.objects.create(name="Main")
            assert hq.saved
            await hq.nicks.add(nick1)
            assert hq.saved
            await hq.nicks.add(nick2)
            assert hq.saved
            count = await hq.save_related()
            assert count == 0
            # save_all=True re-saves hq plus both nicks
            count = await hq.save_related(save_all=True)
            assert count == 3
            hq.nicks[0].name = "Kabucha"
            hq.nicks[1].name = "Kabucha2"
            assert not hq.nicks[0].saved
            assert not hq.nicks[1].saved
            count = await hq.save_related()
            assert count == 2
            assert hq.nicks[0].saved
            assert hq.nicks[1].saved
            hq.nicks[0].name = "Kabucha a"
            hq.nicks[1].name = "Kabucha2 a"
            assert not hq.nicks[0].saved
            assert not hq.nicks[1].saved
            count = await hq.save_related(exclude={"nicks": ...})
            assert count == 0
            assert not hq.nicks[0].saved
            assert not hq.nicks[1].saved
@pytest.mark.asyncio
async def test_saving_reversed_relation():
    """save_related() on a reverse FK list saves only dirty children."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            hq = await HQ.objects.create(name="Main")
            await Company.objects.create(name="Banzai", founded=1988, hq=hq)
            hq = await HQ.objects.select_related("companies").get(name="Main")
            assert hq.saved
            assert hq.companies[0].saved
            hq.companies[0].name = "Konichiwa"
            assert not hq.companies[0].saved
            count = await hq.save_related()
            assert count == 1
            assert hq.companies[0].saved
            await Company.objects.create(name="Joshua", founded=1888, hq=hq)
            hq = await HQ.objects.select_related("companies").get(name="Main")
            assert hq.saved
            assert hq.companies[0].saved
            assert hq.companies[1].saved
            hq.companies[0].name = hq.companies[0].name + "20"
            assert not hq.companies[0].saved
            # save only if not saved so now only one
            count = await hq.save_related()
            assert count == 1
            assert hq.companies[0].saved
            hq.companies[0].name = hq.companies[0].name + "20"
            hq.companies[1].name = hq.companies[1].name + "30"
            assert not hq.companies[0].saved
            assert not hq.companies[1].saved
            count = await hq.save_related()
            assert count == 2
            assert hq.companies[0].saved
            assert hq.companies[1].saved
@pytest.mark.asyncio
async def test_saving_nested():
    """save_related(follow=True) walks nested relations; nested ``exclude``
    ({"nicks": {"level"}}) prunes a sub-relation."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            level = await CringeLevel.objects.create(name="High")
            level2 = await CringeLevel.objects.create(name="Low")
            nick1 = await NickName.objects.create(
                name="BazingaO", is_lame=False, level=level
            )
            nick2 = await NickName.objects.create(
                name="Bazinga20", is_lame=True, level=level2
            )
            hq = await HQ.objects.create(name="Main")
            assert hq.saved
            await hq.nicks.add(nick1)
            assert hq.saved
            await hq.nicks.add(nick2)
            assert hq.saved
            count = await hq.save_related()
            assert count == 0
            hq.nicks[0].level.name = "Medium"
            assert not hq.nicks[0].level.saved
            assert hq.nicks[0].saved
            # follow=True is needed to reach the nested dirty level
            count = await hq.save_related(follow=True)
            assert count == 1
            assert hq.nicks[0].saved
            assert hq.nicks[0].level.saved
            hq.nicks[0].level.name = "Low"
            hq.nicks[1].level.name = "Medium"
            assert not hq.nicks[0].level.saved
            assert not hq.nicks[1].level.saved
            assert hq.nicks[0].saved
            assert hq.nicks[1].saved
            count = await hq.save_related(follow=True)
            assert count == 2
            assert hq.nicks[0].saved
            assert hq.nicks[0].level.saved
            assert hq.nicks[1].saved
            assert hq.nicks[1].level.saved
            hq.nicks[0].level.name = "Low 2"
            hq.nicks[1].level.name = "Medium 2"
            assert not hq.nicks[0].level.saved
            assert not hq.nicks[1].level.saved
            assert hq.nicks[0].saved
            assert hq.nicks[1].saved
            # nested exclude skips the dirty levels under nicks
            count = await hq.save_related(follow=True, exclude={"nicks": {"level"}})
            assert count == 0
            assert hq.nicks[0].saved
            assert not hq.nicks[0].level.saved
            assert hq.nicks[1].saved
            assert not hq.nicks[1].level.saved
collerek-ormar-c09209a/tests/test_model_methods/test_save_related_from_dict.py 0000664 0000000 0000000 00000021220 15130200524 0030067 0 ustar 00root root 0000000 0000000 from typing import List
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class CringeLevel(ormar.Model):
    """Deepest FK target used by the nested save_related-from-dict tests."""

    ormar_config = base_ormar_config.copy(tablename="levels")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class NickName(ormar.Model):
    """M2M target of HQ; DB column for ``name`` is aliased to ``hq_name``."""

    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)
    level: CringeLevel = ormar.ForeignKey(CringeLevel)
class NicksHq(ormar.Model):
    """Through model carrying an extra payload column (``new_field``)."""

    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")

    id: int = ormar.Integer(primary_key=True)
    new_field: str = ormar.String(max_length=200, nullable=True)
class HQ(ormar.Model):
    """ManyToMany to NickName via the NicksHq through model."""

    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickName] = ormar.ManyToMany(NickName, through=NicksHq)
class Company(ormar.Model):
    """FK to HQ with reverse name ``companies``."""

    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ, related_name="companies")


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_saving_related_reverse_fk():
    """A dict payload with a nested reverse-FK list is persisted by
    save_related(follow=True, save_all=True)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {"companies": [{"name": "Banzai"}], "name": "Main"}
            hq = HQ(**payload)
            # one HQ row + one Company row
            count = await hq.save_related(follow=True, save_all=True)
            assert count == 2
            hq_check = await HQ.objects.select_related("companies").get()
            assert hq_check.pk is not None
            assert hq_check.name == "Main"
            assert len(hq_check.companies) == 1
            assert hq_check.companies[0].name == "Banzai"
            assert hq_check.companies[0].pk is not None
@pytest.mark.asyncio
async def test_saving_related_reverse_fk_multiple():
    """Same as the single-child case but with two reverse-FK children."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {
                "companies": [{"name": "Banzai"}, {"name": "Yamate"}],
                "name": "Main",
            }
            hq = HQ(**payload)
            # one HQ row + two Company rows
            count = await hq.save_related(follow=True, save_all=True)
            assert count == 3
            hq_check = await HQ.objects.select_related("companies").get()
            assert hq_check.pk is not None
            assert hq_check.name == "Main"
            assert len(hq_check.companies) == 2
            assert hq_check.companies[0].name == "Banzai"
            assert hq_check.companies[0].pk is not None
            assert hq_check.companies[1].name == "Yamate"
            assert hq_check.companies[1].pk is not None
@pytest.mark.asyncio
async def test_saving_related_fk():
    """A dict payload with a nested forward FK is persisted parent-first."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {"hq": {"name": "Main"}, "name": "Banzai"}
            comp = Company(**payload)
            count = await comp.save_related(follow=True, save_all=True)
            assert count == 2
            comp_check = await Company.objects.select_related("hq").get()
            assert comp_check.pk is not None
            assert comp_check.name == "Banzai"
            assert comp_check.hq.name == "Main"
            assert comp_check.hq.pk is not None
@pytest.mark.asyncio
async def test_saving_many_to_many_wo_through():
    """M2M targets from a dict payload are saved without explicit through
    payloads."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {
                "name": "Main",
                "nicks": [
                    {"name": "Bazinga0", "is_lame": False},
                    {"name": "Bazinga20", "is_lame": True},
                ],
            }
            hq = HQ(**payload)
            # one HQ row + two NickName rows
            count = await hq.save_related()
            assert count == 3
            hq_check = await HQ.objects.select_related("nicks").get()
            assert hq_check.pk is not None
            assert len(hq_check.nicks) == 2
            assert hq_check.nicks[0].name == "Bazinga0"
            assert hq_check.nicks[1].name == "Bazinga20"
@pytest.mark.asyncio
async def test_saving_many_to_many_with_through():
    """Through-model payloads (``nickshq``) embedded in the dict are saved
    alongside the m2m targets.

    FIX: the original opened the same ``transaction(force_rollback=True)``
    context manager twice in a row; the redundant inner duplicate is removed.
    """
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {
                "name": "Main",
                "nicks": [
                    {
                        "name": "Bazinga0",
                        "is_lame": False,
                        "nickshq": {"new_field": "test"},
                    },
                    {
                        "name": "Bazinga20",
                        "is_lame": True,
                        "nickshq": {"new_field": "test2"},
                    },
                ],
            }
            hq = HQ(**payload)
            count = await hq.save_related()
            assert count == 3
            hq_check = await HQ.objects.select_related("nicks").get()
            assert hq_check.pk is not None
            assert len(hq_check.nicks) == 2
            assert hq_check.nicks[0].name == "Bazinga0"
            assert hq_check.nicks[0].nickshq.new_field == "test"
            assert hq_check.nicks[1].name == "Bazinga20"
            assert hq_check.nicks[1].nickshq.new_field == "test2"
@pytest.mark.asyncio
async def test_saving_nested_with_m2m_and_rev_fk():
    """Nested dict with m2m targets that each carry their own FK (level)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {
                "name": "Main",
                "nicks": [
                    {"name": "Bazinga0", "is_lame": False, "level": {"name": "High"}},
                    {"name": "Bazinga20", "is_lame": True, "level": {"name": "Low"}},
                ],
            }
            hq = HQ(**payload)
            # 1 HQ + 2 nicks + 2 levels
            count = await hq.save_related(follow=True, save_all=True)
            assert count == 5
            hq_check = await HQ.objects.select_related("nicks__level").get()
            assert hq_check.pk is not None
            assert len(hq_check.nicks) == 2
            assert hq_check.nicks[0].name == "Bazinga0"
            assert hq_check.nicks[0].level.name == "High"
            assert hq_check.nicks[1].name == "Bazinga20"
            assert hq_check.nicks[1].level.name == "Low"
@pytest.mark.asyncio
async def test_saving_nested_with_m2m_and_rev_fk_and_through():
    """Deepest payload: Company -> HQ -> nicks (with through payloads) ->
    level, all from one dict."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            payload = {
                "hq": {
                    "name": "Yoko",
                    "nicks": [
                        {
                            "name": "Bazinga0",
                            "is_lame": False,
                            "nickshq": {"new_field": "test"},
                            "level": {"name": "High"},
                        },
                        {
                            "name": "Bazinga20",
                            "is_lame": True,
                            "nickshq": {"new_field": "test2"},
                            "level": {"name": "Low"},
                        },
                    ],
                },
                "name": "Main",
            }
            company = Company(**payload)
            # 1 Company + 1 HQ + 2 nicks + 2 levels
            count = await company.save_related(follow=True, save_all=True)
            assert count == 6
            company_check = await Company.objects.select_related(
                "hq__nicks__level"
            ).get()
            assert company_check.pk is not None
            assert company_check.name == "Main"
            assert company_check.hq.name == "Yoko"
            assert len(company_check.hq.nicks) == 2
            assert company_check.hq.nicks[0].name == "Bazinga0"
            assert company_check.hq.nicks[0].nickshq.new_field == "test"
            assert company_check.hq.nicks[0].level.name == "High"
            assert company_check.hq.nicks[1].name == "Bazinga20"
            assert company_check.hq.nicks[1].level.name == "Low"
            assert company_check.hq.nicks[1].nickshq.new_field == "test2"
collerek-ormar-c09209a/tests/test_model_methods/test_save_related_pk_only.py 0000664 0000000 0000000 00000001346 15130200524 0027603 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class A(ormar.Model):
    """Minimal model holding only a primary key."""

    ormar_config = base_ormar_config.copy()

    id = ormar.Integer(primary_key=True)
class B(ormar.Model):
    """Model with an (optional) FK to A."""

    ormar_config = base_ormar_config.copy()

    id = ormar.Integer(primary_key=True)
    a = ormar.ForeignKey(A)


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_saving_related_pk_only():
    """Regression: save_related() must not blow up on a model that has only
    a primary key and no populated relations."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            a = A()
            await a.save()
            await a.save_related(follow=True, save_all=True)
collerek-ormar-c09209a/tests/test_model_methods/test_save_related_uuid.py 0000664 0000000 0000000 00000004023 15130200524 0027071 0 ustar 00root root 0000000 0000000 import uuid
from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Department(ormar.Model):
    """UUID-keyed root of the Department -> Course -> Student chain.

    Note: ``default=uuid.uuid4`` passes the callable so each row gets a
    fresh UUID.
    """

    ormar_config = base_ormar_config.copy()

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    department_name: str = ormar.String(max_length=100)
class Course(ormar.Model):
    """UUID-keyed course with a FK to Department."""

    ormar_config = base_ormar_config.copy()

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    course_name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean()
    department: Optional[Department] = ormar.ForeignKey(Department)
class Student(ormar.Model):
    """UUID-keyed student; m2m to Course (through model auto-generated)."""

    ormar_config = base_ormar_config.copy()

    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)
    name: str = ormar.String(max_length=100)
    courses = ormar.ManyToMany(Course)


# creates/drops the test database schema around the test session
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_uuid_pk_in_save_related():
    """save_related() handles UUID primary keys across the whole nested
    payload; round-trips to the original dict once ids are excluded."""
    async with base_ormar_config.database:
        to_save = {
            "department_name": "Ormar",
            "courses": [
                {
                    "course_name": "basic1",
                    "completed": True,
                    "students": [{"name": "Abi"}, {"name": "Jack"}],
                },
                {
                    "course_name": "basic2",
                    "completed": True,
                    "students": [{"name": "Kate"}, {"name": "Miranda"}],
                },
            ],
        }
        department = Department(**to_save)
        await department.save_related(follow=True, save_all=True)
        department_check = (
            await Department.objects.select_all(follow=True)
            .order_by(Department.courses.students.name.asc())
            .get()
        )
        # drop generated ids and through-model artifacts before comparing
        to_exclude = {
            "id": ...,
            "courses": {"id": ..., "students": {"id", "studentcourse"}},
        }
        assert department_check.model_dump(exclude=to_exclude) == to_save
collerek-ormar-c09209a/tests/test_model_methods/test_update.py 0000664 0000000 0000000 00000005721 15130200524 0024675 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Director(ormar.Model):
    """Director model; column names differ from field names via name=...."""

    ormar_config = base_ormar_config.copy(tablename="directors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="first_name")
    last_name: str = ormar.String(max_length=100, nullable=False, name="last_name")
class Movie(ormar.Model):
    """Movie with an aliased `name` -> `title` column and an FK to Director."""

    ormar_config = base_ormar_config.copy(tablename="movies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="title")
    year: int = ormar.Integer()
    profit: float = ormar.Float()
    director: Optional[Director] = ormar.ForeignKey(Director)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_updating_selected_columns():
    """update(_columns=[...]) should persist only the listed columns."""
    async with base_ormar_config.database:
        director1 = await Director(name="Peter", last_name="Jackson").save()
        director2 = await Director(name="James", last_name="Cameron").save()

        lotr = await Movie(
            name="LOTR", year=2001, director=director1, profit=1.140
        ).save()
        lotr.name = "Lord of The Rings"
        lotr.year = 2003
        lotr.profit = 1.212
        await lotr.update(_columns=["name"])
        # before reload the field has current value even if not saved
        assert lotr.year == 2003

        # only "name" was written to the database - the rest kept old values
        lotr = await Movie.objects.get()
        assert lotr.name == "Lord of The Rings"
        assert lotr.year == 2001
        assert round(lotr.profit, 3) == 1.140
        assert lotr.director.pk == director1.pk

        lotr.year = 2003
        lotr.profit = 1.212
        lotr.director = director2
        await lotr.update(_columns=["year", "profit"])
        # the director change was not listed, so it must not be persisted
        lotr = await Movie.objects.get()
        assert lotr.year == 2003
        assert round(lotr.profit, 3) == 1.212
        assert lotr.director.pk == director1.pk
@pytest.mark.asyncio
async def test_not_passing_columns_or_empty_list_saves_all():
    """update() with no _columns (or an empty list) should persist all fields."""
    async with base_ormar_config.database:
        director = await Director(name="James", last_name="Cameron").save()
        terminator = await Movie(
            name="Terminator", year=1984, director=director, profit=0.078
        ).save()

        # empty list behaves the same as not passing _columns at all
        terminator.name = "Terminator 2"
        terminator.year = 1991
        terminator.profit = 0.520
        await terminator.update(_columns=[])

        terminator = await Movie.objects.get()
        assert terminator.name == "Terminator 2"
        assert terminator.year == 1991
        assert round(terminator.profit, 3) == 0.520

        terminator.name = "Terminator 3"
        terminator.year = 2003
        terminator.profit = 0.433
        await terminator.update()

        terminator = await terminator.load()
        assert terminator.name == "Terminator 3"
        assert terminator.year == 2003
        assert round(terminator.profit, 3) == 0.433
collerek-ormar-c09209a/tests/test_model_methods/test_upsert.py 0000664 0000000 0000000 00000003007 15130200524 0024730 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Director(ormar.Model):
    """Director model; column names differ from field names via name=...."""

    ormar_config = base_ormar_config.copy(tablename="directors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="first_name")
    last_name: str = ormar.String(max_length=100, nullable=False, name="last_name")
class Movie(ormar.Model):
    """Movie with an aliased `name` -> `title` column and an FK to Director."""

    ormar_config = base_ormar_config.copy(tablename="movies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="title")
    year: int = ormar.Integer()
    profit: float = ormar.Float()
    director: Optional[Director] = ormar.ForeignKey(Director)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_upsert_with_preset_pk_requires_force_save():
    """upsert() on a model with a manually set pk issues an UPDATE by default;
    __force_save__=True is required to INSERT the row instead.

    Renamed from the copy-pasted ``test_updating_selected_columns`` (the name
    of a test in test_update.py) to describe what is actually verified here.
    """
    async with base_ormar_config.database:
        director1 = await Director(name="Peter", last_name="Jackson").save()

        # pk is set, so upsert() runs an UPDATE which matches no existing row
        await Movie(
            id=1, name="Lord of The Rings", year=2003, director=director1, profit=1.212
        ).upsert()
        with pytest.raises(ormar.NoMatch):
            await Movie.objects.get()

        # __force_save__=True forces an INSERT despite the preset pk
        await Movie(
            id=1, name="Lord of The Rings", year=2003, director=director1, profit=1.212
        ).upsert(__force_save__=True)
        lotr = await Movie.objects.get()
        assert lotr.year == 2003
        assert lotr.name == "Lord of The Rings"
collerek-ormar-c09209a/tests/test_ordering/ 0000775 0000000 0000000 00000000000 15130200524 0020763 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_ordering/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0023062 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_ordering/test_default_model_order.py 0000664 0000000 0000000 00000006242 15130200524 0026377 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Author with a model-level default ordering of name descending."""

    ormar_config = base_ormar_config.copy(tablename="authors", order_by=["-name"])

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Book with a two-column default ordering: year asc, then ranking desc."""

    ormar_config = base_ormar_config.copy(
        tablename="books", order_by=["year", "-ranking"]
    )

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    ranking: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="function")
async def cleanup():
    """Delete all Books and Authors after every test in this module."""
    yield
    async with base_ormar_config.database:
        # Books first - they hold the FK to Author
        await Book.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_default_orders_is_applied():
    """Model-level order_by=["-name"] is used unless order_by() overrides it."""
    async with base_ormar_config.database:
        tolkien = await Author(name="J.R.R. Tolkien").save()
        sapkowski = await Author(name="Andrzej Sapkowski").save()
        king = await Author(name="Stephen King").save()
        lewis = await Author(name="C.S Lewis").save()

        # default: descending by name
        authors = await Author.objects.all()
        assert authors[0] == king
        assert authors[1] == tolkien
        assert authors[2] == lewis
        assert authors[3] == sapkowski

        # explicit order_by("name") reverses the default
        authors = await Author.objects.order_by("name").all()
        assert authors[3] == king
        assert authors[2] == tolkien
        assert authors[1] == lewis
        assert authors[0] == sapkowski
@pytest.mark.asyncio
async def test_default_orders_is_applied_on_related():
    """Book's default order_by applies when querying through the reverse relation."""
    async with base_ormar_config.database:
        tolkien = await Author(name="J.R.R. Tolkien").save()
        silmarillion = await Book(
            author=tolkien, title="The Silmarillion", year=1977
        ).save()
        lotr = await Book(
            author=tolkien, title="The Lord of the Rings", year=1955
        ).save()
        hobbit = await Book(author=tolkien, title="The Hobbit", year=1933).save()

        # default order: by year ascending
        await tolkien.books.all()
        assert tolkien.books[0] == hobbit
        assert tolkien.books[1] == lotr
        assert tolkien.books[2] == silmarillion

        # explicit order_by on the relation overrides the default
        await tolkien.books.order_by("-title").all()
        assert tolkien.books[2] == hobbit
        assert tolkien.books[1] == lotr
        assert tolkien.books[0] == silmarillion
@pytest.mark.asyncio
async def test_default_orders_is_applied_on_related_two_fields():
    """With equal years, the secondary "-ranking" order breaks the tie."""
    async with base_ormar_config.database:
        sanders = await Author(name="Brandon Sanderson").save()
        twok = await Book(
            author=sanders, title="The Way of Kings", year=2010, ranking=10
        ).save()
        bret = await Author(name="Peter V. Bret").save()
        tds = await Book(
            author=bret, title="The Desert Spear", year=2010, ranking=9
        ).save()

        # same year -> higher ranking comes first
        books = await Book.objects.all()
        assert books[0] == twok
        assert books[1] == tds
collerek-ormar-c09209a/tests/test_ordering/test_default_relation_order.py 0000664 0000000 0000000 00000010270 15130200524 0027110 0 ustar 00root root 0000000 0000000 from typing import List, Optional
from uuid import UUID, uuid4
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Author with no default ordering of its own."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Book whose FK defines relation-level default orders:
    orders_by sorts the related Author, related_orders_by sorts author.books."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(
        Author, orders_by=["name"], related_orders_by=["-year"]
    )
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    ranking: int = ormar.Integer(nullable=True)
class Animal(ormar.Model):
    """Animal with a UUID pk, used as the m2m target of Human.pets."""

    ormar_config = base_ormar_config.copy(tablename="animals")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.String(max_length=200)
    specie: str = ormar.String(max_length=200)
class Human(ormar.Model):
    """Human with an m2m to Animal carrying default orders on both sides."""

    ormar_config = base_ormar_config.copy(tablename="humans")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    # orders_by sorts human.pets; related_orders_by sorts animal.care_takers
    pets: List[Animal] = ormar.ManyToMany(
        Animal,
        related_name="care_takers",
        orders_by=["specie", "-name"],
        related_orders_by=["name"],
    )
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="function")
async def cleanup():
    """Delete all Books and Authors after every test in this module."""
    yield
    async with base_ormar_config.database:
        # Books first - they hold the FK to Author
        await Book.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_default_orders_is_applied_from_reverse_relation():
    """related_orders_by=["-year"] should sort author.books newest first."""
    async with base_ormar_config.database:
        tolkien = await Author(name="J.R.R. Tolkien").save()
        hobbit = await Book(author=tolkien, title="The Hobbit", year=1933).save()
        silmarillion = await Book(
            author=tolkien, title="The Silmarillion", year=1977
        ).save()
        lotr = await Book(
            author=tolkien, title="The Lord of the Rings", year=1955
        ).save()

        # 1977 > 1955 > 1933
        tolkien = await Author.objects.select_related("books").get()
        assert tolkien.books[2] == hobbit
        assert tolkien.books[1] == lotr
        assert tolkien.books[0] == silmarillion
@pytest.mark.asyncio
async def test_default_orders_is_applied_from_relation():
    """orders_by=["name"] on the FK breaks the tie between equal years."""
    async with base_ormar_config.database:
        bret = await Author(name="Peter V. Bret").save()
        tds = await Book(
            author=bret, title="The Desert Spear", year=2010, ranking=9
        ).save()
        sanders = await Author(name="Brandon Sanderson").save()
        twok = await Book(
            author=sanders, title="The Way of Kings", year=2010, ranking=10
        ).save()

        # same year -> sorted by author name: Brandon before Peter
        books = await Book.objects.order_by("year").select_related("author").all()
        assert books[0] == twok
        assert books[1] == tds
@pytest.mark.asyncio
async def test_default_orders_is_applied_from_relation_on_m2m():
    """orders_by=["specie", "-name"] should sort alice.pets regardless of add order."""
    async with base_ormar_config.database:
        alice = await Human(name="Alice").save()
        spot = await Animal(name="Spot", specie="Cat").save()
        zkitty = await Animal(name="ZKitty", specie="Cat").save()
        noodle = await Animal(name="Noodle", specie="Anaconda").save()
        await alice.pets.add(noodle)
        await alice.pets.add(spot)
        await alice.pets.add(zkitty)

        # Anaconda before Cat; within Cat, names descending: ZKitty before Spot
        await alice.load_all()
        assert alice.pets[0] == noodle
        assert alice.pets[1] == zkitty
        assert alice.pets[2] == spot
@pytest.mark.asyncio
async def test_default_orders_is_applied_from_reverse_relation_on_m2m():
    """related_orders_by=["name"] should sort reverse m2m (care_takers) by name."""
    async with base_ormar_config.database:
        # renamed local from `max` so the builtin max() is not shadowed
        max_dog = await Animal(name="Max", specie="Dog").save()
        joe = await Human(name="Joe").save()
        zack = await Human(name="Zack").save()
        julia = await Human(name="Julia").save()
        await max_dog.care_takers.add(joe)
        await max_dog.care_takers.add(zack)
        await max_dog.care_takers.add(julia)

        # alphabetical by name: Joe, Julia, Zack
        await max_dog.load_all()
        assert max_dog.care_takers[0] == joe
        assert max_dog.care_takers[1] == julia
        assert max_dog.care_takers[2] == zack
collerek-ormar-c09209a/tests/test_ordering/test_default_through_relation_order.py 0000664 0000000 0000000 00000025460 15130200524 0030657 0 ustar 00root root 0000000 0000000 from typing import Any, Dict, List, Tuple, Type, cast
from uuid import UUID, uuid4
import ormar
import pytest
from ormar import (
Model,
ModelDefinitionError,
QuerySet,
pre_relation_remove,
pre_save,
pre_update,
)
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Animal(ormar.Model):
    """Animal with a UUID pk; the reverse side of Human.favoriteAnimals is
    exposed on it as `favoriteHumans` (see related_name below)."""

    ormar_config = base_ormar_config.copy(tablename="animals")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    # reverse relation added by Human.favoriteAnimals: favoriteHumans
class Link(ormar.Model):
    """Explicit through model for the Human<->Animal m2m, carrying one
    order column per side of the relation."""

    ormar_config = base_ormar_config.copy(tablename="link_table")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    animal_order: int = ormar.Integer(nullable=True)
    human_order: int = ormar.Integer(nullable=True)
class Human(ormar.Model):
    """Human with an m2m to Animal ordered by the through-model columns."""

    ormar_config = base_ormar_config.copy(tablename="humans")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    # "link__..." orders by columns of the through model (Link)
    favoriteAnimals: List[Animal] = ormar.ManyToMany(
        Animal,
        through=Link,
        related_name="favoriteHumans",
        orders_by=["link__animal_order"],
        related_orders_by=["link__human_order"],
    )
class Human2(ormar.Model):
    """Model with an intentionally invalid through-order ("...__fail") used by
    test_ordering_by_through_fail to trigger ModelDefinitionError."""

    ormar_config = base_ormar_config.copy(tablename="humans2")

    id: UUID = ormar.UUID(primary_key=True, default=uuid4)
    name: str = ormar.Text(default="")
    favoriteAnimals: List[Animal] = ormar.ManyToMany(
        Animal, related_name="favoriteHumans2", orders_by=["link__animal_order__fail"]
    )
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_ordering_by_through_fail():
    """A nested (too deep) through-model order should raise ModelDefinitionError."""
    async with base_ormar_config.database:
        alice = await Human2(name="Alice").save()
        spot = await Animal(name="Spot").save()
        await alice.favoriteAnimals.add(spot)
        # the invalid "link__animal_order__fail" order blows up on load
        with pytest.raises(ModelDefinitionError):
            await alice.load_all()
def _get_filtered_query(
    sender: Type[Model], instance: Model, to_class: Type[Model]
) -> QuerySet:
    """
    Helper function.
    Builds a queryset of ``sender`` rows filtered by the pk of the related
    ``to_class`` instance attached to ``instance``.
    """
    target_name = to_class.get_name()
    related_pk = getattr(instance, target_name).pk
    return sender.objects.filter(**{target_name: related_pk})
def _get_through_model_relations(
    sender: Type[Model], instance: Model
) -> Tuple[Type[Model], Type[Model]]:
    """
    Helper function.
    Resolves the two model classes joined by the through model ``sender``.
    """
    names = list(instance.extract_related_names())
    fields = sender.ormar_config.model_fields
    return fields[names[0]].to, fields[names[1]].to
async def _populate_order_on_insert(
    sender: Type[Model], instance: Model, from_class: Type[Model], to_class: Type[Model]
):
    """
    Helper function.
    Get max values from database for both orders and adds 1 (0 if max is None) if the
    order is not provided. If the order is provided it reorders the existing links
    to match the newly defined order.

    Assumes names f"{model.get_name()}_order" like for Animal: animal_order.
    """
    order_column = f"{from_class.get_name()}_order"
    if getattr(instance, order_column) is None:
        # no explicit order -> append at the end (max existing order + 1, or 0)
        query = _get_filtered_query(sender, instance, to_class)
        max_order = await query.max(order_column)
        max_order = max_order + 1 if max_order is not None else 0
        setattr(instance, order_column, max_order)
    else:
        # explicit order -> shift the existing links to make room for it
        await _reorder_on_update(
            sender=sender,
            instance=instance,
            from_class=from_class,
            to_class=to_class,
            passed_args={order_column: getattr(instance, order_column)},
        )
async def _reorder_on_update(
    sender: Type[Model],
    instance: Model,
    from_class: Type[Model],
    to_class: Type[Model],
    passed_args: Dict,
):
    """
    Helper function.
    Actually reorders links by given order passed in add/update query to the link
    model.

    Assumes names f"{model.get_name()}_order" like for Animal: animal_order.
    """
    order = f"{from_class.get_name()}_order"
    if order in passed_args:
        query = _get_filtered_query(sender, instance, to_class)
        # all sibling links except the one being updated, in current order
        to_reorder = await query.exclude(pk=instance.pk).order_by(order).all()
        new_order = passed_args.get(order)
        if to_reorder and new_order is not None:
            # can be more efficient - here we renumber all even if not needed.
            for ind, link in enumerate(to_reorder):
                if ind < new_order:
                    setattr(link, order, ind)
                else:
                    # leave a gap at new_order for the link being moved
                    setattr(link, order, ind + 1)
            await sender.objects.bulk_update(
                cast(List[Model], to_reorder), columns=[order]
            )
@pre_save(Link)
async def order_link_on_insert(sender: Type[Model], instance: Model, **kwargs: Any):
    """
    Signal receiver registered on Link model, triggered every time before one is created
    by calling save() on a model. Note that signal functions for pre_save signal accepts
    sender class, instance and have to accept **kwargs even if it's empty as of now.
    """
    rel_one, rel_two = _get_through_model_relations(sender, instance)
    # populate the order column for each side of the relation
    await _populate_order_on_insert(
        sender=sender, instance=instance, from_class=rel_one, to_class=rel_two
    )
    await _populate_order_on_insert(
        sender=sender, instance=instance, from_class=rel_two, to_class=rel_one
    )
@pre_update(Link)
async def reorder_links_on_update(
    sender: Type[ormar.Model], instance: ormar.Model, passed_args: Dict, **kwargs: Any
):
    """
    Signal receiver registered on Link model, triggered every time before one is updated
    by calling update() on a model. Note that signal functions for pre_update signal
    accepts sender class, instance, passed_args which is a dict of kwargs passed to
    update and have to accept **kwargs even if it's empty as of now.
    """
    rel_one, rel_two = _get_through_model_relations(sender, instance)
    # renumber both sides; _reorder_on_update is a no-op for a side whose
    # order column is not present in passed_args
    await _reorder_on_update(
        sender=sender,
        instance=instance,
        from_class=rel_one,
        to_class=rel_two,
        passed_args=passed_args,
    )
    await _reorder_on_update(
        sender=sender,
        instance=instance,
        from_class=rel_two,
        to_class=rel_one,
        passed_args=passed_args,
    )
@pre_relation_remove([Animal, Human])
async def reorder_links_on_remove(
    sender: Type[ormar.Model],
    instance: ormar.Model,
    child: ormar.Model,
    relation_name: str,
    **kwargs: Any,
):
    """
    Signal receiver registered on Animal and Human models, triggered every time before
    relation on a model is removed. Note that signal functions for pre_relation_remove
    signal accepts sender class, instance, child, relation_name and have to accept
    **kwargs even if it's empty as of now.

    Note that if classes have many relations you need to check if current one is ordered
    """
    through_class = sender.ormar_config.model_fields[relation_name].through
    through_instance = getattr(instance, through_class.get_name())
    if not through_instance:
        # the through instance is not always populated - fetch it explicitly
        parent_pk = instance.pk
        child_pk = child.pk
        filter_kwargs = {f"{sender.get_name()}": parent_pk, child.get_name(): child_pk}
        through_instance = await through_class.objects.get(**filter_kwargs)
    rel_one, rel_two = _get_through_model_relations(through_class, through_instance)
    # 999999 means "move to the very end" so the remaining links get renumbered
    await _reorder_on_update(
        sender=through_class,
        instance=through_instance,
        from_class=rel_one,
        to_class=rel_two,
        passed_args={f"{rel_one.get_name()}_order": 999999},
    )
    await _reorder_on_update(
        sender=through_class,
        instance=through_instance,
        from_class=rel_two,
        to_class=rel_one,
        passed_args={f"{rel_two.get_name()}_order": 999999},
    )
@pytest.mark.asyncio
async def test_ordering_by_through_on_m2m_field():
    """End-to-end check of the signal-driven ordering kept on the through model."""
    async with base_ormar_config.database:

        def verify_order(instance, expected):
            # verifies both the order of related models and that the order
            # column on the through model is a gapless 0..n-1 sequence
            field_name = (
                "favoriteAnimals" if isinstance(instance, Human) else "favoriteHumans"
            )
            order_field_name = (
                "animal_order" if isinstance(instance, Human) else "human_order"
            )
            assert [x.name for x in getattr(instance, field_name)] == expected
            assert [
                getattr(x.link, order_field_name) for x in getattr(instance, field_name)
            ] == [i for i in range(len(expected))]

        alice = await Human(name="Alice").save()
        bob = await Human(name="Bob").save()
        charlie = await Human(name="Charlie").save()
        spot = await Animal(name="Spot").save()
        kitty = await Animal(name="Kitty").save()
        noodle = await Animal(name="Noodle").save()

        # order of add() calls defines the default order (pre_save signal)
        await alice.favoriteAnimals.add(noodle)
        await alice.favoriteAnimals.add(spot)
        await alice.favoriteAnimals.add(kitty)
        await alice.load_all()
        verify_order(alice, ["Noodle", "Spot", "Kitty"])
        await bob.favoriteAnimals.add(noodle)
        await bob.favoriteAnimals.add(kitty)
        await bob.favoriteAnimals.add(spot)
        await bob.load_all()
        verify_order(bob, ["Noodle", "Kitty", "Spot"])
        await charlie.favoriteAnimals.add(kitty)
        await charlie.favoriteAnimals.add(noodle)
        await charlie.favoriteAnimals.add(spot)
        await charlie.load_all()
        verify_order(charlie, ["Kitty", "Noodle", "Spot"])
        animals = [noodle, kitty, spot]
        for animal in animals:
            await animal.load_all()
            verify_order(animal, ["Alice", "Bob", "Charlie"])

        # an explicit order on add() inserts at that position, shifting others
        zack = await Human(name="Zack").save()
        await noodle.favoriteHumans.add(zack, human_order=0)
        await noodle.load_all()
        verify_order(noodle, ["Zack", "Alice", "Bob", "Charlie"])
        await zack.load_all()
        verify_order(zack, ["Noodle"])

        # updating link.human_order moves the row (pre_update signal renumbers)
        await noodle.favoriteHumans.filter(name="Zack").update(link=dict(human_order=1))
        await noodle.load_all()
        verify_order(noodle, ["Alice", "Zack", "Bob", "Charlie"])
        await noodle.favoriteHumans.filter(name="Zack").update(link=dict(human_order=2))
        await noodle.load_all()
        verify_order(noodle, ["Alice", "Bob", "Zack", "Charlie"])
        await noodle.favoriteHumans.filter(name="Zack").update(link=dict(human_order=3))
        await noodle.load_all()
        verify_order(noodle, ["Alice", "Bob", "Charlie", "Zack"])

        # removing a relation renumbers the remaining links (pre_relation_remove)
        await kitty.favoriteHumans.remove(bob)
        await kitty.load_all()
        assert [x.name for x in kitty.favoriteHumans] == ["Alice", "Charlie"]
        bob = await noodle.favoriteHumans.get(pk=bob.pk)
        assert bob.link.human_order == 1
        await noodle.favoriteHumans.remove(
            await noodle.favoriteHumans.filter(link__human_order=2).get()
        )
        await noodle.load_all()
        verify_order(noodle, ["Alice", "Bob", "Zack"])
collerek-ormar-c09209a/tests/test_ordering/test_proper_order_of_sorting_apply.py 0000664 0000000 0000000 00000003753 15130200524 0030544 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Author with no default ordering of its own."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Book mixing a model-level default order with relation-level orders."""

    ormar_config = base_ormar_config.copy(tablename="books", order_by=["-ranking"])

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(
        Author, orders_by=["name"], related_orders_by=["-year"]
    )
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    ranking: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="function")
async def cleanup():
    """Delete all Books and Authors after every test in this module."""
    yield
    async with base_ormar_config.database:
        # Books first - they hold the FK to Author
        await Book.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_default_orders_is_applied_from_reverse_relation():
    """An explicit order_by() on the query should win over related_orders_by."""
    async with base_ormar_config.database:
        tolkien = await Author(name="J.R.R. Tolkien").save()
        hobbit = await Book(author=tolkien, title="The Hobbit", year=1933).save()
        silmarillion = await Book(
            author=tolkien, title="The Silmarillion", year=1977
        ).save()
        lotr = await Book(
            author=tolkien, title="The Lord of the Rings", year=1955
        ).save()

        # default related order: "-year" -> newest first
        tolkien = await Author.objects.select_related("books").get()
        assert tolkien.books[2] == hobbit
        assert tolkien.books[1] == lotr
        assert tolkien.books[0] == silmarillion

        # explicit order_by("books__title") overrides the relation default
        tolkien = (
            await Author.objects.select_related("books").order_by("books__title").get()
        )
        assert tolkien.books[0] == hobbit
        assert tolkien.books[1] == lotr
        assert tolkien.books[2] == silmarillion
collerek-ormar-c09209a/tests/test_queries/ 0000775 0000000 0000000 00000000000 15130200524 0020627 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_queries/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022726 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_queries/test_adding_related.py 0000664 0000000 0000000 00000002114 15130200524 0025164 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Department(ormar.Model):
    """Parent model with an autoincrement integer pk."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Course(ormar.Model):
    """Course with an optional FK to Department (reverse name: courses)."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    completed: bool = ormar.Boolean(default=False)
    department: Optional[Department] = ormar.ForeignKey(Department)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_adding_relation_to_reverse_saves_the_child():
    """add() on a reverse FK relation should save the unsaved child model."""
    async with base_ormar_config.database:
        department = await Department(name="Science").save()
        course = Course(name="Math", completed=False)
        # course has no pk yet; add() should persist it and set the FK
        await department.courses.add(course)
        assert course.pk is not None
        assert course.department == department
        assert department.courses[0] == course
collerek-ormar-c09209a/tests/test_queries/test_aggr_functions.py 0000664 0000000 0000000 00000013207 15130200524 0025253 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
import pytest_asyncio
from ormar.exceptions import QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Author with a model-level default ordering of name descending."""

    ormar_config = base_ormar_config.copy(tablename="authors", order_by=["-name"])

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Book with numeric columns (year, ranking) used by the aggregate tests."""

    ormar_config = base_ormar_config.copy(
        tablename="books", order_by=["year", "-ranking"]
    )

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    ranking: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="function")
async def cleanup():
    """Delete all Books and Authors after every test in this module."""
    yield
    async with base_ormar_config.database:
        # Books first - they hold the FK to Author
        await Book.objects.delete(each=True)
        await Author.objects.delete(each=True)
async def sample_data():
    """Create one author with three books used by the aggregate tests below."""
    author = await Author(name="Author 1").save()
    await Book(title="Book 1", year=1920, ranking=3, author=author).save()
    await Book(title="Book 2", year=1930, ranking=1, author=author).save()
    await Book(title="Book 3", year=1923, ranking=5, author=author).save()
@pytest.mark.asyncio
async def test_min_method():
    """min() works on one or many columns, on strings, and across relations."""
    async with base_ormar_config.database:
        await sample_data()

        assert await Book.objects.min("year") == 1920
        # multiple columns return a dict keyed by column name
        result = await Book.objects.min(["year", "ranking"])
        assert result == dict(year=1920, ranking=1)
        # min on a string column is alphabetical
        assert await Book.objects.min("title") == "Book 1"

        # aggregation over related columns via "__" paths
        assert await Author.objects.select_related("books").min("books__year") == 1920
        result = await Author.objects.select_related("books").min(
            ["books__year", "books__ranking"]
        )
        assert result == dict(books__year=1920, books__ranking=1)
        # filters are applied before aggregating
        assert (
            await Author.objects.select_related("books")
            .filter(books__year__gt=1925)
            .min("books__year")
            == 1930
        )
@pytest.mark.asyncio
async def test_max_method():
    """max() works on one or many columns, on strings, and across relations."""
    async with base_ormar_config.database:
        await sample_data()

        assert await Book.objects.max("year") == 1930
        # multiple columns return a dict keyed by column name
        result = await Book.objects.max(["year", "ranking"])
        assert result == dict(year=1930, ranking=5)
        # max on a string column is alphabetical
        assert await Book.objects.max("title") == "Book 3"

        # aggregation over related columns via "__" paths
        assert await Author.objects.select_related("books").max("books__year") == 1930
        result = await Author.objects.select_related("books").max(
            ["books__year", "books__ranking"]
        )
        assert result == dict(books__year=1930, books__ranking=5)
        # filters are applied before aggregating
        assert (
            await Author.objects.select_related("books")
            .filter(books__year__lt=1925)
            .max("books__year")
            == 1923
        )
@pytest.mark.asyncio
async def test_sum_method():
    """sum() works on numeric columns only and across relations."""
    async with base_ormar_config.database:
        await sample_data()

        assert await Book.objects.sum("year") == 5773
        result = await Book.objects.sum(["year", "ranking"])
        assert result == dict(year=5773, ranking=9)
        # summing a string column is rejected
        with pytest.raises(QueryDefinitionError):
            await Book.objects.sum("title")

        # aggregation over related columns via "__" paths
        assert await Author.objects.select_related("books").sum("books__year") == 5773
        result = await Author.objects.select_related("books").sum(
            ["books__year", "books__ranking"]
        )
        assert result == dict(books__year=5773, books__ranking=9)
        # filters are applied before aggregating
        assert (
            await Author.objects.select_related("books")
            .filter(books__year__lt=1925)
            .sum("books__year")
            == 3843
        )
@pytest.mark.asyncio
async def test_avg_method():
    """avg() works on numeric columns only and across relations."""
    async with base_ormar_config.database:
        await sample_data()

        assert round(float(await Book.objects.avg("year")), 2) == 1924.33
        result = await Book.objects.avg(["year", "ranking"])
        assert round(float(result.get("year")), 2) == 1924.33
        assert result.get("ranking") == 3.0
        # averaging a string column is rejected
        with pytest.raises(QueryDefinitionError):
            await Book.objects.avg("title")

        # aggregation over related columns via "__" paths
        result = await Author.objects.select_related("books").avg("books__year")
        assert round(float(result), 2) == 1924.33
        result = await Author.objects.select_related("books").avg(
            ["books__year", "books__ranking"]
        )
        assert round(float(result.get("books__year")), 2) == 1924.33
        assert result.get("books__ranking") == 3.0
        # filters are applied before aggregating
        assert (
            await Author.objects.select_related("books")
            .filter(books__year__lt=1925)
            .avg("books__year")
            == 1921.5
        )
@pytest.mark.asyncio
async def test_queryset_method():
    """Aggregates are also available on a related queryset (author.books)."""
    async with base_ormar_config.database:
        await sample_data()
        author = await Author.objects.select_related("books").get()

        assert await author.books.min("year") == 1920
        assert await author.books.max("year") == 1930
        assert await author.books.sum("ranking") == 9
        assert await author.books.avg("ranking") == 3.0
        assert await author.books.max(["year", "title"]) == dict(
            year=1930, title="Book 3"
        )
@pytest.mark.asyncio
async def test_count_method():
    """count() is distinct by default; distinct=False keeps joined duplicates."""
    async with base_ormar_config.database:
        await sample_data()
        # one author even though the join produces three rows
        count = await Author.objects.select_related("books").count()
        assert count == 1
        # The legacy functionality: counts the raw joined rows
        count = await Author.objects.select_related("books").count(distinct=False)
        assert count == 3
collerek-ormar-c09209a/tests/test_queries/test_deep_relations_select_all.py 0000664 0000000 0000000 00000010400 15130200524 0027417 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from sqlalchemy import func
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Chart(ormar.Model):
    """Chart definition with JSON payload columns and a DB-side created date."""

    ormar_config = base_ormar_config.copy(tablename="charts")

    chart_id = ormar.Integer(primary_key=True, autoincrement=True)
    name = ormar.String(max_length=200, unique=True, index=True)
    query_text = ormar.Text()
    datasets = ormar.JSON()
    layout = ormar.JSON()
    data_config = ormar.JSON()
    # populated by the database, not the client
    created_date = ormar.DateTime(server_default=func.now())
    library = ormar.String(max_length=200, default="plotly")
    used_filters = ormar.JSON()
class Report(ormar.Model):
    """Report - the root model queried with select_all(follow=True) below."""

    ormar_config = base_ormar_config.copy(tablename="reports")

    report_id = ormar.Integer(primary_key=True, autoincrement=True)
    name = ormar.String(max_length=200, unique=True, index=True)
    filters_position = ormar.String(max_length=200)
    created_date = ormar.DateTime(server_default=func.now())
class Language(ormar.Model):
    """Language lookup table referenced by Translation."""

    ormar_config = base_ormar_config.copy(tablename="languages")

    language_id = ormar.Integer(primary_key=True, autoincrement=True)
    code = ormar.String(max_length=5)
    name = ormar.String(max_length=200)
class TranslationNode(ormar.Model):
    """Anchor model grouping translations; referenced by several models below."""

    ormar_config = base_ormar_config.copy(tablename="translation_nodes")

    node_id = ormar.Integer(primary_key=True, autoincrement=True)
    node_type = ormar.String(max_length=200)
class Translation(ormar.Model):
    """Single translated value for a node in a given language."""

    ormar_config = base_ormar_config.copy(tablename="translations")

    translation_id = ormar.Integer(primary_key=True, autoincrement=True)
    node_id = ormar.ForeignKey(TranslationNode, related_name="translations")
    language = ormar.ForeignKey(Language, name="language_id")
    value = ormar.String(max_length=500)
class Filter(ormar.Model):
    """Report filter definition with a translation node reference."""

    ormar_config = base_ormar_config.copy(tablename="filters")

    filter_id = ormar.Integer(primary_key=True, autoincrement=True)
    name = ormar.String(max_length=200, unique=True, index=True)
    label = ormar.String(max_length=200)
    query_text = ormar.Text()
    allow_multiselect = ormar.Boolean(default=True)
    created_date = ormar.DateTime(server_default=func.now())
    is_dynamic = ormar.Boolean(default=True)
    is_date = ormar.Boolean(default=False)
    translation = ormar.ForeignKey(TranslationNode, name="translation_node_id")
class FilterValue(ormar.Model):
    """Selectable value belonging to a Filter (reverse name: values)."""

    ormar_config = base_ormar_config.copy(tablename="filter_values")

    value_id = ormar.Integer(primary_key=True, autoincrement=True)
    value = ormar.String(max_length=300)
    label = ormar.String(max_length=300)
    filter = ormar.ForeignKey(Filter, name="filter_id", related_name="values")
    translation = ormar.ForeignKey(TranslationNode, name="translation_node_id")
class FilterXReport(ormar.Model):
    """Link table attaching a Filter to a Report with per-report settings."""

    ormar_config = base_ormar_config.copy(tablename="filters_x_reports")

    filter_x_report_id: int = ormar.Integer(primary_key=True)
    filter = ormar.ForeignKey(Filter, name="filter_id", related_name="reports")
    report = ormar.ForeignKey(Report, name="report_id", related_name="filters")
    sort_order: int = ormar.Integer()
    default_value: str = ormar.Text()
    is_visible: bool = ormar.Boolean()
class ChartXReport(ormar.Model):
    """Link table attaching a Chart (declared earlier in this module) to a Report."""

    ormar_config = base_ormar_config.copy(tablename="charts_x_reports")

    chart_x_report_id: int = ormar.Integer(primary_key=True)
    chart = ormar.ForeignKey(Chart, name="chart_id", related_name="reports")
    report = ormar.ForeignKey(Report, name="report_id", related_name="charts")
    sort_order: int = ormar.Integer()
    width: int = ormar.Integer()
class ChartColumn(ormar.Model):
    """A single column of a Chart, with an optional translation node."""

    ormar_config = base_ormar_config.copy(tablename="charts_columns")

    column_id: int = ormar.Integer(primary_key=True, autoincrement=True)
    chart = ormar.ForeignKey(Chart, name="chart_id", related_name="columns")
    column_name: str = ormar.String(max_length=200)
    column_type: str = ormar.String(max_length=200)
    translation = ormar.ForeignKey(TranslationNode, name="translation_node_id")
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_saving_related_fk_rel():
    """Smoke test: select_all(follow=True) across the whole FK graph must not raise."""
    async with base_ormar_config.database:
        # force_rollback keeps the database untouched after the test.
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Report.objects.select_all(follow=True).all()
collerek-ormar-c09209a/tests/test_queries/test_filter_groups.py 0000664 0000000 0000000 00000011057 15130200524 0025130 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Parent model used to exercise filter groups across a relation."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Child model related to Author; ``year`` is nullable on purpose."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
def test_or_group():
    """An or_ group spanning own and related fields compiles to a single OR clause."""
    group = ormar.or_(name="aa", books__title="bb")
    group.resolve(model_cls=Author)
    assert len(group.actions) == 2
    assert group.actions[0].target_model == Author
    assert group.actions[1].target_model == Book
    # The related table gets an alias prefix generated during resolve().
    prefix = group.actions[1].table_prefix
    compiled = str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    assert compiled == f"(authors.name = 'aa' OR {prefix}_books.title = 'bb')"
def test_and_group():
    """An and_ group spanning own and related fields compiles to a single AND clause."""
    group = ormar.and_(name="aa", books__title="bb")
    group.resolve(model_cls=Author)
    assert len(group.actions) == 2
    assert group.actions[0].target_model == Author
    assert group.actions[1].target_model == Book
    # The related table gets an alias prefix generated during resolve().
    prefix = group.actions[1].table_prefix
    compiled = str(
        group.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    assert compiled == f"(authors.name = 'aa' AND {prefix}_books.title = 'bb')"
def test_nested_and():
    """Two or_ groups nested inside and_ land in _nested_groups, not actions."""
    result = ormar.and_(
        ormar.or_(name="aa", books__title="bb"), ormar.or_(name="cc", books__title="dd")
    )
    result.resolve(model_cls=Author)
    assert len(result.actions) == 0
    assert len(result._nested_groups) == 2
    # Both nested groups reuse the same alias prefix for the books table.
    book_prefix = result._nested_groups[0].actions[1].table_prefix
    assert (
        str(result.get_text_clause().compile(compile_kwargs={"literal_binds": True}))
        == f"((authors.name = 'aa' OR "
        f"{book_prefix}"
        f"_books.title = 'bb') AND "
        f"(authors.name = 'cc' OR "
        f"{book_prefix}"
        f"_books.title = 'dd'))"
    )
def test_nested_group_and_action():
    """Mixing a nested or_ group with a plain kwarg yields one group + one action."""
    result = ormar.and_(ormar.or_(name="aa", books__title="bb"), books__title="dd")
    result.resolve(model_cls=Author)
    assert len(result.actions) == 1
    assert len(result._nested_groups) == 1
    # The kwarg action and the nested group share the books alias prefix.
    book_prefix = result._nested_groups[0].actions[1].table_prefix
    assert (
        str(result.get_text_clause().compile(compile_kwargs={"literal_binds": True}))
        == f"((authors.name = 'aa' OR "
        f"{book_prefix}"
        f"_books.title = 'bb') AND "
        f"{book_prefix}"
        f"_books.title = 'dd')"
    )
def test_deeply_nested_or():
    """Three levels of nesting (or_ of and_ of or_) compile into one clause."""
    result = ormar.or_(
        ormar.and_(
            ormar.or_(name="aa", books__title="bb"),
            ormar.or_(name="cc", books__title="dd"),
        ),
        ormar.and_(
            ormar.or_(books__year__lt=1900, books__title="11"),
            ormar.or_(books__year__gt="xx", books__title="22"),
        ),
    )
    result.resolve(model_cls=Author)
    assert len(result.actions) == 0
    assert len(result._nested_groups) == 2
    assert len(result._nested_groups[0]._nested_groups) == 2
    # All books references resolve to the same alias prefix.
    book_prefix = result._nested_groups[0]._nested_groups[0].actions[1].table_prefix
    result_qry = str(
        result.get_text_clause().compile(compile_kwargs={"literal_binds": True})
    )
    expected_qry = (
        f"(((authors.name = 'aa' OR {book_prefix}_books.title = 'bb') AND "
        f"(authors.name = 'cc' OR {book_prefix}_books.title = 'dd')) "
        f"OR (({book_prefix}_books.year < 1900 OR {book_prefix}_books.title = '11') AND"
        f" ({book_prefix}_books.year > 'xx' OR {book_prefix}_books.title = '22')))"
    )
    # Compiled SQL may contain newlines; normalize before comparing.
    assert result_qry.replace("\n", "") == expected_qry.replace("\n", "")
def test_one_model_group():
    """A flat group touching a single model yields only actions, no nested groups."""
    group = ormar.and_(year__gt=1900, title="bb")
    group.resolve(model_cls=Book)
    assert len(group.actions) == 2
    assert len(group._nested_groups) == 0
def test_one_model_nested_group():
    """Two nested or_ groups on one model yield nested groups and no direct actions."""
    group = ormar.and_(
        ormar.or_(year__gt=1900, title="bb"), ormar.or_(year__lt=1800, title="aa")
    )
    group.resolve(model_cls=Book)
    assert len(group.actions) == 0
    assert len(group._nested_groups) == 2
def test_one_model_with_group():
    """Mixing one nested and_ group with a kwarg yields one action and one group."""
    group = ormar.or_(ormar.and_(year__gt=1900, title="bb"), title="uu")
    group.resolve(model_cls=Book)
    assert len(group.actions) == 1
    assert len(group._nested_groups) == 1
collerek-ormar-c09209a/tests/test_queries/test_indirect_relations_to_self.py 0000664 0000000 0000000 00000003321 15130200524 0027633 0 ustar 00root root 0000000 0000000 from datetime import datetime
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Node(ormar.Model):
    """Graph node; Edge links nodes back to this model twice (src/dst)."""

    ormar_config = base_ormar_config.copy(tablename="node")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=120)
    type: str = ormar.String(max_length=12, default="FLOW")
    created_at: datetime = ormar.DateTime(timezone=True, default=datetime.now)
class Edge(ormar.Model):
    """Directed edge with two FKs to the same Node model (indirect self-relation)."""

    ormar_config = base_ormar_config.copy(tablename="edge")

    # String primary key, assigned by the caller rather than autoincremented.
    id: str = ormar.String(primary_key=True, max_length=12)
    src_node: Node = ormar.ForeignKey(Node, related_name="next_edges")
    dst_node: Node = ormar.ForeignKey(Node, related_name="previous_edges")
    order: int = ormar.Integer(default=1)
    created_at: datetime = ormar.DateTime(timezone=True, default=datetime.now)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_sort_order_on_main_model():
    """Follow two FKs to the same model (Node -> Edge -> Node) in one select.

    NOTE(review): the function name looks copy-pasted from test_order_by.py --
    this test exercises indirect relations to self, not sort order.
    """
    async with base_ormar_config.database:
        node1 = await Node(name="Node 1").save()
        node2 = await Node(name="Node 2").save()
        node3 = await Node(name="Node 3").save()
        # Three edges forming a cycle: 1->2, 2->3, 3->1.
        await Edge(id="Side 1", src_node=node1, dst_node=node2).save()
        await Edge(id="Side 2", src_node=node2, dst_node=node3, order=2).save()
        await Edge(id="Side 3", src_node=node3, dst_node=node1, order=3).save()
        active_nodes = await Node.objects.select_related(
            ["next_edges", "next_edges__dst_node"]
        ).all()
        assert len(active_nodes) == 3
        assert active_nodes[0].next_edges[0].id == "Side 1"
        # dst_node was loaded through the nested relation; "FLOW" is the default type.
        assert active_nodes[0].next_edges[0].dst_node.type == "FLOW"
collerek-ormar-c09209a/tests/test_queries/test_isnull_filter.py 0000664 0000000 0000000 00000005767 15130200524 0025132 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Parent model used to exercise isnull filtering across a relation."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Child model; nullable ``year`` is the column filtered with isnull."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
class JsonModel(ormar.Model):
    """Model mixing nullable Text/JSON columns with a non-nullable JSON column."""

    ormar_config = base_ormar_config.copy(tablename="jsons")

    id: int = ormar.Integer(primary_key=True)
    text_field: str = ormar.Text(nullable=True)
    json_field = ormar.JSON(nullable=True)
    json_not_null = ormar.JSON()
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_is_null():
    """isnull filter on own and related columns, with and without pagination."""
    async with base_ormar_config.database:
        tolkien = await Author.objects.create(name="J.R.R. Tolkien")
        # "The Hobbit" is created without a year, so its year column is NULL.
        await Book.objects.create(author=tolkien, title="The Hobbit")
        await Book.objects.create(
            author=tolkien, title="The Lord of the Rings", year=1955
        )
        await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
        books = await Book.objects.all(year__isnull=True)
        assert len(books) == 1
        assert books[0].year is None
        assert books[0].title == "The Hobbit"
        books = await Book.objects.all(year__isnull=False)
        assert len(books) == 2
        # isnull through the reverse relation filters the prefetched children too.
        tolkien = await Author.objects.select_related("books").get(
            books__year__isnull=True
        )
        assert len(tolkien.books) == 1
        assert tolkien.books[0].year is None
        assert tolkien.books[0].title == "The Hobbit"
        # Same query with pagination applied must produce the same result.
        tolkien = (
            await Author.objects.select_related("books")
            .paginate(1, 10)
            .get(books__year__isnull=True)
        )
        assert len(tolkien.books) == 1
        assert tolkien.books[0].year is None
        assert tolkien.books[0].title == "The Hobbit"
        tolkien = await Author.objects.select_related("books").get(
            books__year__isnull=False
        )
        assert len(tolkien.books) == 2
        assert tolkien.books[0].year == 1955
        assert tolkien.books[0].title == "The Lord of the Rings"
@pytest.mark.asyncio
async def test_isnull_json():
    """isnull filter behaviour on Text and JSON columns.

    Covers: NULL Text column, NULL nullable-JSON column, and a non-nullable
    JSON column created with None (which still matches isnull=False below).
    """
    async with base_ormar_config.database:
        # Fix: the created row is a JsonModel, not an Author -- rename the
        # misleading local (previously named ``author``).
        json_row = await JsonModel.objects.create(json_not_null=None)
        assert json_row.json_field is None
        non_null_text_fields = await JsonModel.objects.all(text_field__isnull=False)
        assert len(non_null_text_fields) == 0
        non_null_json_fields = await JsonModel.objects.all(json_field__isnull=False)
        assert len(non_null_json_fields) == 0
        # The non-nullable JSON column created with None is not SQL NULL here.
        non_null_json_fields = await JsonModel.objects.all(json_not_null__isnull=False)
        assert len(non_null_json_fields) == 1
collerek-ormar-c09209a/tests/test_queries/test_nested_reverse_relations.py 0000664 0000000 0000000 00000005510 15130200524 0027336 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class DataSource(ormar.Model):
    """Top of the two-level reverse-relation chain (source -> tables -> columns)."""

    ormar_config = base_ormar_config.copy(tablename="datasources")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200, unique=True, index=True)
class DataSourceTable(ormar.Model):
    """Middle level; cascades deletes down from its DataSource."""

    ormar_config = base_ormar_config.copy(tablename="source_tables")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200, index=True)
    source: Optional[DataSource] = ormar.ForeignKey(
        DataSource, name="source_id", related_name="tables", ondelete="CASCADE"
    )
class DataSourceTableColumn(ormar.Model):
    """Leaf level; cascades deletes down from its DataSourceTable."""

    ormar_config = base_ormar_config.copy(tablename="source_columns")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200, index=True)
    data_type: str = ormar.String(max_length=200)
    table: Optional[DataSourceTable] = ormar.ForeignKey(
        DataSourceTable, name="table_id", related_name="columns", ondelete="CASCADE"
    )
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_double_nested_reverse_relation():
    """Assign nested dicts two levels deep and persist them with save_related."""
    async with base_ormar_config.database:
        data_source = await DataSource(name="local").save()
        # Plain dicts: ormar converts them to models when assigned to the relation.
        test_tables = [
            {
                "name": "test1",
                "columns": [
                    {"name": "col1", "data_type": "test"},
                    {"name": "col2", "data_type": "test2"},
                    {"name": "col3", "data_type": "test3"},
                ],
            },
            {
                "name": "test2",
                "columns": [
                    {"name": "col4", "data_type": "test"},
                    {"name": "col5", "data_type": "test2"},
                    {"name": "col6", "data_type": "test3"},
                ],
            },
        ]
        data_source.tables = test_tables
        # follow=True walks the relation tree; save_all persists every node.
        await data_source.save_related(save_all=True, follow=True)
        tables = await DataSourceTable.objects.all()
        assert len(tables) == 2
        columns = await DataSourceTableColumn.objects.all()
        assert len(columns) == 6
        # Reload through the double-nested reverse relation and verify the shape.
        data_source = (
            await DataSource.objects.select_related("tables__columns")
            .filter(tables__name__in=["test1", "test2"], name="local")
            .get()
        )
        assert len(data_source.tables) == 2
        assert len(data_source.tables[0].columns) == 3
        assert data_source.tables[0].columns[0].name == "col1"
        assert data_source.tables[0].columns[2].name == "col3"
        assert len(data_source.tables[1].columns) == 3
        assert data_source.tables[1].columns[0].name == "col4"
        assert data_source.tables[1].columns[2].name == "col6"
collerek-ormar-c09209a/tests/test_queries/test_non_relation_fields_not_merged.py 0000664 0000000 0000000 00000002024 15130200524 0030456 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Chart(ormar.Model):
    """Model with a JSON list field that must not be merged across joined rows.

    NOTE(review): tablename "authors" looks copy-pasted from another module;
    it is only a schema name and does not affect this test's behaviour.
    """

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    datasets = ormar.JSON()
class Config(ormar.Model):
    """Child model; two Config rows point at one Chart to force row duplication."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    chart: Optional[Chart] = ormar.ForeignKey(Chart)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_list_field_that_is_not_relation_is_not_merged():
    """A JSON list column must stay intact when rows are deduplicated after a join."""
    async with base_ormar_config.database:
        chart = await Chart.objects.create(datasets=[{"test": "ok"}])
        # Two children -> the join returns the chart row twice.
        await Config.objects.create(chart=chart)
        await Config.objects.create(chart=chart)
        chart2 = await Chart.objects.select_related("configs").get()
        # The JSON list must not be doubled by the row-merging step.
        assert len(chart2.datasets) == 1
        assert chart2.datasets == [{"test": "ok"}]
collerek-ormar-c09209a/tests/test_queries/test_or_filters.py 0000664 0000000 0000000 00000020123 15130200524 0024406 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from ormar.exceptions import QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    """Parent model used to exercise or_/and_ filters across a relation."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Book(ormar.Model):
    """Child model related to Author; ``year`` is nullable."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    author: Optional[Author] = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_or_filters():
    """End-to-end or_/and_ filtering: kwargs API, field-expression API, nesting,
    exclude, limit/offset/order_by interaction, relation proxies, and errors.

    The assertions depend on the fixture rows created at the top and on
    insertion order, so statements must stay in this exact sequence.
    """
    async with base_ormar_config.database:
        # Fixture data: 3 Tolkien books, 2 Sapkowski books.
        tolkien = await Author(name="J.R.R. Tolkien").save()
        await Book(author=tolkien, title="The Hobbit", year=1933).save()
        await Book(author=tolkien, title="The Lord of the Rings", year=1955).save()
        await Book(author=tolkien, title="The Silmarillion", year=1977).save()
        sapkowski = await Author(name="Andrzej Sapkowski").save()
        await Book(author=sapkowski, title="The Witcher", year=1990).save()
        await Book(author=sapkowski, title="The Tower of Fools", year=2002).save()
        # Simple or_ over a related field and an own field.
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(author__name="J.R.R. Tolkien", year__gt=1970))
            .all()
        )
        assert len(books) == 5
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(author__name="J.R.R. Tolkien", year__lt=1995))
            .all()
        )
        assert len(books) == 4
        assert not any([x.title == "The Tower of Fools" for x in books])
        # Same query via the field-expression (| operator) API.
        books = (
            await Book.objects.select_related("author")
            .filter((Book.author.name == "J.R.R. Tolkien") | (Book.year < 1995))
            .all()
        )
        assert len(books) == 4
        assert not any([x.title == "The Tower of Fools" for x in books])
        # Chained filter() calls are implicitly AND-ed together.
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(year__gt=1960, year__lt=1940))
            .filter(author__name="J.R.R. Tolkien")
            .all()
        )
        assert len(books) == 2
        assert books[0].title == "The Hobbit"
        assert books[1].title == "The Silmarillion"
        # Equivalent explicit and_ wrapping the or_ group.
        books = (
            await Book.objects.select_related("author")
            .filter(
                ormar.and_(
                    ormar.or_(year__gt=1960, year__lt=1940),
                    author__name="J.R.R. Tolkien",
                )
            )
            .all()
        )
        assert len(books) == 2
        assert books[0].title == "The Hobbit"
        assert books[1].title == "The Silmarillion"
        # or_ of two and_ groups, combined with a trailing filter().
        books = (
            await Book.objects.select_related("author")
            .filter(
                ormar.or_(
                    ormar.and_(year__gt=1960, author__name="J.R.R. Tolkien"),
                    ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
                )
            )
            .filter(title__startswith="The")
            .all()
        )
        assert len(books) == 2
        assert books[0].title == "The Silmarillion"
        assert books[1].title == "The Witcher"
        # Same logic expressed with field expressions and &/| operators.
        books = (
            await Book.objects.select_related("author")
            .filter(
                (
                    (
                        (Book.year > 1960) & (Book.author.name == "J.R.R. Tolkien")
                        | (
                            (Book.year < 2000)
                            & (Book.author.name == "Andrzej Sapkowski")
                        )
                    )
                    & (Book.title.startswith("The"))
                )
            )
            .all()
        )
        assert len(books) == 2
        assert books[0].title == "The Silmarillion"
        assert books[1].title == "The Witcher"
        # Deeper nesting: or_ inside and_ inside or_.
        books = (
            await Book.objects.select_related("author")
            .filter(
                ormar.or_(
                    ormar.and_(
                        ormar.or_(year__gt=1960, year__lt=1940),
                        author__name="J.R.R. Tolkien",
                    ),
                    ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
                )
            )
            .all()
        )
        assert len(books) == 3
        assert books[0].title == "The Hobbit"
        assert books[1].title == "The Silmarillion"
        assert books[2].title == "The Witcher"
        # exclude() negates the whole group.
        books = (
            await Book.objects.select_related("author")
            .exclude(
                ormar.or_(
                    ormar.and_(year__gt=1960, author__name="J.R.R. Tolkien"),
                    ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
                )
            )
            .filter(title__startswith="The")
            .all()
        )
        assert len(books) == 3
        assert not any([x.title in ["The Silmarillion", "The Witcher"] for x in books])
        # Plain kwargs may be mixed into or_ alongside nested groups.
        books = (
            await Book.objects.select_related("author")
            .filter(
                ormar.or_(
                    ormar.and_(year__gt=1960, author__name="J.R.R. Tolkien"),
                    ormar.and_(year__lt=2000, author__name="Andrzej Sapkowski"),
                    title__icontains="hobbit",
                )
            )
            .filter(title__startswith="The")
            .all()
        )
        assert len(books) == 3
        assert not any(
            [x.title in ["The Tower of Fools", "The Lord of the Rings"] for x in books]
        )
        # Interaction with limit().
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(year__gt=1980, year__lt=1910))
            .filter(title__startswith="The")
            .limit(1)
            .all()
        )
        assert len(books) == 1
        assert books[0].title == "The Witcher"
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(year__gt=1980, author__name="Andrzej Sapkowski"))
            .filter(title__startswith="The")
            .limit(1)
            .all()
        )
        assert len(books) == 1
        assert books[0].title == "The Witcher"
        # Interaction with limit() + offset().
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(year__gt=1980, author__name="Andrzej Sapkowski"))
            .filter(title__startswith="The")
            .limit(1)
            .offset(1)
            .all()
        )
        assert len(books) == 1
        assert books[0].title == "The Tower of Fools"
        # Interaction with order_by() on top of limit/offset.
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(year__gt=1980, author__name="Andrzej Sapkowski"))
            .filter(title__startswith="The")
            .limit(1)
            .offset(1)
            .order_by("-id")
            .all()
        )
        assert len(books) == 1
        assert books[0].title == "The Witcher"
        # A bare string is not a valid filter argument.
        with pytest.raises(QueryDefinitionError):
            await Book.objects.select_related("author").filter("wrong").all()
        # Groups also work on the relation proxy (tolkien.books).
        books = await tolkien.books.filter(
            ormar.or_(year__lt=1940, year__gt=1960)
        ).all()
        assert len(books) == 2
        books = await tolkien.books.filter(
            ormar.and_(
                ormar.or_(year__lt=1940, year__gt=1960), title__icontains="hobbit"
            )
        ).all()
        assert len(books) == 1
        assert tolkien.books[0].title == "The Hobbit"
        # Degenerate group with a single condition still works.
        books = (
            await Book.objects.select_related("author")
            .filter(ormar.or_(author__name="J.R.R. Tolkien"))
            .all()
        )
        assert len(books) == 3
        books = (
            await Book.objects.select_related("author")
            .filter(
                ormar.or_(
                    ormar.and_(author__name__icontains="tolkien"),
                    ormar.and_(author__name__icontains="sapkowski"),
                )
            )
            .all()
        )
        assert len(books) == 5
collerek-ormar-c09209a/tests/test_queries/test_order_by.py 0000664 0000000 0000000 00000026071 15130200524 0024053 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Song(ormar.Model):
    """Standalone model used for single-table order_by tests."""

    ormar_config = base_ormar_config.copy(tablename="songs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    sort_order: int = ormar.Integer()
class Owner(ormar.Model):
    """Parent of Toy; used for order_by across a foreign key."""

    ormar_config = base_ormar_config.copy(tablename="owners")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class AliasNested(ormar.Model):
    """Model whose database column names differ from the field names (aliases)."""

    ormar_config = base_ormar_config.copy(tablename="aliases_nested")

    id: int = ormar.Integer(name="alias_id", primary_key=True)
    name: str = ormar.String(name="alias_name", max_length=100)
class AliasTest(ormar.Model):
    """Aliased model with an aliased FK column to AliasNested."""

    ormar_config = base_ormar_config.copy(tablename="aliases")

    id: int = ormar.Integer(name="alias_id", primary_key=True)
    name: str = ormar.String(name="alias_name", max_length=100)
    nested = ormar.ForeignKey(AliasNested, name="nested_alias")
class Toy(ormar.Model):
    """Child of Owner; used for order_by on a related model."""

    ormar_config = base_ormar_config.copy(tablename="toys")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    owner: Owner = ormar.ForeignKey(Owner)
class Factory(ormar.Model):
    """Parent of Car; second hop for nested order_by through many-to-many."""

    ormar_config = base_ormar_config.copy(tablename="factories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Car(ormar.Model):
    """Linked to User via ManyToMany and to Factory via FK."""

    ormar_config = base_ormar_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    factory: Optional[Factory] = ormar.ForeignKey(Factory)
class User(ormar.Model):
    """Owns cars through a many-to-many relation; used for m2m order_by."""

    ormar_config = base_ormar_config.copy(tablename="users")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    cars: List[Car] = ormar.ManyToMany(Car)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_sort_order_on_main_model():
    """order_by on the queried model: string names, +/- prefixes, field-expression
    asc()/desc(), limit interaction, and multi-column ordering."""
    async with base_ormar_config.database:
        await Song.objects.create(name="Song 3", sort_order=3)
        await Song.objects.create(name="Song 1", sort_order=1)
        await Song.objects.create(name="Song 2", sort_order=2)
        # Without order_by rows come back in insertion order.
        songs = await Song.objects.all()
        assert songs[0].name == "Song 3"
        assert songs[1].name == "Song 1"
        assert songs[2].name == "Song 2"
        songs = await Song.objects.order_by("-sort_order").all()
        assert songs[0].name == "Song 3"
        assert songs[1].name == "Song 2"
        assert songs[2].name == "Song 1"
        # Field-expression form is equivalent to the "-name" string form.
        songs = await Song.objects.order_by(Song.sort_order.desc()).all()
        assert songs[0].name == "Song 3"
        assert songs[1].name == "Song 2"
        assert songs[2].name == "Song 1"
        songs = await Song.objects.order_by("sort_order").all()
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 2"
        assert songs[2].name == "Song 3"
        songs = await Song.objects.order_by(Song.sort_order.asc()).all()
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 2"
        assert songs[2].name == "Song 3"
        songs = await Song.objects.order_by("name").all()
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 2"
        assert songs[2].name == "Song 3"
        songs = await Song.objects.order_by("name").limit(2).all()
        assert len(songs) == 2
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 2"
        # Duplicate sort_order to exercise multi-column ordering (ties broken by name).
        await Song.objects.create(name="Song 4", sort_order=1)
        songs = await Song.objects.order_by(["sort_order", "name"]).all()
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 4"
        assert songs[2].name == "Song 2"
        assert songs[3].name == "Song 3"
        songs = await Song.objects.order_by(
            [Song.sort_order.asc(), Song.name.asc()]
        ).all()
        assert songs[0].name == "Song 1"
        assert songs[1].name == "Song 4"
        assert songs[2].name == "Song 2"
        assert songs[3].name == "Song 3"
@pytest.mark.asyncio
async def test_sort_order_on_related_model():
    """order_by through a foreign key, in both directions of the relation."""
    async with base_ormar_config.database:
        aphrodite = await Owner.objects.create(name="Aphrodite")
        hermes = await Owner.objects.create(name="Hermes")
        zeus = await Owner.objects.create(name="Zeus")
        await Toy.objects.create(name="Toy 4", owner=zeus)
        await Toy.objects.create(name="Toy 5", owner=hermes)
        await Toy.objects.create(name="Toy 2", owner=aphrodite)
        await Toy.objects.create(name="Toy 1", owner=zeus)
        await Toy.objects.create(name="Toy 3", owner=aphrodite)
        await Toy.objects.create(name="Toy 6", owner=hermes)
        # Ordering by an own column while a relation is selected.
        toys = await Toy.objects.select_related("owner").order_by("name").all()
        assert [x.name.replace("Toy ", "") for x in toys] == [
            str(x + 1) for x in range(6)
        ]
        assert toys[0].owner == zeus
        assert toys[1].owner == aphrodite
        # Ordering by a column on the related model (owner__name).
        toys = await Toy.objects.select_related("owner").order_by("owner__name").all()
        assert toys[0].owner.name == toys[1].owner.name == "Aphrodite"
        assert toys[2].owner.name == toys[3].owner.name == "Hermes"
        assert toys[4].owner.name == toys[5].owner.name == "Zeus"
        # Reverse relation: ordering the child list of a single parent.
        owner = (
            await Owner.objects.select_related("toys")
            .order_by("toys__name")
            .filter(name="Zeus")
            .get()
        )
        assert owner.toys[0].name == "Toy 1"
        assert owner.toys[1].name == "Toy 4"
        owner = (
            await Owner.objects.select_related("toys")
            .order_by("-toys__name")
            .filter(name="Zeus")
            .get()
        )
        assert owner.toys[0].name == "Toy 4"
        assert owner.toys[1].name == "Toy 1"
        # Descending child order also reorders the parents accordingly.
        owners = (
            await Owner.objects.select_related("toys")
            .order_by("-toys__name")
            .filter(name__in=["Zeus", "Hermes"])
            .all()
        )
        assert owners[0].toys[0].name == "Toy 6"
        assert owners[0].toys[1].name == "Toy 5"
        assert owners[0].name == "Hermes"
        assert owners[1].toys[0].name == "Toy 4"
        assert owners[1].toys[1].name == "Toy 1"
        assert owners[1].name == "Zeus"
        # Adding "Toy 7" flips which owner sorts first under -toys__name.
        await Toy.objects.create(name="Toy 7", owner=zeus)
        owners = (
            await Owner.objects.select_related("toys")
            .order_by("-toys__name")
            .filter(name__in=["Zeus", "Hermes"])
            .all()
        )
        assert owners[0].toys[0].name == "Toy 7"
        assert owners[0].toys[1].name == "Toy 4"
        assert owners[0].toys[2].name == "Toy 1"
        assert owners[0].name == "Zeus"
        assert owners[1].toys[0].name == "Toy 6"
        assert owners[1].toys[1].name == "Toy 5"
        assert owners[1].name == "Hermes"
        # Multi-column ordering across the relation plus limit().
        toys = (
            await Toy.objects.select_related("owner")
            .order_by(["owner__name", "name"])
            .limit(2)
            .all()
        )
        assert len(toys) == 2
        assert toys[0].name == "Toy 2"
        assert toys[1].name == "Toy 3"
@pytest.mark.asyncio
async def test_sort_order_on_many_to_many():
    """order_by through a many-to-many relation, one and two hops deep."""
    async with base_ormar_config.database:
        factory1 = await Factory.objects.create(name="Factory 1")
        factory2 = await Factory.objects.create(name="Factory 2")
        car1 = await Car.objects.create(name="Buggy", factory=factory1)
        car2 = await Car.objects.create(name="Volkswagen", factory=factory2)
        car3 = await Car.objects.create(name="Ferrari", factory=factory1)
        car4 = await Car.objects.create(name="Volvo", factory=factory2)
        car5 = await Car.objects.create(name="Skoda", factory=factory1)
        car6 = await Car.objects.create(name="Seat", factory=factory2)
        user1 = await User.objects.create(name="Mark")
        user2 = await User.objects.create(name="Julie")
        await user1.cars.add(car1)
        await user1.cars.add(car3)
        await user1.cars.add(car4)
        await user1.cars.add(car5)
        await user2.cars.add(car1)
        await user2.cars.add(car2)
        await user2.cars.add(car5)
        await user2.cars.add(car6)
        # Ascending order of the m2m children for one user.
        user = (
            await User.objects.select_related("cars")
            .filter(name="Mark")
            .order_by("cars__name")
            .get()
        )
        assert user.cars[0].name == "Buggy"
        assert user.cars[1].name == "Ferrari"
        assert user.cars[2].name == "Skoda"
        assert user.cars[3].name == "Volvo"
        user = (
            await User.objects.select_related("cars")
            .filter(name="Mark")
            .order_by("-cars__name")
            .get()
        )
        assert user.cars[3].name == "Buggy"
        assert user.cars[2].name == "Ferrari"
        assert user.cars[1].name == "Skoda"
        assert user.cars[0].name == "Volvo"
        # Descending child order also determines the order of the users.
        users = await User.objects.select_related("cars").order_by("-cars__name").all()
        assert users[0].name == "Mark"
        assert users[1].cars[0].name == "Volkswagen"
        assert users[1].cars[1].name == "Skoda"
        assert users[1].cars[2].name == "Seat"
        assert users[1].cars[3].name == "Buggy"
        # Two-hop ordering: m2m relation then FK (cars__factory__name).
        users = (
            await User.objects.select_related(["cars__factory"])
            .order_by(["-cars__factory__name", "cars__name"])
            .all()
        )
        assert users[0].name == "Julie"
        assert users[0].cars[0].name == "Seat"
        assert users[0].cars[1].name == "Volkswagen"
        assert users[0].cars[2].name == "Buggy"
        assert users[0].cars[3].name == "Skoda"
        assert users[1].name == "Mark"
        assert users[1].cars[0].name == "Volvo"
        assert users[1].cars[1].name == "Buggy"
        assert users[1].cars[2].name == "Ferrari"
        assert users[1].cars[3].name == "Skoda"
@pytest.mark.asyncio
async def test_sort_order_with_aliases():
    """order_by must use the aliased database column names, own and related."""
    async with base_ormar_config.database:
        al1 = await AliasTest.objects.create(name="Test4")
        al2 = await AliasTest.objects.create(name="Test2")
        al3 = await AliasTest.objects.create(name="Test1")
        al4 = await AliasTest.objects.create(name="Test3")
        # "name" maps to the aliased column "alias_name".
        aliases = await AliasTest.objects.order_by("-name").all()
        assert [alias.name[-1] for alias in aliases] == ["4", "3", "2", "1"]
        nest1 = await AliasNested.objects.create(name="Try1")
        nest2 = await AliasNested.objects.create(name="Try2")
        nest3 = await AliasNested.objects.create(name="Try3")
        nest4 = await AliasNested.objects.create(name="Try4")
        al1.nested = nest1
        await al1.update()
        al2.nested = nest2
        await al2.update()
        al3.nested = nest3
        await al3.update()
        al4.nested = nest4
        await al4.update()
        # Ordering through the aliased FK ("nested_alias" column).
        aliases = (
            await AliasTest.objects.select_related("nested")
            .order_by("-nested__name")
            .all()
        )
        assert aliases[0].nested.name == "Try4"
        assert aliases[1].nested.name == "Try3"
        assert aliases[2].nested.name == "Try2"
        assert aliases[3].nested.name == "Try1"
collerek-ormar-c09209a/tests/test_queries/test_pagination.py 0000664 0000000 0000000 00000006505 15130200524 0024377 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from ormar.exceptions import QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Car(ormar.Model):
    """Simple model paginated directly and through the m2m proxy below."""

    # No explicit tablename -- ormar derives it from the class name.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class UsersCar(ormar.Model):
    """Explicit through model for the User <-> Car many-to-many relation."""

    ormar_config = base_ormar_config.copy(tablename="cars_x_users")
class User(ormar.Model):
    """Owns cars through the explicit UsersCar through model."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    cars = ormar.ManyToMany(Car, through=UsersCar)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_limit_zero():
    """limit(0) must return an empty list, not all rows."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            for i in range(5):
                await Car(name=f"{i}").save()
            cars = await Car.objects.limit(0).all()
            assert cars == []
            assert len(cars) == 0
@pytest.mark.asyncio
async def test_pagination_errors():
    """paginate() rejects non-positive page numbers and page sizes."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            with pytest.raises(QueryDefinitionError):
                await Car.objects.paginate(0).all()
            with pytest.raises(QueryDefinitionError):
                await Car.objects.paginate(1, page_size=0).all()
@pytest.mark.asyncio
async def test_pagination_on_single_model():
    """paginate() slices a single-model query by page number and page size."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            for i in range(20):
                await Car(name=f"{i}").save()
            cars_page1 = await Car.objects.paginate(1, page_size=5).all()
            assert len(cars_page1) == 5
            assert cars_page1[0].name == "0"
            assert cars_page1[4].name == "4"
            cars_page2 = await Car.objects.paginate(2, page_size=5).all()
            assert len(cars_page2) == 5
            assert cars_page2[0].name == "5"
            assert cars_page2[4].name == "9"
            # Default page size is large enough to return all 20 rows on page 1.
            all_cars = await Car.objects.paginate(1).all()
            assert len(all_cars) == 20
            half_cars = await Car.objects.paginate(2, page_size=10).all()
            assert len(half_cars) == 10
            assert half_cars[0].name == "10"
@pytest.mark.asyncio
async def test_proxy_pagination():
    """paginate() on the m2m relation proxy replaces the in-memory child list."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            user = await User(name="Jon").save()
            for i in range(20):
                c = await Car(name=f"{i}").save()
                await user.cars.add(c)
            # Each paginate().all() call refreshes user.cars with that page only.
            await user.cars.paginate(1, page_size=5).all()
            assert len(user.cars) == 5
            assert user.cars[0].name == "0"
            assert user.cars[4].name == "4"
            await user.cars.paginate(2, page_size=5).all()
            assert len(user.cars) == 5
            assert user.cars[0].name == "5"
            assert user.cars[4].name == "9"
            await user.cars.paginate(1).all()
            assert len(user.cars) == 20
            await user.cars.paginate(2, page_size=10).all()
            assert len(user.cars) == 10
            assert user.cars[0].name == "10"
collerek-ormar-c09209a/tests/test_queries/test_queryproxy_on_m2m_models.py 0000664 0000000 0000000 00000016613 15130200524 0027330 0 ustar 00root root 0000000 0000000 from typing import List, Optional, Union
import ormar
import pytest
from ormar.exceptions import QueryDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Subject(ormar.Model):
    """Plain model referenced by Category via FK."""

    ormar_config = base_ormar_config.copy(tablename="subjects")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=80)
class Author(ormar.Model):
    """Author of a Post (FK target)."""

    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)
class Category(ormar.Model):
    """M2M target of Post; carries a nullable sort_order and a Subject FK
    so the tests can exercise ordering and related-field selection."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
    sort_order: int = ormar.Integer(nullable=True)
    subject: Optional[Subject] = ormar.ForeignKey(Subject)
class PostCategory(ormar.Model):
    """Through model for the Post <-> Category many-to-many link."""

    ormar_config = base_ormar_config.copy(tablename="posts_categories")
class Post(ormar.Model):
    """Owner of the M2M queryset proxy (``post.categories``) under test."""

    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories: Optional[Union[Category, List[Category]]] = ormar.ManyToMany(
        Category, through=PostCategory
    )
    author: Optional[Author] = ormar.ForeignKey(Author)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_queryset_methods():
    """End-to-end walk through the queryset methods exposed on an M2M
    relation proxy: get_or_create / update_or_create, remove, all, exists,
    count, limit/offset, order_by, filter/exclude, fields/exclude_fields,
    select_related and prefetch_related.

    NOTE: the assertions on ``len(post.categories)`` check the side effect
    that each proxy query also replaces the cached related list.
    """
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            guido = await Author.objects.create(
                first_name="Guido", last_name="Van Rossum"
            )
            subject = await Subject(name="Random").save()
            post = await Post.objects.create(title="Hello, M2M", author=guido)
            news = await Category.objects.create(
                name="News", sort_order=1, subject=subject
            )
            breaking = await Category.objects.create(
                name="Breaking", sort_order=3, subject=subject
            )

            # Add a category to a post.
            await post.categories.add(news)
            await post.categories.add(breaking)

            # Existing row -> returned, not created; cache narrowed to the hit.
            category, created = await post.categories.get_or_create(name="News")
            assert category == news
            assert len(post.categories) == 1
            assert created is False

            # Missing row -> created and linked through the M2M table.
            category, created = await post.categories.get_or_create(
                name="Breaking News"
            )
            assert category != breaking
            assert category.pk is not None
            assert len(post.categories) == 2
            assert created is True

            await post.categories.update_or_create(pk=category.pk, name="Urgent News")
            assert len(post.categories) == 2
            cat, created = await post.categories.get_or_create(name="Urgent News")
            assert cat.pk == category.pk
            assert len(post.categories) == 1
            assert created is False

            # remove() only unlinks; delete() removes the row itself.
            await post.categories.remove(cat)
            await cat.delete()
            assert len(post.categories) == 0

            # update_or_create with no pk match creates and links a new row.
            category = await post.categories.update_or_create(
                name="Weather News", sort_order=2, subject=subject
            )
            assert category.pk is not None
            assert category.posts[0] == post
            assert len(post.categories) == 1

            categories = await post.categories.all()
            assert len(categories) == 3 == len(post.categories)
            assert await post.categories.exists()
            assert 3 == await post.categories.count()

            categories = await post.categories.limit(2).all()
            assert len(categories) == 2 == len(post.categories)

            categories2 = await post.categories.limit(2).offset(1).all()
            assert len(categories2) == 2 == len(post.categories)
            assert categories != categories2

            # Descending sort_order: Breaking(3), Weather News(2), News(1).
            categories = await post.categories.order_by("-sort_order").all()
            assert len(categories) == 3 == len(post.categories)
            assert post.categories[2].name == "News"
            assert post.categories[0].name == "Breaking"

            categories = await post.categories.exclude(name__icontains="news").all()
            assert len(categories) == 1 == len(post.categories)
            assert post.categories[0].name == "Breaking"

            categories = (
                await post.categories.filter(name__icontains="news")
                .order_by("-name")
                .all()
            )
            assert len(categories) == 2 == len(post.categories)
            assert post.categories[0].name == "Weather News"
            assert post.categories[1].name == "News"

            # fields()/exclude_fields() leave unselected columns as None.
            categories = await post.categories.fields("name").all()
            assert len(categories) == 3 == len(post.categories)
            for cat in post.categories:
                assert cat.sort_order is None

            categories = await post.categories.exclude_fields("sort_order").all()
            assert len(categories) == 3 == len(post.categories)
            for cat in post.categories:
                assert cat.sort_order is None
                assert cat.subject.name is None

            # Both join strategies populate the related subject.
            categories = await post.categories.select_related("subject").all()
            assert len(categories) == 3 == len(post.categories)
            for cat in post.categories:
                assert cat.subject.name is not None

            categories = await post.categories.prefetch_related("subject").all()
            assert len(categories) == 3 == len(post.categories)
            for cat in post.categories:
                assert cat.subject.name is not None
@pytest.mark.asyncio
async def test_queryset_update():
    """update() on an M2M proxy: mass update requires each=True (or a
    filter); an unfiltered bare update() raises QueryDefinitionError."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            guido = await Author.objects.create(
                first_name="Guido", last_name="Van Rossum"
            )
            subject = await Subject(name="Random").save()
            post = await Post.objects.create(title="Hello, M2M", author=guido)
            await post.categories.create(name="News", sort_order=1, subject=subject)
            await post.categories.create(name="Breaking", sort_order=3, subject=subject)

            await post.categories.order_by("sort_order").all()
            assert len(post.categories) == 2
            assert post.categories[0].sort_order == 1
            assert post.categories[0].name == "News"
            assert post.categories[1].sort_order == 3
            assert post.categories[1].name == "Breaking"

            # each=True updates every related row.
            updated = await post.categories.update(each=True, name="Test")
            assert updated == 2
            await post.categories.order_by("sort_order").all()
            assert len(post.categories) == 2
            assert post.categories[0].name == "Test"
            assert post.categories[1].name == "Test"

            # A filter scopes the update to matching rows only.
            updated = await post.categories.filter(sort_order=3).update(name="Test 2")
            assert updated == 1
            await post.categories.order_by("sort_order").all()
            assert len(post.categories) == 2
            assert post.categories[0].name == "Test"
            assert post.categories[1].name == "Test 2"

            # Unscoped update without each=True is rejected.
            with pytest.raises(QueryDefinitionError):
                await post.categories.update(name="Test WRONG")
collerek-ormar-c09209a/tests/test_queries/test_queryset_level_methods.py 0000664 0000000 0000000 00000035443 15130200524 0027044 0 ustar 00root root 0000000 0000000 from enum import Enum
from typing import Optional
import ormar
import pydantic
import pytest
from ormar import QuerySet
from ormar.exceptions import (
ModelListEmptyError,
ModelPersistenceError,
QueryDefinitionError,
)
from pydantic import Json
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class MySize(Enum):
    """Enum stored through ormar.Enum fields in ToDo and ItemConfig."""

    SMALL = 0
    BIG = 1
class Book(ormar.Model):
    """Model used by the delete/update and get_or_create tests."""

    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    author: str = ormar.String(max_length=100)
    # Default genre applied when not provided explicitly.
    genre: str = ormar.String(
        max_length=100,
        default="Fiction",
    )
class ToDo(ormar.Model):
    """Model for the bulk_create/bulk_update tests; mixes JSON and Enum."""

    ormar_config = base_ormar_config.copy(tablename="todos")

    id: int = ormar.Integer(primary_key=True)
    text: str = ormar.String(max_length=500)
    completed: bool = ormar.Boolean(default=False)
    pairs: pydantic.Json = ormar.JSON(default=[])
    size = ormar.Enum(enum_class=MySize, default=MySize.SMALL)
class Category(ormar.Model):
    """FK target for Note in the bulk-operation relation tests."""

    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=500)
class Note(ormar.Model):
    """Model with an optional Category FK for bulk tests with relations."""

    ormar_config = base_ormar_config.copy(tablename="notes")

    id: int = ormar.Integer(primary_key=True)
    text: str = ormar.String(max_length=500)
    category: Optional[Category] = ormar.ForeignKey(Category)
class ItemConfig(ormar.Model):
    """Model with JSON and Enum fields used by bulk-JSON and enum-filter tests."""

    ormar_config = base_ormar_config.copy(tablename="item_config")

    id: Optional[int] = ormar.Integer(primary_key=True)
    item_id: str = ormar.String(max_length=32, index=True)
    # Mutable JSON default -- each row starts with ["2", "3"].
    pairs: pydantic.Json = ormar.JSON(default=["2", "3"])
    size = ormar.Enum(enum_class=MySize, default=MySize.SMALL)
class QuerySetCls(QuerySet):
    """Custom queryset wired into Customer via ``queryset_class``."""

    async def first_or_404(self, *args, **kwargs):
        """Return the first match, raising if nothing is found."""
        instance = await self.get_or_none(*args, **kwargs)
        if instance is None:
            # maybe HTTPException in fastapi
            raise ValueError("customer not found")
        return instance
class Customer(ormar.Model):
    """Model whose ``objects`` manager is the custom QuerySetCls above."""

    ormar_config = base_ormar_config.copy(
        tablename="customer",
        queryset_class=QuerySetCls,
    )

    id: Optional[int] = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=32)
class JsonTestModel(ormar.Model):
    """Minimal model with a pydantic Json field for bulk_create JSON tests."""

    ormar_config = base_ormar_config.copy(tablename="test_model")

    id: int = ormar.Integer(primary_key=True)
    json_field: Json = ormar.JSON()
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_delete_and_update():
    """Queryset-level update()/delete(): both require a filter or an
    explicit each=True; bare calls raise QueryDefinitionError."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Book.objects.create(
                title="Tom Sawyer", author="Twain, Mark", genre="Adventure"
            )
            await Book.objects.create(
                title="War and Peace", author="Tolstoy, Leo", genre="Fiction"
            )
            await Book.objects.create(
                title="Anna Karenina", author="Tolstoy, Leo", genre="Fiction"
            )
            await Book.objects.create(
                title="Harry Potter", author="Rowling, J.K.", genre="Fantasy"
            )
            await Book.objects.create(
                title="Lord of the Rings", author="Tolkien, J.R.", genre="Fantasy"
            )

            all_books = await Book.objects.all()
            assert len(all_books) == 5

            # Filtered update renames both Tolstoy rows.
            await Book.objects.filter(author="Tolstoy, Leo").update(
                author="Lenin, Vladimir"
            )
            all_books = await Book.objects.filter(author="Lenin, Vladimir").all()
            assert len(all_books) == 2

            historic_books = await Book.objects.filter(genre="Historic").all()
            assert len(historic_books) == 0

            # Unfiltered update without each=True is rejected.
            with pytest.raises(QueryDefinitionError):
                await Book.objects.update(genre="Historic")

            await Book.objects.filter(author="Lenin, Vladimir").update(genre="Historic")
            historic_books = await Book.objects.filter(genre="Historic").all()
            assert len(historic_books) == 2

            # delete() accepts filter kwargs directly.
            await Book.objects.delete(genre="Fantasy")
            all_books = await Book.objects.all()
            assert len(all_books) == 3

            await Book.objects.update(each=True, genre="Fiction")
            all_books = await Book.objects.filter(genre="Fiction").all()
            assert len(all_books) == 3

            # Bare delete() without filters needs each=True as well.
            with pytest.raises(QueryDefinitionError):
                await Book.objects.delete()

            await Book.objects.delete(each=True)
            all_books = await Book.objects.all()
            assert len(all_books) == 0
@pytest.mark.asyncio
async def test_get_or_create():
    """get_or_create returns (model, created) and raises MultipleMatches
    when more than one row matches the lookup."""
    async with base_ormar_config.database:
        tom, created = await Book.objects.get_or_create(
            title="Volume I", author="Anonymous", genre="Fiction"
        )
        assert await Book.objects.count() == 1
        assert created is True

        # Second call with identical kwargs fetches the same row.
        second_tom, created = await Book.objects.get_or_create(
            title="Volume I", author="Anonymous", genre="Fiction"
        )
        assert second_tom.pk == tom.pk
        assert created is False
        assert await Book.objects.count() == 1

        # Create a duplicate on purpose, then the lookup is ambiguous.
        assert await Book.objects.create(
            title="Volume I", author="Anonymous", genre="Fiction"
        )
        with pytest.raises(ormar.exceptions.MultipleMatches):
            await Book.objects.get_or_create(
                title="Volume I", author="Anonymous", genre="Fiction"
            )
@pytest.mark.asyncio
async def test_get_or_create_with_defaults():
    """_defaults are only applied on creation; on a successful get they
    are ignored entirely."""
    async with base_ormar_config.database:
        book, created = await Book.objects.get_or_create(
            title="Nice book", _defaults={"author": "Mojix", "genre": "Historic"}
        )
        assert created is True
        assert book.author == "Mojix"
        assert book.title == "Nice book"
        assert book.genre == "Historic"

        # Row found by author -> _defaults["title"] is not applied.
        book2, created = await Book.objects.get_or_create(
            author="Mojix", _defaults={"title": "Book2"}
        )
        assert created is False
        assert book2 == book
        assert book2.title == "Nice book"
        assert book2.author == "Mojix"
        assert book2.genre == "Historic"
        assert await Book.objects.count() == 1

        # On creation, a _defaults key may even override the lookup kwarg.
        book, created = await Book.objects.get_or_create(
            title="doesn't exist",
            _defaults={"title": "overwritten", "author": "Mojix", "genre": "Historic"},
        )
        assert created is True
        assert book.title == "overwritten"

        book2, created = await Book.objects.get_or_create(
            title="overwritten", _defaults={"title": "doesn't work"}
        )
        assert created is False
        assert book2.title == "overwritten"
        assert book2 == book
@pytest.mark.asyncio
async def test_update_or_create():
    """update_or_create updates by id/pk when given, otherwise creates."""
    async with base_ormar_config.database:
        tom = await Book.objects.update_or_create(
            title="Volume I", author="Anonymous", genre="Fiction"
        )
        assert await Book.objects.count() == 1

        # Both ``id=`` and the ``pk=`` alias update the existing row.
        assert await Book.objects.update_or_create(id=tom.id, genre="Historic")
        assert await Book.objects.count() == 1
        assert await Book.objects.update_or_create(pk=tom.id, genre="Fantasy")
        assert await Book.objects.count() == 1

        # A duplicate row makes a plain get() ambiguous.
        assert await Book.objects.create(
            title="Volume I", author="Anonymous", genre="Fantasy"
        )
        with pytest.raises(ormar.exceptions.MultipleMatches):
            await Book.objects.get(
                title="Volume I", author="Anonymous", genre="Fantasy"
            )
@pytest.mark.asyncio
async def test_bulk_create():
    """bulk_create persists all models (assigning pks) and rejects an
    empty list with ModelListEmptyError."""
    async with base_ormar_config.database:
        await ToDo.objects.bulk_create(
            [
                ToDo(text="Buy the groceries."),
                ToDo(text="Call Mum.", completed=True),
                ToDo(text="Send invoices.", completed=True),
            ]
        )
        todoes = await ToDo.objects.all()
        assert len(todoes) == 3
        for todo in todoes:
            assert todo.pk is not None

        completed = await ToDo.objects.filter(completed=True).all()
        assert len(completed) == 2

        with pytest.raises(ormar.exceptions.ModelListEmptyError):
            await ToDo.objects.bulk_create([])
@pytest.mark.asyncio
async def test_bulk_create_json_field():
    """JSON fields must round-trip identically whether persisted via
    .save() or bulk_create(), and stay queryable via SQL JSON operators."""
    async with base_ormar_config.database:
        json_value = {"a": 1}
        test_model_1 = JsonTestModel(id=1, json_field=json_value)
        test_model_2 = JsonTestModel(id=2, json_field=json_value)

        # store one with .save() and the other with .bulk_create()
        await test_model_1.save()
        await JsonTestModel.objects.bulk_create([test_model_2])

        # refresh from the database
        await test_model_1.load()
        await test_model_2.load()
        assert test_model_1.json_field == test_model_2.json_field  # True

        # try to query the json field
        table = JsonTestModel.ormar_config.table
        query = table.select().where(table.c.json_field["a"].as_integer() == 1)
        res = [
            JsonTestModel.from_row(record, source_model=JsonTestModel)
            for record in await base_ormar_config.database.fetch_all(query)
        ]
        # Both rows match, proving both paths serialized the JSON the same way.
        assert test_model_1 in res
        assert test_model_2 in res
        assert len(res) == 2
@pytest.mark.asyncio
async def test_bulk_create_with_relation():
    """bulk_create persists FK references on the created rows."""
    async with base_ormar_config.database:
        category = await Category.objects.create(name="Sample Category")

        await Note.objects.bulk_create(
            [
                Note(text="Buy the groceries.", category=category),
                Note(text="Call Mum.", category=category),
            ]
        )
        todoes = await Note.objects.all()
        assert len(todoes) == 2
        for todo in todoes:
            assert todo.category.pk == category.pk
@pytest.mark.asyncio
async def test_bulk_update():
    """bulk_update writes back string, bool and enum field changes."""
    async with base_ormar_config.database:
        await ToDo.objects.bulk_create(
            [
                ToDo(text="Buy the groceries."),
                ToDo(text="Call Mum.", completed=True),
                ToDo(text="Send invoices.", completed=True),
            ]
        )
        todoes = await ToDo.objects.all()
        assert len(todoes) == 3

        # Mutate every row locally, then persist in one round trip.
        for todo in todoes:
            todo.text = todo.text + "_1"
            todo.completed = False
            todo.size = MySize.BIG

        await ToDo.objects.bulk_update(todoes)
        completed = await ToDo.objects.filter(completed=False).all()
        assert len(completed) == 3

        todoes = await ToDo.objects.all()
        assert len(todoes) == 3
        for todo in todoes:
            assert todo.text[-2:] == "_1"
            assert todo.size == MySize.BIG
@pytest.mark.asyncio
async def test_bulk_update_with_only_selected_columns():
    """bulk_update(columns=[...]) only writes the listed columns; other
    local changes are silently dropped."""
    async with base_ormar_config.database:
        await ToDo.objects.bulk_create(
            [
                ToDo(text="Reset the world simulation.", completed=False),
                ToDo(text="Watch kittens.", completed=True),
            ]
        )
        todoes = await ToDo.objects.all()
        assert len(todoes) == 2

        # Change both text and completed locally...
        for todo in todoes:
            todo.text = todo.text + "_1"
            todo.completed = False

        # ...but persist only the "completed" column.
        await ToDo.objects.bulk_update(todoes, columns=["completed"])
        completed = await ToDo.objects.filter(completed=False).all()
        assert len(completed) == 2

        todoes = await ToDo.objects.all()
        assert len(todoes) == 2
        for todo in todoes:
            # Text change was NOT written back.
            assert todo.text[-2:] != "_1"
@pytest.mark.asyncio
async def test_bulk_update_with_relation():
    """bulk_update persists reassigned FK references."""
    async with base_ormar_config.database:
        category = await Category.objects.create(name="Sample Category")
        category2 = await Category.objects.create(name="Sample II Category")

        await Note.objects.bulk_create(
            [
                Note(text="Buy the groceries.", category=category),
                Note(text="Call Mum.", category=category),
                Note(text="Text skynet.", category=category),
            ]
        )
        notes = await Note.objects.all()
        assert len(notes) == 3

        # Repoint every note to the second category, then persist.
        for note in notes:
            note.category = category2

        await Note.objects.bulk_update(notes)
        notes_upd = await Note.objects.all()
        assert len(notes_upd) == 3
        for note in notes_upd:
            assert note.category.pk == category2.pk
@pytest.mark.asyncio
async def test_bulk_update_not_saved_objts():
    """bulk_update rejects unsaved models (no pk) and empty lists."""
    async with base_ormar_config.database:
        category = await Category.objects.create(name="Sample Category")

        # Models without a pk cannot be bulk-updated.
        with pytest.raises(ModelPersistenceError):
            await Note.objects.bulk_update(
                [
                    Note(text="Buy the groceries.", category=category),
                    Note(text="Call Mum.", category=category),
                ]
            )

        with pytest.raises(ModelListEmptyError):
            await Note.objects.bulk_update([])
@pytest.mark.asyncio
async def test_bulk_operations_with_json():
    """JSON columns survive bulk_create and bulk_update (lists and dicts)
    and remain queryable through SQL JSON accessors."""
    async with base_ormar_config.database:
        items = [
            ItemConfig(item_id="test1"),
            ItemConfig(item_id="test2"),
            ItemConfig(item_id="test3"),
        ]
        await ItemConfig.objects.bulk_create(items)
        items = await ItemConfig.objects.all()
        # All rows carry the field default.
        assert all(x.pairs == ["2", "3"] for x in items)

        # Replace the JSON list via bulk_update.
        for item in items:
            item.pairs = ["1"]
        await ItemConfig.objects.bulk_update(items)
        items = await ItemConfig.objects.all()
        assert all(x.pairs == ["1"] for x in items)

        # Switch the JSON payload type (list -> dict) for rows with id > 1.
        items = await ItemConfig.objects.filter(ItemConfig.id > 1).all()
        for item in items:
            item.pairs = {"b": 2}
        await ItemConfig.objects.bulk_update(items)

        items = await ItemConfig.objects.filter(ItemConfig.id > 1).all()
        assert all(x.pairs == {"b": 2} for x in items)

        # The updated JSON stays queryable at SQL level.
        table = ItemConfig.ormar_config.table
        query = table.select().where(table.c.pairs["b"].as_integer() == 2)
        res = [
            ItemConfig.from_row(record, source_model=ItemConfig)
            for record in await base_ormar_config.database.fetch_all(query)
        ]
        assert len(res) == 2
@pytest.mark.asyncio
async def test_custom_queryset_cls():
    """The queryset_class hook exposes first_or_404 on Customer.objects."""
    async with base_ormar_config.database:
        # No rows yet -> the custom helper raises.
        with pytest.raises(ValueError):
            await Customer.objects.first_or_404(id=1)

        await Customer(name="test").save()
        customer = await Customer.objects.first_or_404(name="test")
        assert customer.name == "test"
@pytest.mark.asyncio
async def test_filter_enum():
    """Enum members can be passed directly as filter values."""
    async with base_ormar_config.database:
        item = ItemConfig(item_id="test_1")
        await item.save()

        found = await ItemConfig.objects.filter(size=MySize.SMALL).first()
        assert found
collerek-ormar-c09209a/tests/test_queries/test_quoting_table_names_in_on_join_clause.py 0000664 0000000 0000000 00000002525 15130200524 0032021 0 ustar 00root root 0000000 0000000 import datetime
import uuid
from typing import Dict, Optional, Union
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Team(ormar.Model):
    """Team model used by the ON-clause quoting test."""

    ormar_config = base_ormar_config.copy(tablename="team")

    id: uuid.UUID = ormar.UUID(default=uuid.uuid4, primary_key=True, index=True)
    name = ormar.Text(nullable=True)
    client_id = ormar.Text(nullable=True)
    client_secret = ormar.Text(nullable=True)
    # Pass the callable itself, not its result: ``utcnow()`` would be
    # evaluated once at import time and freeze a single timestamp as the
    # default for every row. Mirrors the ``uuid.uuid4`` usage on the pk.
    created_on = ormar.DateTime(timezone=True, default=datetime.datetime.utcnow)
class User(ormar.Model):
    """User with an explicitly named FK column ("team_id") to Team."""

    ormar_config = base_ormar_config.copy(tablename="user")

    id: uuid.UUID = ormar.UUID(default=uuid.uuid4, primary_key=True, index=True)
    client_user_id = ormar.Text()
    token = ormar.Text(nullable=True)
    team: Optional[Team] = ormar.ForeignKey(to=Team, name="team_id")
class Order(ormar.Model):
    """Order with a reserved-word tablename ("order") and a User FK; the
    reverse relation ("orders") drives the quoting test below."""

    ormar_config = base_ormar_config.copy(tablename="order")

    id: uuid.UUID = ormar.UUID(default=uuid.uuid4, primary_key=True, index=True)
    user: Optional[Union[User, Dict]] = ormar.ForeignKey(User)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_quoting_on_clause_without_prefix():
    """Smoke test: joining through a reverse FK onto reserved-word table
    names ("user", "order") must produce valid, quoted SQL."""
    async with base_ormar_config.database:
        # No assertion needed -- the query raising would fail the test.
        await User.objects.select_related("orders").all()
collerek-ormar-c09209a/tests/test_queries/test_reserved_sql_keywords_escaped.py 0000664 0000000 0000000 00000005112 15130200524 0030350 0 ustar 00root root 0000000 0000000 import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)
class User(ormar.Model):
    """Model whose table AND a column are the reserved word "user"."""

    ormar_config = base_ormar_config.copy(tablename="user")

    id: int = ormar.Integer(primary_key=True, autoincrement=True, nullable=False)
    user: str = ormar.String(
        unique=True, index=True, nullable=False, max_length=255
    )  # ID of the user on auth0
    first: str = ormar.String(nullable=False, max_length=255)
    last: str = ormar.String(nullable=False, max_length=255)
    email: str = ormar.String(unique=True, index=True, nullable=False, max_length=255)
    display_name: str = ormar.String(
        unique=True, index=True, nullable=False, max_length=255
    )
    pic_url: str = ormar.Text(nullable=True)
class Task(ormar.Model):
    """Model mapping the Python-keyword-safe attribute ``from_`` onto a
    DB column literally named "from" (a reserved SQL word)."""

    ormar_config = base_ormar_config.copy(tablename="task")

    id: int = ormar.Integer(primary_key=True, autoincrement=True, nullable=False)
    from_: str = ormar.String(name="from", nullable=True, max_length=200)
    user = ormar.ForeignKey(User)
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_single_model_quotes():
    """Ordering by a reserved-word column ("user") must emit quoted SQL."""
    async with base_ormar_config.database:
        await User.objects.create(
            user="test",
            first="first",
            last="last",
            email="email@com.com",
            display_name="first last",
        )

        fetched = await User.objects.order_by("user").get(first="first")
        assert fetched.email == "email@com.com"
        assert fetched.last == "last"
@pytest.mark.asyncio
async def test_two_model_quotes():
    """Reserved-word columns stay correctly quoted across joins, both in
    ordering ("-from") and in cross-table order_by ("user__user")."""
    async with base_ormar_config.database:
        user = await User.objects.create(
            user="test",
            first="first",
            last="last",
            email="email@com.com",
            display_name="first last",
        )
        await Task(user=user, from_="aa").save()
        await Task(user=user, from_="bb").save()

        task = (
            await Task.objects.select_related("user")
            .order_by("user__user")
            .get(from_="aa")
        )
        assert task.user.last == "last"
        assert task.user.email == "email@com.com"

        # Descending sort on the raw column name "from".
        tasks = await Task.objects.select_related("user").order_by("-from").all()
        assert len(tasks) == 2
        assert tasks[0].user.last == "last"
        assert tasks[0].user.email == "email@com.com"
        assert tasks[0].from_ == "bb"
        assert tasks[1].user.last == "last"
        assert tasks[1].user.email == "email@com.com"
        assert tasks[1].from_ == "aa"
collerek-ormar-c09209a/tests/test_queries/test_reverse_fk_queryset.py 0000664 0000000 0000000 00000020350 15130200524 0026334 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pytest
from ormar import NoMatch
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Album(ormar.Model):
    """Parent model; ``album.tracks`` is the reverse-FK proxy under test."""

    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True, name="album_id")
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False)
class Writer(ormar.Model):
    """Secondary FK target used for select/prefetch_related tests."""

    ormar_config = base_ormar_config.copy(tablename="writers")

    id: int = ormar.Integer(primary_key=True, name="writer_id")
    name: str = ormar.String(max_length=100)
class Track(ormar.Model):
    """Child model holding nullable FKs to Album and Writer (nullable so
    clear()/remove() can detach rows without deleting them)."""

    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album, name="album_id")
    title: str = ormar.String(max_length=100)
    position: int = ormar.Integer()
    play_count: int = ormar.Integer(nullable=True)
    written_by: Optional[Writer] = ormar.ForeignKey(Writer, name="writer_id")
async def get_sample_data():
    """Create one album with three tracks (two writers); return (album, tracks)."""
    album = await Album(name="Malibu").save()
    john = await Writer.objects.create(name="John")
    sue = await Writer.objects.create(name="Sue")

    track1 = await Track(
        album=album, title="The Bird", position=1, play_count=30, written_by=john
    ).save()
    track2 = await Track(
        album=album,
        title="Heart don't stand a chance",
        position=2,
        play_count=20,
        written_by=sue,
    ).save()
    track3 = await Track(
        album=album, title="The Waters", position=3, play_count=10, written_by=john
    ).save()
    return album, [track1, track2, track3]
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_quering_by_reverse_fk():
    """get_or_create / update_or_create / remove on a reverse-FK proxy
    (album.tracks), including the cached-list side effects."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sample_data = await get_sample_data()
            track1 = sample_data[1][0]
            album = await Album.objects.first()

            assert await album.tracks.exists()
            assert await album.tracks.count() == 3

            # Existing track -> fetched, not created; cache narrowed to hit.
            track, created = await album.tracks.get_or_create(
                title="The Bird", position=1, play_count=30
            )
            assert track == track1
            assert created is False
            assert len(album.tracks) == 1

            # Missing track -> created with the _defaults applied.
            track, created = await album.tracks.get_or_create(
                title="The Bird2", _defaults={"position": 4, "play_count": 5}
            )
            assert track != track1
            assert created is True
            assert track.pk is not None
            assert track.position == 4 and track.play_count == 5
            assert len(album.tracks) == 2

            await album.tracks.update_or_create(pk=track.pk, play_count=50)
            assert len(album.tracks) == 2
            track, created = await album.tracks.get_or_create(title="The Bird2")
            assert created is False
            assert track.play_count == 50
            assert len(album.tracks) == 1

            # remove() detaches (album FK set to None); delete() removes row.
            await album.tracks.remove(track)
            assert track.album is None
            await track.delete()
            assert len(album.tracks) == 0

            # update_or_create without pk creates a new attached track.
            track6 = await album.tracks.update_or_create(
                title="The Bird3", position=4, play_count=5
            )
            assert track6.pk is not None
            assert track6.play_count == 5
            assert len(album.tracks) == 1

            await album.tracks.remove(track6)
            assert track6.album is None
            await track6.delete()
            assert len(album.tracks) == 0
@pytest.mark.asyncio
async def test_getting():
    """Read operations on the reverse-FK proxy: fields/exclude_fields,
    all, order_by, create, limit/offset, filter/exclude and clear()."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sample_data = await get_sample_data()
            album = sample_data[0]

            # Column subsetting leaves the unselected play_count as None.
            track1 = await album.tracks.fields(["album", "title", "position"]).get(
                title="The Bird"
            )
            track2 = await album.tracks.exclude_fields("play_count").get(
                title="The Bird"
            )
            for track in [track1, track2]:
                assert track.title == "The Bird"
                assert track.album == album
                assert track.play_count is None
            assert len(album.tracks) == 1

            tracks = await album.tracks.all()
            assert len(tracks) == 3
            assert len(album.tracks) == 3

            # Ascending play_count: The Waters(10) first, The Bird(30) last.
            tracks = await album.tracks.order_by("play_count").all()
            assert len(tracks) == 3
            assert tracks[0].title == "The Waters"
            assert tracks[2].title == "The Bird"
            assert len(album.tracks) == 3

            # create() on the proxy attaches the new row to the album.
            track = await album.tracks.create(
                title="The Bird Fly Away", position=4, play_count=10
            )
            assert track.title == "The Bird Fly Away"
            assert track.position == 4
            assert track.album == album
            assert len(album.tracks) == 4

            tracks = await album.tracks.all()
            assert len(tracks) == 4

            tracks = await album.tracks.limit(2).all()
            assert len(tracks) == 2
            tracks2 = await album.tracks.limit(2).offset(2).all()
            assert len(tracks2) == 2
            assert tracks != tracks2

            # filter and exclude with the same condition are complementary.
            tracks3 = await album.tracks.filter(play_count__lt=15).all()
            assert len(tracks3) == 2
            tracks4 = await album.tracks.exclude(play_count__lt=15).all()
            assert len(tracks4) == 2
            assert tracks3 != tracks4
            assert len(album.tracks) == 2

            # Default clear() only detaches -- the rows survive with album=None.
            await album.tracks.clear()
            tracks = await album.tracks.all()
            assert len(tracks) == 0
            assert len(album.tracks) == 0

            still_tracks = await Track.objects.all()
            assert len(still_tracks) == 4
            for track in still_tracks:
                assert track.album is None
@pytest.mark.asyncio
async def test_cleaning_related():
    """clear(keep_reversed=False) deletes the related rows outright."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = (await get_sample_data())[0]

            await album.tracks.clear(keep_reversed=False)

            remaining = await album.tracks.all()
            assert remaining == []
            assert len(album.tracks) == 0

            # Unlike the default clear(), the tracks are gone from the table.
            assert await Track.objects.all() == []
@pytest.mark.asyncio
async def test_loading_related():
    """Both join strategies populate the nested writer on proxy queries."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sample_data = await get_sample_data()
            album = sample_data[0]

            tracks = await album.tracks.select_related("written_by").all()
            assert len(tracks) == 3
            assert len(album.tracks) == 3
            for track in tracks:
                assert track.written_by is not None

            tracks = await album.tracks.prefetch_related("written_by").all()
            assert len(tracks) == 3
            assert len(album.tracks) == 3
            for track in tracks:
                assert track.written_by is not None
@pytest.mark.asyncio
async def test_adding_removing():
    """add()/remove() on the reverse-FK proxy set and clear the FK; with
    keep_reversed=False remove() deletes the child row entirely."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            sample_data = await get_sample_data()
            album = sample_data[0]

            track_new = await Track(title="Rainbow", position=5, play_count=300).save()
            await album.tracks.add(track_new)
            assert track_new.album == album
            assert len(album.tracks) == 4

            track_check = await Track.objects.get(title="Rainbow")
            assert track_check.album == album

            # Default remove(): only detaches (FK set to None).
            await album.tracks.remove(track_new)
            assert track_new.album is None
            assert len(album.tracks) == 3

            # keep_reversed=False: the child row itself is deleted.
            track1 = album.tracks[0]
            await album.tracks.remove(track1, keep_reversed=False)
            with pytest.raises(NoMatch):
                await track1.load()

            track_test = await Track.objects.get(title="Rainbow")
            assert track_test.album is None
collerek-ormar-c09209a/tests/test_queries/test_selecting_subset_of_columns.py 0000664 0000000 0000000 00000021313 15130200524 0030026 0 ustar 00root root 0000000 0000000 import itertools
from typing import List, Optional
import ormar
import pydantic
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class NickNames(ormar.Model):
    """M2M target of HQ; its "name" attribute maps to column "hq_name"."""

    ormar_config = base_ormar_config.copy(tablename="nicks")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    is_lame: bool = ormar.Boolean(nullable=True)
class NicksHq(ormar.Model):
    """Through model for the HQ <-> NickNames many-to-many link."""

    ormar_config = base_ormar_config.copy(tablename="nicks_x_hq")
class HQ(ormar.Model):
    """Headquarters; both HQ.name and NickNames.name map to "hq_name"
    columns, exercising aliased-column selection across the relation."""

    ormar_config = base_ormar_config.copy(tablename="hqs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="hq_name")
    nicks: List[NickNames] = ormar.ManyToMany(NickNames, through=NicksHq)
class Company(ormar.Model):
    """Middle of the Car -> Company -> HQ chain used by the fields() tests."""

    ormar_config = base_ormar_config.copy(tablename="companies")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=False, name="company_name")
    founded: int = ormar.Integer(nullable=True)
    hq: HQ = ormar.ForeignKey(HQ)
class Car(ormar.Model):
    """Root model of the select_related/fields chain; most columns are
    nullable so subset selection can leave them as None."""

    ormar_config = base_ormar_config.copy(tablename="cars")

    id: int = ormar.Integer(primary_key=True)
    manufacturer: Optional[Company] = ormar.ForeignKey(Company)
    name: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    gearbox_type: str = ormar.String(max_length=20, nullable=True)
    gears: int = ormar.Integer(nullable=True)
    aircon_type: str = ormar.String(max_length=20, nullable=True)
create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="module")
@pytest.mark.usefixtures("create_test_database")
async def sample_data():
    """Module-scoped fixture: one HQ (two nicknames), Toyota, three cars.

    autouse=True means every test in this module runs against this data.
    """
    async with base_ormar_config.database:
        nick1 = await NickNames.objects.create(name="Nippon", is_lame=False)
        nick2 = await NickNames.objects.create(name="EroCherry", is_lame=True)
        hq = await HQ.objects.create(name="Japan")
        await hq.nicks.add(nick1)
        await hq.nicks.add(nick2)

        toyota = await Company.objects.create(name="Toyota", founded=1937, hq=hq)

        await Car.objects.create(
            manufacturer=toyota,
            name="Corolla",
            year=2020,
            gearbox_type="Manual",
            gears=5,
            aircon_type="Manual",
        )
        await Car.objects.create(
            manufacturer=toyota,
            name="Yaris",
            year=2019,
            gearbox_type="Manual",
            gears=5,
            aircon_type="Manual",
        )
        await Car.objects.create(
            manufacturer=toyota,
            name="Supreme",
            year=2020,
            gearbox_type="Auto",
            gears=6,
            aircon_type="Auto",
        )
@pytest.mark.asyncio
async def test_selecting_subset():
    """Exercise .fields() with list, dict-of-ellipsis and dict-of-set syntaxes.

    All three spellings must produce the same restricted result; columns not
    selected come back as None, and excluding a mandatory column raises.
    """
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            # List syntax with double-underscore paths.
            all_cars = (
                await Car.objects.select_related(["manufacturer__hq__nicks"])
                .fields(
                    [
                        "id",
                        "name",
                        "manufacturer__name",
                        "manufacturer__hq__name",
                        "manufacturer__hq__nicks__name",
                    ]
                )
                .all()
            )
            # Nested dict syntax; `...` means "include this field".
            all_cars2 = (
                await Car.objects.select_related(["manufacturer__hq__nicks"])
                .fields(
                    {
                        "id": ...,
                        "name": ...,
                        "manufacturer": {
                            "name": ...,
                            "hq": {"name": ..., "nicks": {"name": ...}},
                        },
                    }
                )
                .all()
            )
            # Same as above but the innermost level uses a set of field names.
            all_cars3 = (
                await Car.objects.select_related(["manufacturer__hq__nicks"])
                .fields(
                    {
                        "id": ...,
                        "name": ...,
                        "manufacturer": {
                            "name": ...,
                            "hq": {"name": ..., "nicks": {"name"}},
                        },
                    }
                )
                .all()
            )
            assert all_cars3 == all_cars
            for car in itertools.chain(all_cars, all_cars2):
                # Unselected own columns are None.
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                # `founded` was not selected on the related model either.
                assert car.manufacturer.founded is None
                assert car.manufacturer.hq.name == "Japan"
                assert len(car.manufacturer.hq.nicks) == 2
                assert car.manufacturer.hq.nicks[0].is_lame is None
            # Chained .fields() calls accumulate the selected columns.
            all_cars = (
                await Car.objects.select_related("manufacturer")
                .fields("id")
                .fields(["name"])
                .all()
            )
            for car in all_cars:
                assert all(
                    getattr(car, x) is None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                # Related model was not restricted, so it loads fully here.
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded == 1937
                assert car.manufacturer.hq.name is None
            all_cars_check = await Car.objects.select_related("manufacturer").all()
            # {"manufacturer": ...} selects the whole related model.
            all_cars_with_whole_nested = (
                await Car.objects.select_related("manufacturer")
                .fields(["id", "name", "year", "gearbox_type", "gears", "aircon_type"])
                .fields({"manufacturer": ...})
                .all()
            )
            for car in itertools.chain(all_cars_check, all_cars_with_whole_nested):
                assert all(
                    getattr(car, x) is not None
                    for x in ["year", "gearbox_type", "gears", "aircon_type"]
                )
                assert car.manufacturer.name == "Toyota"
                assert car.manufacturer.founded == 1937
            all_cars_dummy = (
                await Car.objects.select_related("manufacturer")
                .fields(["id", "name", "year", "gearbox_type", "gears", "aircon_type"])
                # .fields({"manufacturer": ...})
                # .exclude_fields({"manufacturer": ...})
                .fields({"manufacturer": {"name"}})
                .exclude_fields({"manufacturer__founded"})
                .all()
            )
            assert all_cars_dummy[0].manufacturer.founded is None
            with pytest.raises(pydantic.ValidationError):
                # cannot exclude mandatory model columns - company__name in this example
                await Car.objects.select_related("manufacturer").fields(
                    ["id", "name", "manufacturer__founded"]
                ).all()
@pytest.mark.asyncio
async def test_selecting_subset_of_through_model():
    """Excluding the auto-generated through model (`nickshq`) removes it from
    loaded m2m rows; excluding only one of its columns keeps it present."""
    async with base_ormar_config.database:
        # String-path exclusion of the whole through model.
        car = (
            await Car.objects.select_related(["manufacturer__hq__nicks"])
            .fields(
                {
                    "id": ...,
                    "name": ...,
                    "manufacturer": {
                        "name": ...,
                        "hq": {"name": ..., "nicks": {"name": ...}},
                    },
                }
            )
            .exclude_fields("manufacturer__hq__nickshq")
            .get()
        )
        assert car.manufacturer.hq.nicks[0].nickshq is None
        # Equivalent nested-dict exclusion.
        car = (
            await Car.objects.select_related(["manufacturer__hq__nicks"])
            .fields(
                {
                    "id": ...,
                    "name": ...,
                    "manufacturer": {
                        "name": ...,
                        "hq": {"name": ..., "nicks": {"name": ...}},
                    },
                }
            )
            .exclude_fields({"manufacturer": {"hq": {"nickshq": ...}}})
            .get()
        )
        assert car.manufacturer.hq.nicks[0].nickshq is None
        # Excluding just one through-model column keeps the through model itself.
        car = (
            await Car.objects.select_related(["manufacturer__hq__nicks"])
            .fields(
                {
                    "id": ...,
                    "name": ...,
                    "manufacturer": {
                        "name": ...,
                        "hq": {"name": ..., "nicks": {"name": ...}},
                    },
                }
            )
            .exclude_fields("manufacturer__hq__nickshq__nick")
            .get()
        )
        assert car.manufacturer.hq.nicks[0].nickshq is not None
# Tests for QuerySet.values() / .values_list().
# Model graph: Post -> Category -> User; Role <-m2m-> User (auto through "roleuser").
from typing import List, Optional

import ormar
import pytest
import pytest_asyncio

from ormar.exceptions import QueryDefinitionError

from tests.lifespan import init_tests
from tests.settings import create_config

base_ormar_config = create_config()


class User(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Role(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Auto-generated through model is named "roleuser" (asserted in tests below).
    users: List[User] = ormar.ManyToMany(User)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)
    sort_order: int = ormar.Integer(nullable=True)
    created_by: Optional[User] = ormar.ForeignKey(User, related_name="categories")


class Post(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=200)
    category: Optional[Category] = ormar.ForeignKey(Category)


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(autouse=True, scope="module")
# NOTE(review): pytest marks are normally ignored on fixtures - this line is
# presumably a no-op; TODO confirm schema setup comes from `create_test_database`.
@pytest.mark.usefixtures("create_test_database")
async def sample_data():
    """Seed one user with two roles, one category and three posts.

    Tests below assert exact ids/row order, which depend on this insertion order.
    """
    async with base_ormar_config.database:
        creator = await User(name="Anonymous").save()
        admin = await Role(name="admin").save()
        editor = await Role(name="editor").save()
        # `roles` is the reverse side of Role.users (m2m).
        await creator.roles.add(admin)
        await creator.roles.add(editor)
        news = await Category(name="News", sort_order=0, created_by=creator).save()
        await Post(name="Ormar strikes again!", category=news).save()
        await Post(name="Why don't you use ormar yet?", category=news).save()
        await Post(name="Check this out, ormar now for free", category=news).save()
@pytest.mark.asyncio
async def test_simple_queryset_values():
    """values() with no arguments dumps every column of every row as a dict."""
    async with base_ormar_config.database:
        rows = await Post.objects.values()
        titles = [
            "Ormar strikes again!",
            "Why don't you use ormar yet?",
            "Check this out, ormar now for free",
        ]
        expected = [
            {"id": pk, "name": title, "category": 1}
            for pk, title in enumerate(titles, start=1)
        ]
        assert rows == expected
@pytest.mark.asyncio
async def test_queryset_values_nested_relation():
    """values() across select_related prefixes related columns with the
    relation path (double-underscore separated)."""
    async with base_ormar_config.database:
        posts = await Post.objects.select_related("category__created_by").values()
        assert posts == [
            {
                "id": 1,
                "name": "Ormar strikes again!",
                "category": 1,
                "category__id": 1,
                "category__name": "News",
                "category__sort_order": 0,
                "category__created_by": 1,
                "category__created_by__id": 1,
                "category__created_by__name": "Anonymous",
            },
            {
                "category": 1,
                "id": 2,
                "name": "Why don't you use ormar yet?",
                "category__id": 1,
                "category__name": "News",
                "category__sort_order": 0,
                "category__created_by": 1,
                "category__created_by__id": 1,
                "category__created_by__name": "Anonymous",
            },
            {
                "id": 3,
                "name": "Check this out, ormar now for free",
                "category": 1,
                "category__id": 1,
                "category__name": "News",
                "category__sort_order": 0,
                "category__created_by": 1,
                "category__created_by__id": 1,
                "category__created_by__name": "Anonymous",
            },
        ]


@pytest.mark.asyncio
async def test_queryset_values_nested_relation_subset_of_fields():
    """values() accepts an explicit list of (possibly nested) field paths."""
    async with base_ormar_config.database:
        posts = await Post.objects.select_related("category__created_by").values(
            ["name", "category__name", "category__created_by__name"]
        )
        assert posts == [
            {
                "name": "Ormar strikes again!",
                "category__name": "News",
                "category__created_by__name": "Anonymous",
            },
            {
                "name": "Why don't you use ormar yet?",
                "category__name": "News",
                "category__created_by__name": "Anonymous",
            },
            {
                "name": "Check this out, ormar now for free",
                "category__name": "News",
                "category__created_by__name": "Anonymous",
            },
        ]
@pytest.mark.asyncio
async def test_queryset_simple_values_list():
    """values_list() with no arguments dumps every column as a tuple per row."""
    async with base_ormar_config.database:
        rows = await Post.objects.values_list()
        titles = [
            "Ormar strikes again!",
            "Why don't you use ormar yet?",
            "Check this out, ormar now for free",
        ]
        assert rows == [
            (pk, title, 1) for pk, title in enumerate(titles, start=1)
        ]
@pytest.mark.asyncio
async def test_queryset_nested_relation_values_list():
    """values_list() across select_related flattens own + related columns
    into one tuple per row."""
    async with base_ormar_config.database:
        posts = await Post.objects.select_related("category__created_by").values_list()
        assert posts == [
            (1, "Ormar strikes again!", 1, 1, "News", 0, 1, 1, "Anonymous"),
            (2, "Why don't you use ormar yet?", 1, 1, "News", 0, 1, 1, "Anonymous"),
            (
                3,
                "Check this out, ormar now for free",
                1,
                1,
                "News",
                0,
                1,
                1,
                "Anonymous",
            ),
        ]


@pytest.mark.asyncio
async def test_queryset_nested_relation_subset_of_fields_values_list():
    """values_list() honors an explicit field-path selection and keeps order."""
    async with base_ormar_config.database:
        posts = await Post.objects.select_related("category__created_by").values_list(
            ["name", "category__name", "category__created_by__name"]
        )
        assert posts == [
            ("Ormar strikes again!", "News", "Anonymous"),
            ("Why don't you use ormar yet?", "News", "Anonymous"),
            ("Check this out, ormar now for free", "News", "Anonymous"),
        ]
@pytest.mark.asyncio
async def test_m2m_values():
    """values() over an m2m join includes the auto through model ("roleuser")
    and yields one row per join row - here the same user twice."""
    async with base_ormar_config.database:
        user = await User.objects.select_related("roles").values()
        assert user == [
            {
                "id": 1,
                "name": "Anonymous",
                "roleuser__id": 1,
                "roleuser__role": 1,
                "roleuser__user": 1,
                "roles__id": 1,
                "roles__name": "admin",
            },
            {
                "id": 1,
                "name": "Anonymous",
                "roleuser__id": 2,
                "roleuser__role": 2,
                "roleuser__user": 1,
                "roles__id": 2,
                "roles__name": "editor",
            },
        ]


@pytest.mark.asyncio
async def test_nested_m2m_values():
    """values() can traverse m2m then FK relations in one query."""
    async with base_ormar_config.database:
        user = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .values()
        )
        assert user == [
            {
                "id": 1,
                "name": "admin",
                "roleuser__id": 1,
                "roleuser__role": 1,
                "roleuser__user": 1,
                "users__id": 1,
                "users__name": "Anonymous",
                "users__categories__id": 1,
                "users__categories__name": "News",
                "users__categories__sort_order": 0,
                "users__categories__created_by": 1,
            }
        ]


@pytest.mark.asyncio
async def test_nested_m2m_values_without_through_explicit():
    """Explicitly excluding the through model drops its columns from values()."""
    async with base_ormar_config.database:
        user = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
            .exclude_fields("roleuser")
            .values()
        )
        assert user == [
            {
                "name": "admin",
                "users__name": "Anonymous",
                "users__categories__name": "News",
            }
        ]


@pytest.mark.asyncio
async def test_nested_m2m_values_without_through_param():
    """values(exclude_through=True) is shorthand for excluding through models."""
    async with base_ormar_config.database:
        user = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
            .values(exclude_through=True)
        )
        assert user == [
            {
                "name": "admin",
                "users__name": "Anonymous",
                "users__categories__name": "News",
            }
        ]


@pytest.mark.asyncio
async def test_nested_m2m_values_no_through_and_m2m_models_but_keep_end_model():
    """Excluding both the through model and the intermediate m2m model still
    keeps columns of deeper relations reached through it."""
    async with base_ormar_config.database:
        user = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
            .exclude_fields(["roleuser", "users"])
            .values()
        )
        assert user == [{"name": "admin", "users__categories__name": "News"}]
@pytest.mark.asyncio
async def test_nested_flatten_and_exception():
    """flatten=True is only allowed when exactly one field is selected."""
    async with base_ormar_config.database:
        # Two selected fields make flattening ambiguous -> must raise.
        with pytest.raises(QueryDefinitionError):
            await Role.objects.fields({"name", "id"}).values_list(flatten=True)
        names = await Role.objects.fields("name").values_list(flatten=True)
        assert names == ["admin", "editor"]
@pytest.mark.asyncio
async def test_empty_result():
    """A filter matching nothing yields [] from both values_list() and values()."""
    async with base_ormar_config.database:
        as_tuples = await Role.objects.filter(Role.name == "test").values_list()
        as_dicts = await Role.objects.filter(Role.name == "test").values()
        assert as_tuples == []
        assert as_dicts == []
        assert as_tuples == as_dicts
@pytest.mark.asyncio
async def test_queryset_values_multiple_select_related():
    """values() over two select_related branches produces a cartesian-style row
    per related post, filtered via a field-accessor expression."""
    async with base_ormar_config.database:
        posts = (
            await Category.objects.select_related(["created_by__roles", "posts"])
            .filter(Category.created_by.roles.name == "editor")
            .values(
                ["name", "posts__name", "created_by__name", "created_by__roles__name"],
                exclude_through=True,
            )
        )
        assert posts == [
            {
                "name": "News",
                "created_by__name": "Anonymous",
                "created_by__roles__name": "editor",
                "posts__name": "Ormar strikes again!",
            },
            {
                "name": "News",
                "created_by__name": "Anonymous",
                "created_by__roles__name": "editor",
                "posts__name": "Why don't you use ormar yet?",
            },
            {
                "name": "News",
                "created_by__name": "Anonymous",
                "created_by__roles__name": "editor",
                "posts__name": "Check this out, ormar now for free",
            },
        ]
@pytest.mark.asyncio
async def test_querysetproxy_values():
    """values() works on a relation proxy (role.users); the owning relation's
    columns ("roles__*") are included unless explicitly excluded."""
    async with base_ormar_config.database:
        role = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .get()
        )
        user = await role.users.values()
        assert user == [
            {
                "id": 1,
                "name": "Anonymous",
                "roles__id": 1,
                "roles__name": "admin",
                "roleuser__id": 1,
                "roleuser__role": 1,
                "roleuser__user": 1,
            }
        ]
        # exclude_through drops roleuser__*, but the owning side stays.
        user = (
            await role.users.filter(name="Anonymous")
            .select_related("categories")
            .fields({"name": ..., "categories": {"name"}})
            .values(exclude_through=True)
        )
        assert user == [
            {
                "name": "Anonymous",
                "roles__id": 1,
                "roles__name": "admin",
                "categories__name": "News",
            }
        ]
        # Excluding "roles" removes the owning side's columns too.
        user = (
            await role.users.filter(name="Anonymous")
            .select_related("categories")
            .fields({"name": ..., "categories": {"name"}})
            .exclude_fields("roles")
            .values(exclude_through=True)
        )
        assert user == [{"name": "Anonymous", "categories__name": "News"}]


@pytest.mark.asyncio
async def test_querysetproxy_values_list():
    """Same as above for values_list(), ending with single-field flatten."""
    async with base_ormar_config.database:
        role = (
            await Role.objects.select_related("users__categories")
            .filter(name="admin")
            .get()
        )
        user = await role.users.values_list()
        assert user == [(1, "Anonymous", 1, 1, 1, 1, "admin")]
        user = (
            await role.users.filter(name="Anonymous")
            .select_related("categories")
            .fields({"name": ..., "categories": {"name"}})
            .values_list(exclude_through=True)
        )
        assert user == [("Anonymous", "News", 1, "admin")]
        user = (
            await role.users.filter(name="Anonymous")
            .select_related("categories")
            .fields({"name": ..., "categories": {"name"}})
            .exclude_fields("roles")
            .values_list(exclude_through=True)
        )
        assert user == [("Anonymous", "News")]
        user = (
            await role.users.filter(name="Anonymous")
            .select_related("categories")
            .fields({"name"})
            .exclude_fields("roles")
            .values_list(exclude_through=True, flatten=True)
        )
        assert user == ["Anonymous"]
# Tests for ON DELETE CASCADE behavior across FK and m2m relations.
# Model graph: Track -> Album -> Artist; Artist <-m2m-> Band (through ArtistsBands).
from typing import Optional

import ormar
import pytest
import pytest_asyncio

from tests.lifespan import init_tests
from tests.settings import create_config

base_ormar_config = create_config()


class Band(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="bands")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class ArtistsBands(ormar.Model):
    # Explicit through model for Artist <-> Band.
    ormar_config = base_ormar_config.copy(tablename="artists_x_bands")

    id: int = ormar.Integer(primary_key=True)


class Artist(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="artists")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    bands = ormar.ManyToMany(Band, through=ArtistsBands)


class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Deleting an Artist cascades to its Albums (and from there to Tracks).
    artist: Optional[Artist] = ormar.ForeignKey(Artist, ondelete="CASCADE")


class Track(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album, ondelete="CASCADE")
    title: str = ormar.String(max_length=100)


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Teardown-only fixture: wipe Bands and Artists after each test.

    Cascades declared on the models propagate the Artist deletion to
    Albums/Tracks and through-table rows.
    """
    yield
    async with base_ormar_config.database:
        await Band.objects.delete(each=True)
        await Artist.objects.delete(each=True)
@pytest.mark.asyncio
async def test_simple_cascade(cleanup):
    """Deleting an artist removes its albums via ON DELETE CASCADE."""
    async with base_ormar_config.database:
        artist = await Artist(name="Dr Alban").save()
        await Album(name="Jamaica", artist=artist).save()
        await Artist.objects.delete(id=artist.id)
        artists = await Artist.objects.all()
        assert len(artists) == 0
        albums = await Album.objects.all()
        assert len(albums) == 0


@pytest.mark.asyncio
async def test_nested_cascade(cleanup):
    """Cascade chains two levels: artist -> album -> track."""
    async with base_ormar_config.database:
        artist = await Artist(name="Dr Alban").save()
        album = await Album(name="Jamaica", artist=artist).save()
        await Track(title="Yuhu", album=album).save()
        await Artist.objects.delete(id=artist.id)
        artists = await Artist.objects.all()
        assert len(artists) == 0
        albums = await Album.objects.all()
        assert len(albums) == 0
        tracks = await Track.objects.all()
        assert len(tracks) == 0


@pytest.mark.asyncio
async def test_many_to_many_cascade(cleanup):
    """Deleting one m2m side removes through-table rows but not the other side."""
    async with base_ormar_config.database:
        artist = await Artist(name="Dr Alban").save()
        band = await Band(name="Scorpions").save()
        await artist.bands.add(band)
        check = await Artist.objects.select_related("bands").get()
        assert check.bands[0].name == "Scorpions"
        await Artist.objects.delete(id=artist.id)
        artists = await Artist.objects.all()
        assert len(artists) == 0
        # The band survives; only the join rows are gone.
        bands = await Band.objects.all()
        assert len(bands) == 1
        connections = await ArtistsBands.objects.all()
        assert len(connections) == 0


@pytest.mark.asyncio
async def test_reverse_many_to_many_cascade(cleanup):
    """Same as above deleting from the reverse (Band) side."""
    async with base_ormar_config.database:
        artist = await Artist(name="Dr Alban").save()
        band = await Band(name="Scorpions").save()
        await artist.bands.add(band)
        check = await Artist.objects.select_related("bands").get()
        assert check.bands[0].name == "Scorpions"
        await Band.objects.delete(id=band.id)
        artists = await Artist.objects.all()
        assert len(artists) == 1
        connections = await ArtistsBands.objects.all()
        assert len(connections) == 0
        bands = await Band.objects.all()
        assert len(bands) == 0
# Tests for customizing the column names of an auto-generated m2m through model
# via through_relation_name / through_reverse_relation_name.
import ormar
import pytest

from tests.lifespan import init_tests
from tests.settings import create_config

base_ormar_config = create_config()


class Course(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    course_name: str = ormar.String(max_length=100)


class Student(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # Rename both FK columns on the generated through table.
    courses = ormar.ManyToMany(
        Course,
        through_relation_name="student_id",
        through_reverse_relation_name="course_id",
    )


create_test_database = init_tests(base_ormar_config)
def test_tables_columns():
    """The customized through-relation names become real column and field names
    on the generated through model."""
    through = Student.ormar_config.model_fields["courses"].through
    cfg = through.ormar_config
    for custom_name in ("course_id", "student_id"):
        assert custom_name in cfg.table.c
        assert custom_name in cfg.model_fields
@pytest.mark.asyncio
async def test_working_with_changed_through_names():
    """Renamed through columns must not break save_related, relation proxies,
    reverse filtering or ordering across the m2m."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            to_save = {
                "course_name": "basic1",
                "students": [{"name": "Jack"}, {"name": "Abi"}],
            }
            # Save the course together with its nested students.
            await Course(**to_save).save_related(follow=True, save_all=True)
            course_check = await Course.objects.select_related("students").get()
            assert course_check.course_name == "basic1"
            assert course_check.students[0].name == "Jack"
            assert course_check.students[1].name == "Abi"
            students = await course_check.students.all()
            assert len(students) == 2
            student = await course_check.students.get(name="Jack")
            assert student.name == "Jack"
            # Reverse traversal: filter students by course name.
            students = await Student.objects.select_related("courses").all(
                courses__course_name="basic1"
            )
            assert len(students) == 2
            # Ordering across the relation.
            course_check = (
                await Course.objects.select_related("students")
                .order_by("students__name")
                .get()
            )
            assert course_check.students[0].name == "Abi"
            assert course_check.students[1].name == "Jack"
# Tests verifying that FK constraints (names, columns, ondelete) are actually
# emitted into the database schema, plus referential-action validation.
from typing import Optional

import ormar
import pytest
import sqlalchemy

from ormar.fields.foreign_key import validate_referential_action

from tests.lifespan import init_tests
from tests.settings import create_config

base_ormar_config = create_config()


class Artist(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="artists")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    # String form of the referential action; inspected in test_simple_cascade.
    artist: Optional[Artist] = ormar.ForeignKey(Artist, ondelete="CASCADE")
class A(ormar.Model):
    """Root of the A <- B <- C cascade chain used by test_cascade_clear."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # Bug fix: the keyword was misspelled `nullalbe=False`; spelled that way it
    # presumably never applied the intended NOT NULL constraint (tests passed
    # regardless since `name` is always provided) - now spelled correctly.
    name: str = ormar.String(max_length=64, nullable=False)
class B(ormar.Model):
    """Middle model of the cascade chain; deleting its A cascades here."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # Bug fix: `nullalbe` -> `nullable` (typo presumably left the column
    # nullable; the NOT NULL constraint is now actually declared).
    name: str = ormar.String(max_length=64, nullable=False)
    a: A = ormar.ForeignKey(to=A, ondelete=ormar.ReferentialAction.CASCADE)
class C(ormar.Model):
    """Leaf model of the cascade chain; deleting its B cascades here."""

    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    # Bug fix: `nullalbe` -> `nullable` (typo presumably left the column
    # nullable; the NOT NULL constraint is now actually declared).
    name: str = ormar.String(max_length=64, nullable=False)
    b: B = ormar.ForeignKey(to=B, ondelete=ormar.ReferentialAction.CASCADE)
create_test_database = init_tests(base_ormar_config)


def test_simple_cascade():
    """Inspect the live schema: albums.artist must carry a named FK constraint
    with ON DELETE CASCADE pointing at artists.id."""
    inspector = sqlalchemy.inspect(base_ormar_config.engine)
    columns = inspector.get_columns("albums")
    assert len(columns) == 3
    col_names = [col.get("name") for col in columns]
    assert sorted(["id", "name", "artist"]) == sorted(col_names)
    fks = inspector.get_foreign_keys("albums")
    assert len(fks) == 1
    # Constraint name follows ormar's fk_<table>_<ref_table>_<ref_col>_<col> scheme.
    assert fks[0]["name"] == "fk_albums_artists_id_artist"
    assert fks[0]["constrained_columns"][0] == "artist"
    assert fks[0]["referred_columns"][0] == "id"
    assert fks[0]["options"].get("ondelete") == "CASCADE"
def test_validations_referential_action():
    """validate_referential_action normalizes strings and enum members to the
    canonical value, passes None through, and rejects unknown actions."""
    cascade = ormar.ReferentialAction.CASCADE.value
    # None means "no action declared" and is returned untouched.
    assert validate_referential_action(None) is None
    # Lowercase string and enum member both normalize to the canonical value.
    for action in ("cascade", ormar.ReferentialAction.CASCADE):
        assert validate_referential_action(action) == cascade
    with pytest.raises(ormar.ModelDefinitionError):
        validate_referential_action("NOT VALID")
@pytest.mark.asyncio
async def test_cascade_clear():
    """clear(keep_reversed=False) on the reverse relation deletes the B rows,
    and the DB-level cascade then removes the dependent C rows too."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            a = await A.objects.create(name="a")
            b = await B.objects.create(name="b", a=a)
            await C.objects.create(name="c", b=b)
            await a.bs.clear(keep_reversed=False)
            assert await B.objects.count() == 0
            assert await C.objects.count() == 0
# Core foreign-key relation tests: CRUD through relations, select_related,
# filtering across FKs, limits/offsets and relation removal.
from typing import Optional

import ormar
import pytest

from ormar.exceptions import MultipleMatches, NoMatch, RelationshipInstanceError

from tests.lifespan import init_tests
from tests.settings import create_config

base_ormar_config = create_config()


class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False)


class Track(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(primary_key=True)
    # Default reverse name on Album is "tracks".
    album: Optional[Album] = ormar.ForeignKey(Album)
    title: str = ormar.String(max_length=100)
    position: int = ormar.Integer()
    play_count: int = ormar.Integer(nullable=True, default=0)
    is_disabled: bool = ormar.Boolean(default=False)


class Cover(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="covers")

    id: int = ormar.Integer(primary_key=True)
    # Custom reverse name tested in test_related_name.
    album: Optional[Album] = ormar.ForeignKey(Album, related_name="cover_pictures")
    title: str = ormar.String(max_length=100)


class Organisation(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="org")

    id: int = ormar.Integer(primary_key=True)
    ident: str = ormar.String(max_length=100)


class Team(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="teams")

    id: int = ormar.Integer(primary_key=True)
    org: Optional[Organisation] = ormar.ForeignKey(Organisation)
    name: str = ormar.String(max_length=100)


class Member(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="members")

    id: int = ormar.Integer(primary_key=True)
    team: Optional[Team] = ormar.ForeignKey(Team)
    email: str = ormar.String(max_length=100)


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_wrong_query_foreign_key_type():
    """Passing a value of the wrong type for an FK raises at construction."""
    async with base_ormar_config.database:
        bad_kwargs = {"title": "The Error", "album": "wrong_pk_type"}
        with pytest.raises(RelationshipInstanceError):
            Track(**bad_kwargs)
@pytest.mark.asyncio
async def test_setting_explicitly_empty_relation():
    """A nullable FK may be set to None explicitly at construction time."""
    async with base_ormar_config.database:
        orphan_track = Track(album=None, title="The Bird", position=1)
        assert orphan_track.album is None
@pytest.mark.asyncio
async def test_related_name():
    """Custom related_name exposes the reverse relation under that name."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = await Album.objects.create(name="Vanilla")
            await Cover.objects.create(album=album, title="The cover file")
            assert len(album.cover_pictures) == 1


@pytest.mark.asyncio
async def test_model_crud():
    """CRUD through an FK: lazy parent (pk only) until load(), reverse list
    population, and creating with a pk-dict as the relation value."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = Album(name="Jamaica")
            await album.save()
            track1 = Track(album=album, title="The Bird", position=1)
            track2 = Track(album=album, title="Heart don't stand a chance", position=2)
            track3 = Track(album=album, title="The Waters", position=3)
            await track1.save()
            await track2.save()
            await track3.save()
            track = await Track.objects.get(title="The Bird")
            assert track.album.pk == album.pk
            assert isinstance(track.album, ormar.Model)
            # Without select_related the parent holds only the pk...
            assert track.album.name is None
            # ...until explicitly loaded.
            await track.album.load()
            assert track.album.name == "Jamaica"
            assert len(album.tracks) == 3
            assert album.tracks[1].title == "Heart don't stand a chance"
            album1 = await Album.objects.get(name="Jamaica")
            assert album1.pk == album.pk
            assert album1.tracks == []
            # A dict with the pk is accepted as the relation value.
            await Track.objects.create(
                album={"id": track.album.pk}, title="The Bird2", position=4
            )


@pytest.mark.asyncio
async def test_select_related():
    """select_related eagerly loads the parent model in the same query."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = Album(name="Malibu")
            await album.save()
            track1 = Track(album=album, title="The Bird", position=1)
            track2 = Track(album=album, title="Heart don't stand a chance", position=2)
            track3 = Track(album=album, title="The Waters", position=3)
            await track1.save()
            await track2.save()
            await track3.save()
            fantasies = Album(name="Fantasies")
            await fantasies.save()
            track4 = Track(album=fantasies, title="Help I'm Alive", position=1)
            track5 = Track(album=fantasies, title="Sick Muse", position=2)
            track6 = Track(album=fantasies, title="Satellite Mind", position=3)
            await track4.save()
            await track5.save()
            await track6.save()
            track = await Track.objects.select_related("album").get(title="The Bird")
            assert track.album.name == "Malibu"
            tracks = await Track.objects.select_related("album").all()
            assert len(tracks) == 6


@pytest.mark.asyncio
async def test_model_removal_from_relations():
    """remove()/add() on the reverse list and Model.remove() keep both sides of
    the relation in sync (in memory and, after update(), in the DB)."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = Album(name="Chichi")
            await album.save()
            track1 = Track(album=album, title="The Birdman", position=1)
            track2 = Track(album=album, title="Superman", position=2)
            track3 = Track(album=album, title="Wonder Woman", position=3)
            await track1.save()
            await track2.save()
            await track3.save()
            assert len(album.tracks) == 3
            await album.tracks.remove(track1)
            assert len(album.tracks) == 2
            assert track1.album is None
            # Persist the detached FK.
            await track1.update()
            track1 = await Track.objects.get(title="The Birdman")
            assert track1.album is None
            await album.tracks.add(track1)
            assert len(album.tracks) == 3
            assert track1.album == album
            await track1.update()
            track1 = await Track.objects.select_related("album__tracks").get(
                title="The Birdman"
            )
            album = await Album.objects.select_related("tracks").get(name="Chichi")
            assert track1.album == album
            # Removal from the owning side by relation name.
            track1.remove(album, name="album")
            assert track1.album is None
            assert len(album.tracks) == 2
            track2.remove(album, name="album")
            assert track2.album is None
            assert len(album.tracks) == 1
@pytest.mark.asyncio
async def test_fk_filter():
async with base_ormar_config.database:
async with base_ormar_config.database.transaction(force_rollback=True):
malibu = Album(name="Malibu%")
await malibu.save()
await Track.objects.create(album=malibu, title="The Bird", position=1)
await Track.objects.create(
album=malibu, title="Heart don't stand a chance", position=2
)
await Track.objects.create(album=malibu, title="The Waters", position=3)
fantasies = await Album.objects.create(name="Fantasies")
await Track.objects.create(
album=fantasies, title="Help I'm Alive", position=1
)
await Track.objects.create(album=fantasies, title="Sick Muse", position=2)
await Track.objects.create(
album=fantasies, title="Satellite Mind", position=3
)
tracks = (
await Track.objects.select_related("album")
.filter(album__name="Fantasies")
.all()
)
assert len(tracks) == 3
for track in tracks:
assert track.album.name == "Fantasies"
tracks = (
await Track.objects.select_related("album")
.filter(album__name__icontains="fan")
.all()
)
assert len(tracks) == 3
for track in tracks:
assert track.album.name == "Fantasies"
tracks = await Track.objects.filter(album__name__contains="Fan").all()
assert len(tracks) == 3
for track in tracks:
assert track.album.name == "Fantasies"
tracks = await Track.objects.filter(album__name__contains="Malibu%").all()
assert len(tracks) == 3
tracks = (
await Track.objects.filter(album=malibu).select_related("album").all()
)
assert len(tracks) == 3
for track in tracks:
assert track.album.name == "Malibu%"
tracks = await Track.objects.select_related("album").all(album=malibu)
assert len(tracks) == 3
for track in tracks:
assert track.album.name == "Malibu%"
@pytest.mark.asyncio
async def test_multiple_fk():
async with base_ormar_config.database:
async with base_ormar_config.database.transaction(force_rollback=True):
acme = await Organisation.objects.create(ident="ACME Ltd")
red_team = await Team.objects.create(org=acme, name="Red Team")
blue_team = await Team.objects.create(org=acme, name="Blue Team")
await Member.objects.create(team=red_team, email="a@example.org")
await Member.objects.create(team=red_team, email="b@example.org")
await Member.objects.create(team=blue_team, email="c@example.org")
await Member.objects.create(team=blue_team, email="d@example.org")
other = await Organisation.objects.create(ident="Other ltd")
team = await Team.objects.create(org=other, name="Green Team")
await Member.objects.create(team=team, email="e@example.org")
members = (
await Member.objects.select_related("team__org")
.filter(team__org__ident="ACME Ltd")
.all()
)
assert len(members) == 4
for member in members:
assert member.team.org.ident == "ACME Ltd"
@pytest.mark.asyncio
async def test_pk_filter():
    """Filtering by the ``pk`` alias resolves to the model's primary key column."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            fantasies = await Album.objects.create(name="Test")
            track = await Track.objects.create(
                album=fantasies, title="Test1", position=1
            )
            await Track.objects.create(album=fantasies, title="Test2", position=2)
            await Track.objects.create(album=fantasies, title="Test3", position=3)
            tracks = (
                await Track.objects.select_related("album").filter(pk=track.pk).all()
            )
            assert len(tracks) == 1
            # Own-column and related-column filters combined in a single call.
            tracks = (
                await Track.objects.select_related("album")
                .filter(position=2, album__name="Test")
                .all()
            )
            assert len(tracks) == 1
@pytest.mark.asyncio
async def test_limit_and_offset():
    """limit()/offset() count main models unless limit_raw_sql=True is passed."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            fantasies = await Album.objects.create(name="Limitless")
            # Explicit id=None is accepted; the autoincrement assigns the pk.
            await Track.objects.create(
                id=None, album=fantasies, title="Sample", position=1
            )
            await Track.objects.create(album=fantasies, title="Sample2", position=2)
            await Track.objects.create(album=fantasies, title="Sample3", position=3)
            tracks = await Track.objects.limit(1).all()
            assert len(tracks) == 1
            assert tracks[0].title == "Sample"
            tracks = await Track.objects.limit(1).offset(1).all()
            assert len(tracks) == 1
            assert tracks[0].title == "Sample2"
            # Default limit(1) limits the number of *albums*, so all three
            # related tracks are still loaded on the single album.
            album = await Album.objects.select_related("tracks").limit(1).get()
            assert len(album.tracks) == 3
            assert album.tracks[0].title == "Sample"
            # With limit_raw_sql=True the LIMIT applies to the raw joined rows,
            # so only one joined row (hence one track) comes back.
            album = (
                await Album.objects.select_related("tracks")
                .limit(1, limit_raw_sql=True)
                .get()
            )
            assert len(album.tracks) == 1
            assert album.tracks[0].title == "Sample"
@pytest.mark.asyncio
async def test_get_exceptions():
    """get() raises NoMatch for zero rows and MultipleMatches for several."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            fantasies = await Album.objects.create(name="Test")
            with pytest.raises(NoMatch):
                await Album.objects.get(name="Test2")
            await Track.objects.create(album=fantasies, title="Test1", position=1)
            await Track.objects.create(album=fantasies, title="Test2", position=2)
            await Track.objects.create(album=fantasies, title="Test3", position=3)
            # Three tracks match the same album, so a single get() must fail.
            with pytest.raises(MultipleMatches):
                await Track.objects.select_related("album").get(album=fantasies)
@pytest.mark.asyncio
async def test_wrong_model_passed_as_fk():
    """Passing an instance of the wrong model as a FK value is rejected."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            with pytest.raises(RelationshipInstanceError):
                org = await Organisation.objects.create(ident="ACME Ltd")
                # Track.album expects an Album instance, not an Organisation.
                await Track.objects.create(album=org, title="Test1", position=1)
@pytest.mark.asyncio
async def test_bulk_update_model_with_no_children():
    """bulk_update() persists a changed column on a model without relations."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            record = await Album.objects.create(name="Test")
            record.name = "Test2"
            await Album.objects.bulk_update([record], columns=["name"])
            # Re-fetch from the database to prove the change was written.
            reloaded = await Album.objects.get(id=record.id)
            assert reloaded.name == "Test2"
@pytest.mark.asyncio
async def test_bulk_update_model_with_children():
    """bulk_update() on parent models collected from a related queryset."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            best_seller = await Album.objects.create(name="to_be_best_seller")
            best_seller2 = await Album.objects.create(name="to_be_best_seller2")
            not_best_seller = await Album.objects.create(name="unpopular")
            await Track.objects.create(
                album=best_seller, title="t1", position=1, play_count=100
            )
            await Track.objects.create(
                album=best_seller2, title="t2", position=1, play_count=100
            )
            await Track.objects.create(
                album=not_best_seller, title="t3", position=1, play_count=3
            )
            await Track.objects.create(
                album=best_seller, title="t4", position=1, play_count=500
            )
            tracks = (
                await Track.objects.select_related("album")
                .filter(play_count__gt=10)
                .all()
            )
            # Deduplicate albums by id - best_seller appears for two tracks.
            best_seller_albums = {}
            for track in tracks:
                album = track.album
                if album.id in best_seller_albums:
                    continue
                album.is_best_seller = True
                best_seller_albums[album.id] = album
            await Album.objects.bulk_update(
                best_seller_albums.values(), columns=["is_best_seller"]
            )
            best_seller_albums_db = await Album.objects.filter(
                is_best_seller=True
            ).all()
            assert len(best_seller_albums_db) == 2
collerek-ormar-c09209a/tests/test_relations/test_m2m_through_fields.py 0000664 0000000 0000000 00000030470 15130200524 0026350 0 ustar 00root root 0000000 0000000 from typing import Any, ForwardRef
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id = ormar.Integer(primary_key=True)
    name = ormar.String(max_length=40)


class PostCategory(ormar.Model):
    # Through model for Post <-> Category carrying extra payload columns.
    ormar_config = base_ormar_config.copy(tablename="posts_x_categories")

    id: int = ormar.Integer(primary_key=True)
    sort_order: int = ormar.Integer(nullable=True)
    param_name: str = ormar.String(default="Name", max_length=200)


class Blog(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)


class Post(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories = ormar.ManyToMany(Category, through=PostCategory)
    blog = ormar.ForeignKey(Blog)


create_test_database = init_tests(base_ormar_config)


class PostCategory2(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="posts_x_categories2")

    id: int = ormar.Integer(primary_key=True)
    sort_order: int = ormar.Integer(nullable=True)


class Post2(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    # Through model given as a ForwardRef; resolved in test_forward_ref_is_updated.
    categories = ormar.ManyToMany(Category, through=ForwardRef("PostCategory2"))
@pytest.mark.asyncio
async def test_forward_ref_is_updated():
    """update_forward_refs() resolves the ForwardRef through model on Post2."""
    async with base_ormar_config.database:
        # Before resolution the config is flagged as needing a ref update.
        assert Post2.ormar_config.requires_ref_update
        Post2.update_forward_refs()
        through_field = Post2.ormar_config.model_fields["postcategory2"]
        assert through_field.to == PostCategory2
@pytest.mark.asyncio
async def test_setting_fields_on_through_model():
    """Related models expose the through instance attribute (None after add())."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        category = await Category(name="Test category").save()
        await post.categories.add(category)
        # The attribute exists but add() populated no through-model fields here.
        assert hasattr(post.categories[0], "postcategory")
        assert post.categories[0].postcategory is None
@pytest.mark.asyncio
async def test_setting_additional_fields_on_through_model_in_add():
    """Extra kwargs passed to add() are stored on the through-model row."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        category = await Category(name="Test category").save()
        await post.categories.add(category, sort_order=1)
        postcat = await PostCategory.objects.get()
        assert postcat.sort_order == 1


@pytest.mark.asyncio
async def test_setting_additional_fields_on_through_model_in_create():
    """A through-model dict passed to create() populates the join row."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 2}
        )
        postcat = await PostCategory.objects.get()
        assert postcat.sort_order == 2
@pytest.mark.asyncio
async def test_getting_additional_fields_from_queryset() -> Any:
    """Through-model fields are populated on related models loaded by queries."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1", postcategory={"sort_order": 1}
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 2}
        )
        await post.categories.all()
        # The owning side has no through instance; each related one does.
        assert post.postcategory is None
        assert post.categories[0].postcategory.sort_order == 1
        assert post.categories[1].postcategory.sort_order == 2
        post2 = await Post.objects.select_related("categories").get(
            categories__name="Test category2"
        )
        assert post2.categories[0].postcategory.sort_order == 2


@pytest.mark.asyncio
async def test_only_one_side_has_through() -> Any:
    """Only the far side of the relation carries the through instance."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1", postcategory={"sort_order": 1}
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 2}
        )
        post2 = await Post.objects.select_related("categories").get()
        assert post2.postcategory is None
        assert post2.categories[0].postcategory is not None
        await post2.categories.all()
        assert post2.postcategory is None
        assert post2.categories[0].postcategory is not None
        # Same rule holds when traversing from the Category side.
        categories = await Category.objects.select_related("posts").all()
        assert isinstance(categories[0], Category)
        assert categories[0].postcategory is None
        assert categories[0].posts[0].postcategory is not None
@pytest.mark.asyncio
async def test_filtering_by_through_model() -> Any:
    """Filters can target through-model columns directly or via the relation."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 1, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 2, "param_name": "area"}
        )
        post2 = (
            await Post.objects.select_related("categories")
            .filter(postcategory__sort_order__gt=1)
            .get()
        )
        assert len(post2.categories) == 1
        assert post2.categories[0].postcategory.sort_order == 2
        # Same filter spelled through the relation prefix.
        post3 = await Post.objects.filter(
            categories__postcategory__param_name="volume"
        ).get()
        assert len(post3.categories) == 1
        assert post3.categories[0].postcategory.param_name == "volume"


@pytest.mark.asyncio
async def test_deep_filtering_by_through_model() -> Any:
    """Through-model filters also work across a deeper FK chain (Blog -> Post)."""
    async with base_ormar_config.database:
        blog = await Blog(title="My Blog").save()
        post = await Post(title="Test post", blog=blog).save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 1, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 2, "param_name": "area"}
        )
        blog2 = (
            await Blog.objects.select_related("posts__categories")
            .filter(posts__postcategory__sort_order__gt=1)
            .get()
        )
        assert len(blog2.posts) == 1
        assert len(blog2.posts[0].categories) == 1
        assert blog2.posts[0].categories[0].postcategory.sort_order == 2
        blog3 = await Blog.objects.filter(
            posts__categories__postcategory__param_name="volume"
        ).get()
        assert len(blog3.posts) == 1
        assert len(blog3.posts[0].categories) == 1
        assert blog3.posts[0].categories[0].postcategory.param_name == "volume"
@pytest.mark.asyncio
async def test_ordering_by_through_model() -> Any:
    """order_by() accepts through-model columns, bare or relation-prefixed."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 2, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 1, "param_name": "area"}
        )
        await post.categories.create(
            name="Test category3",
            postcategory={"sort_order": 3, "param_name": "velocity"},
        )
        # Descending by sort_order: category3 (3) first, category2 (1) last.
        post2 = (
            await Post.objects.select_related("categories")
            .order_by("-postcategory__sort_order")
            .get()
        )
        assert len(post2.categories) == 3
        assert post2.categories[0].name == "Test category3"
        assert post2.categories[2].name == "Test category2"
        # Ascending alphabetical by param_name: "area" .. "volume".
        post3 = (
            await Post.objects.select_related("categories")
            .order_by("categories__postcategory__param_name")
            .get()
        )
        assert len(post3.categories) == 3
        assert post3.categories[0].postcategory.param_name == "area"
        assert post3.categories[2].postcategory.param_name == "volume"
@pytest.mark.asyncio
async def test_update_through_models_from_queryset_on_through() -> Any:
    """Through rows can be updated straight from the through model's queryset."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 2, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 1, "param_name": "area"}
        )
        await post.categories.create(
            name="Test category3",
            postcategory={"sort_order": 3, "param_name": "velocity"},
        )
        await PostCategory.objects.filter(param_name="volume", post=post.id).update(
            sort_order=4
        )
        # After bumping "volume" to 4 it sorts first in descending order.
        post2 = (
            await Post.objects.select_related("categories")
            .order_by("-postcategory__sort_order")
            .get()
        )
        assert len(post2.categories) == 3
        assert post2.categories[0].postcategory.param_name == "volume"
        assert post2.categories[2].postcategory.param_name == "area"


@pytest.mark.asyncio
async def test_update_through_model_after_load() -> Any:
    """A loaded through instance supports load() + update() like any model."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 2, "param_name": "volume"},
        )
        post2 = await Post.objects.select_related("categories").get()
        assert len(post2.categories) == 1
        await post2.categories[0].postcategory.load()
        await post2.categories[0].postcategory.update(sort_order=3)
        post3 = await Post.objects.select_related("categories").get()
        assert len(post3.categories) == 1
        assert post3.categories[0].postcategory.sort_order == 3
@pytest.mark.asyncio
async def test_update_through_from_related() -> Any:
    """update() on a related queryset accepts a through-model payload dict."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 2, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 1, "param_name": "area"}
        )
        await post.categories.create(
            name="Test category3",
            postcategory={"sort_order": 3, "param_name": "velocity"},
        )
        await post.categories.filter(name="Test category3").update(
            postcategory={"sort_order": 4}
        )
        post2 = (
            await Post.objects.select_related("categories")
            .order_by("postcategory__sort_order")
            .get()
        )
        assert len(post2.categories) == 3
        assert post2.categories[2].postcategory.sort_order == 4


@pytest.mark.asyncio
async def test_excluding_fields_on_through_model() -> Any:
    """fields()/exclude_fields() selectors reach through-model columns."""
    async with base_ormar_config.database:
        post = await Post(title="Test post").save()
        await post.categories.create(
            name="Test category1",
            postcategory={"sort_order": 2, "param_name": "volume"},
        )
        await post.categories.create(
            name="Test category2", postcategory={"sort_order": 1, "param_name": "area"}
        )
        await post.categories.create(
            name="Test category3",
            postcategory={"sort_order": 3, "param_name": "velocity"},
        )
        # The excluded column comes back as None; the rest loads and sorts.
        post2 = (
            await Post.objects.select_related("categories")
            .exclude_fields("postcategory__param_name")
            .order_by("postcategory__sort_order")
            .get()
        )
        assert len(post2.categories) == 3
        assert post2.categories[0].postcategory.param_name is None
        assert post2.categories[0].postcategory.sort_order == 1
        assert post2.categories[2].postcategory.param_name is None
        assert post2.categories[2].postcategory.sort_order == 3
        # Dict-style selectors can exclude several columns at once.
        post3 = (
            await Post.objects.select_related("categories")
            .fields({"postcategory": ..., "title": ...})
            .exclude_fields({"postcategory": {"param_name", "sort_order"}})
            .get()
        )
        assert len(post3.categories) == 3
        for category in post3.categories:
            assert category.postcategory.param_name is None
            assert category.postcategory.sort_order is None
collerek-ormar-c09209a/tests/test_relations/test_many_to_many.py 0000664 0000000 0000000 00000017572 15130200524 0025271 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
import pytest_asyncio
from ormar.exceptions import ModelPersistenceError, NoMatch, RelationshipInstanceError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)


class Author(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)


class Post(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    # ManyToMany without an explicit through model; the through class is
    # accessed later via Post.ormar_config.model_fields["categories"].through.
    categories: Optional[List[Category]] = ormar.ManyToMany(Category)
    author: Optional[Author] = ormar.ForeignKey(Author)


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Wipe all rows created by a test, including the generated through table."""
    yield
    async with base_ormar_config.database:
        # The auto-generated through model is only reachable via the field.
        PostCategory = Post.ormar_config.model_fields["categories"].through
        await PostCategory.objects.delete(each=True)
        await Post.objects.delete(each=True)
        await Category.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_not_saved_raises_error(cleanup):
    """Adding an unsaved model to an M2M relation raises ModelPersistenceError."""
    async with base_ormar_config.database:
        guido = await Author(first_name="Guido", last_name="Van Rossum").save()
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = Category(name="News")
        with pytest.raises(ModelPersistenceError):
            await post.categories.add(news)


@pytest.mark.asyncio
async def test_not_existing_raises_error(cleanup):
    """get() on an empty relation raises NoMatch; get_or_none() returns None."""
    async with base_ormar_config.database:
        guido = await Author(first_name="Guido", last_name="Van Rossum").save()
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        with pytest.raises(NoMatch):
            await post.categories.get()
        assert await post.categories.get_or_none() is None
@pytest.mark.asyncio
async def test_assigning_related_objects(cleanup):
    """add()/create() work from either side of the M2M relation."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        # Add a category to a post.
        await post.categories.add(news)
        # or from the other end:
        await news.posts.add(post)
        assert await post.categories.get_or_none(name="no exist") is None
        assert await post.categories.get_or_none(name="News") == news
        # Creating columns object from instance:
        await post.categories.create(name="Tips")
        assert len(post.categories) == 2
        post_categories = await post.categories.all()
        assert len(post_categories) == 2


@pytest.mark.asyncio
async def test_quering_of_the_m2m_models(cleanup):
    """The relation proxy exposes the full QuerySet API in both directions."""
    async with base_ormar_config.database:
        # orm can do this already.
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        # tl;dr: `post.categories` exposes the QuerySet API.
        await post.categories.add(news)
        post_categories = await post.categories.all()
        assert len(post_categories) == 1
        assert news == await post.categories.get(name="News")
        num_posts = await news.posts.count()
        assert num_posts == 1
        posts_about_m2m = await news.posts.filter(title__contains="M2M").all()
        assert len(posts_about_m2m) == 1
        assert posts_about_m2m[0] == post
        posts_about_python = await Post.objects.filter(categories__name="python").all()
        assert len(posts_about_python) == 0
        # Traversal of relationships: which categories has Guido contributed to?
        category = await Category.objects.filter(posts__author=guido).get()
        assert category == news
        # or:
        category2 = await Category.objects.filter(
            posts__author__first_name="Guido"
        ).get()
        assert category2 == news
@pytest.mark.asyncio
async def test_removal_of_the_relations(cleanup):
    """remove()/clear() drop join rows; deleting one side removes the link."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        await post.categories.add(news)
        assert len(await post.categories.all()) == 1
        await post.categories.remove(news)
        assert len(await post.categories.all()) == 0
        # or:
        await news.posts.add(post)
        assert len(await news.posts.all()) == 1
        await news.posts.remove(post)
        assert len(await news.posts.all()) == 0
        # Remove all columns objects:
        await post.categories.add(news)
        await post.categories.clear()
        assert len(await post.categories.all()) == 0
        # post would also lose 'news' category when running:
        await post.categories.add(news)
        await news.delete()
        assert len(await post.categories.all()) == 0


@pytest.mark.asyncio
async def test_selecting_related(cleanup):
    """select_related/limit/offset/first/exists on the relation proxy."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        recent = await Category.objects.create(name="Recent")
        await post.categories.add(news)
        await post.categories.add(recent)
        assert len(await post.categories.all()) == 2
        # Loads categories and posts (2 queries) and perform the join in Python.
        categories = await Category.objects.select_related("posts").all()
        # No extra queries needed => no more `await`s required.
        for category in categories:
            assert category.posts[0] == post
        news_posts = await news.posts.select_related("author").all()
        assert news_posts[0].author == guido
        assert (await post.categories.limit(1).all())[0] == news
        assert (await post.categories.offset(1).limit(1).all())[0] == recent
        assert await post.categories.first() == news
        assert await post.categories.exists()
@pytest.mark.asyncio
async def test_selecting_related_fail_without_saving(cleanup):
    """Querying relations of an unsaved model raises RelationshipInstanceError."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        # Note: the post is constructed but never saved.
        post = Post(title="Hello, M2M", author=guido)
        with pytest.raises(RelationshipInstanceError):
            await post.categories.all()


@pytest.mark.asyncio
async def test_adding_unsaved_related(cleanup):
    """add() rejects an unsaved model but succeeds once it has been saved."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = Category(name="News")
        with pytest.raises(ModelPersistenceError):
            await post.categories.add(news)
        await news.save()
        await post.categories.add(news)
        assert len(await post.categories.all()) == 1


@pytest.mark.asyncio
async def test_removing_unsaved_related(cleanup):
    """remove() of a model that was never linked raises NoMatch."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = Category(name="News")
        with pytest.raises(NoMatch):
            await post.categories.remove(news)
collerek-ormar-c09209a/tests/test_relations/test_postgress_select_related_with_limit.py 0000664 0000000 0000000 00000005752 15130200524 0032115 0 ustar 00root root 0000000 0000000 # Models
import uuid
from datetime import date
from enum import Enum
from typing import Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)


class PrimaryKeyMixin:
    # Shared UUID primary key for the models below.
    id: uuid.UUID = ormar.UUID(primary_key=True, default=uuid.uuid4)


class Level(Enum):
    ADMIN = "0"
    STAFF = "1"


class User(PrimaryKeyMixin, ormar.Model):
    """User Model Class to Implement Method for Operations of User Entity"""

    mobile: str = ormar.String(unique=True, index=True, max_length=10)
    password: str = ormar.String(max_length=128)
    level: Level = ormar.Enum(default=Level.STAFF, enum_class=Level)
    email: Optional[str] = ormar.String(max_length=255, nullable=True, default=None)
    avatar: Optional[str] = ormar.String(max_length=255, nullable=True, default=None)
    fullname: Optional[str] = ormar.String(max_length=64, nullable=True, default=None)
    is_active: bool = ormar.Boolean(index=True, nullable=False, default=True)

    # Default ordering matters for the limit tests below.
    ormar_config = base_ormar_config.copy(order_by=["-is_active", "-level"])
class Task(PrimaryKeyMixin, ormar.Model):
    """Task Model Class to Implement Method for Operations of Task Entity"""

    # Fixed typo: the keyword was spelled "nullalbe", so the intended
    # NOT NULL constraint was never applied to the column.
    name: str = ormar.String(max_length=64, nullable=False)
    description: Optional[str] = ormar.Text(nullable=True, default=None)
    start_date: Optional[date] = ormar.Date(nullable=True, default=None)
    end_date: Optional[date] = ormar.Date(nullable=True, default=None)
    is_halted: bool = ormar.Boolean(index=True, nullable=False, default=True)
    user: User = ormar.ForeignKey(to=User)

    # Default ordering + one-name-per-user uniqueness.
    ormar_config = base_ormar_config.copy(
        order_by=["-end_date", "-start_date"],
        constraints=[
            ormar.UniqueColumns("user", "name"),
        ],
    )


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_selecting_related_with_limit():
    """limit() with/without limit_raw_sql when joining one-to-many rows."""
    async with base_ormar_config.database:
        user1 = await User(mobile="9928917653", password="pass1").save()
        user2 = await User(mobile="9928917654", password="pass2").save()
        await Task(name="one", user=user1).save()
        await Task(name="two", user=user1).save()
        await Task(name="three", user=user2).save()
        await Task(name="four", user=user2).save()
        # Raw SQL LIMIT 2 truncates the joined rows, so only one user
        # (with both tasks) survives - regardless of the call order.
        users = (
            await User.objects.limit(2, limit_raw_sql=True)
            .select_related(User.tasks)
            .all()
        )
        users2 = (
            await User.objects.select_related(User.tasks)
            .limit(2, limit_raw_sql=True)
            .all()
        )
        assert users == users2
        assert len(users) == 1
        assert len(users[0].tasks) == 2
        # Default limit applies to distinct main models: both users return,
        # each with their full set of tasks.
        users3 = await User.objects.limit(2).select_related(User.tasks).all()
        users4 = await User.objects.select_related(User.tasks).limit(2).all()
        assert users3 == users4
        assert len(users3) == 2
        assert len(users3[0].tasks) == 2
        assert len(users3[1].tasks) == 2
collerek-ormar-c09209a/tests/test_relations/test_prefetch_related.py 0000664 0000000 0000000 00000034274 15130200524 0026075 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config(force_rollback=True)


class RandomSet(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="randoms")

    # Field aliased to a differently named DB column.
    id: int = ormar.Integer(name="random_id", primary_key=True)
    name: str = ormar.String(max_length=100)


class Tonation(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tonations")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(name="tonation_name", max_length=100)
    rand_set: Optional[RandomSet] = ormar.ForeignKey(RandomSet)


class Division(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="divisions")

    id: int = ormar.Integer(name="division_id", primary_key=True)
    name: str = ormar.String(max_length=100, nullable=True)


class Shop(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="shops")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=True)
    division: Optional[Division] = ormar.ForeignKey(Division)


class AlbumShops(ormar.Model):
    # Explicit (field-less) through model for Album <-> Shop.
    ormar_config = base_ormar_config.copy(tablename="albums_x_shops")


class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100, nullable=True)
    shops: List[Shop] = ormar.ManyToMany(to=Shop, through=AlbumShops)


class Track(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(name="track_id", primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album)
    title: str = ormar.String(max_length=100)
    position: int = ormar.Integer()
    tonation: Optional[Tonation] = ormar.ForeignKey(Tonation, name="tonation_id")


class Cover(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="covers")

    id: int = ormar.Integer(primary_key=True)
    # Reverse side is exposed on Album as "cover_pictures".
    album: Optional[Album] = ormar.ForeignKey(
        Album, related_name="cover_pictures", name="album_id"
    )
    title: str = ormar.String(max_length=100)
    artist: str = ormar.String(max_length=200, nullable=True)


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_prefetch_related():
    """prefetch_related() loads nested FK/reverse relations via extra queries."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            album = Album(name="Malibu")
            await album.save()
            ton1 = await Tonation.objects.create(name="B-mol")
            await Track.objects.create(
                album=album, title="The Bird", position=1, tonation=ton1
            )
            await Track.objects.create(
                album=album,
                title="Heart don't stand a chance",
                position=2,
                tonation=ton1,
            )
            await Track.objects.create(
                album=album, title="The Waters", position=3, tonation=ton1
            )
            await Cover.objects.create(title="Cover1", album=album, artist="Artist 1")
            await Cover.objects.create(title="Cover2", album=album, artist="Artist 2")
            fantasies = Album(name="Fantasies")
            await fantasies.save()
            await Track.objects.create(
                album=fantasies, title="Help I'm Alive", position=1
            )
            await Track.objects.create(album=fantasies, title="Sick Muse", position=2)
            await Track.objects.create(
                album=fantasies, title="Satellite Mind", position=3
            )
            await Cover.objects.create(
                title="Cover3", album=fantasies, artist="Artist 3"
            )
            await Cover.objects.create(
                title="Cover4", album=fantasies, artist="Artist 4"
            )
            # Two prefetch paths at once: tracks (+ their tonation) and covers.
            album = (
                await Album.objects.filter(name="Malibu")
                .prefetch_related(["tracks__tonation", "cover_pictures"])
                .get()
            )
            assert len(album.tracks) == 3
            assert album.tracks[0].title == "The Bird"
            assert len(album.cover_pictures) == 2
            assert album.cover_pictures[0].title == "Cover1"
            assert (
                album.tracks[0].tonation.name
                == album.tracks[2].tonation.name
                == "B-mol"
            )
            albums = await Album.objects.prefetch_related("tracks").all()
            assert len(albums[0].tracks) == 3
            assert len(albums[1].tracks) == 3
            assert albums[0].tracks[0].title == "The Bird"
            assert albums[1].tracks[0].title == "Help I'm Alive"
            track = await Track.objects.prefetch_related(["album__cover_pictures"]).get(
                title="The Bird"
            )
            assert track.album.name == "Malibu"
            assert len(track.album.cover_pictures) == 2
            assert track.album.cover_pictures[0].artist == "Artist 1"
            # exclude_fields() also applies to prefetched columns.
            track = (
                await Track.objects.prefetch_related(["album__cover_pictures"])
                .exclude_fields("album__cover_pictures__artist")
                .get(title="The Bird")
            )
            assert track.album.name == "Malibu"
            assert len(track.album.cover_pictures) == 2
            assert track.album.cover_pictures[0].artist is None
            tracks = await Track.objects.prefetch_related("album").all()
            assert len(tracks) == 6
@pytest.mark.asyncio
async def test_prefetch_related_with_many_to_many():
    """prefetch_related() across an M2M shares identical related instances."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            div = await Division.objects.create(name="Div 1")
            shop1 = await Shop.objects.create(name="Shop 1", division=div)
            shop2 = await Shop.objects.create(name="Shop 2", division=div)
            album = Album(name="Malibu")
            await album.save()
            await album.shops.add(shop1)
            await album.shops.add(shop2)
            await Track.objects.create(album=album, title="The Bird", position=1)
            await Track.objects.create(
                album=album, title="Heart don't stand a chance", position=2
            )
            await Track.objects.create(album=album, title="The Waters", position=3)
            await Cover.objects.create(title="Cover1", album=album, artist="Artist 1")
            await Cover.objects.create(title="Cover2", album=album, artist="Artist 2")
            track = await Track.objects.prefetch_related(
                ["album__cover_pictures", "album__shops__division"]
            ).get(title="The Bird")
            assert track.album.name == "Malibu"
            assert len(track.album.cover_pictures) == 2
            assert track.album.cover_pictures[0].artist == "Artist 1"
            assert len(track.album.shops) == 2
            assert track.album.shops[0].name == "Shop 1"
            assert track.album.shops[0].division.name == "Div 1"
            album2 = Album(name="Malibu 2")
            await album2.save()
            await album2.shops.add(shop1)
            await album2.shops.add(shop2)
            await Track.objects.create(album=album2, title="The Bird 2", position=1)
            tracks = await Track.objects.prefetch_related(["album__shops"]).all()
            assert tracks[0].album.name == "Malibu"
            assert tracks[0].album.shops[0].name == "Shop 1"
            assert tracks[3].album.name == "Malibu 2"
            assert tracks[3].album.shops[0].name == "Shop 1"
            # The same shop row is materialised once and shared by identity...
            assert tracks[0].album.shops[0] == tracks[3].album.shops[0]
            assert id(tracks[0].album.shops[0]) == id(tracks[3].album.shops[0])
            # ...so mutating it through one path is visible through the other.
            tracks[0].album.shops[0].name = "Dummy"
            assert tracks[0].album.shops[0].name == tracks[3].album.shops[0].name
@pytest.mark.asyncio
async def test_prefetch_related_empty():
    """Prefetching over a null FK leaves the relation as None without errors."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            await Track.objects.create(title="The Bird", position=1)
            query = Track.objects.prefetch_related(["album__cover_pictures"])
            loaded = await query.get(title="The Bird")
            assert loaded.title == "The Bird"
            # No album was ever assigned, so nothing could be prefetched.
            assert loaded.album is None
@pytest.mark.asyncio
async def test_prefetch_related_with_select_related():
    """Mixing select_related (SQL joins) with prefetch_related (separate queries)
    on the same query should populate all requested relations consistently."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            # Fixture data: one division, two shops, one album with two covers.
            div = await Division.objects.create(name="Div 1")
            shop1 = await Shop.objects.create(name="Shop 1", division=div)
            shop2 = await Shop.objects.create(name="Shop 2", division=div)
            album = Album(name="Malibu")
            await album.save()
            await album.shops.add(shop1)
            await album.shops.add(shop2)
            await Cover.objects.create(title="Cover1", album=album, artist="Artist 1")
            await Cover.objects.create(title="Cover2", album=album, artist="Artist 2")
            # No tracks exist yet -> joined `tracks` relation is an empty list,
            # while the prefetched relations are still filled.
            album = (
                await Album.objects.select_related(["tracks", "shops"])
                .filter(name="Malibu")
                .prefetch_related(["cover_pictures", "shops__division"])
                .first()
            )
            assert len(album.tracks) == 0
            assert len(album.cover_pictures) == 2
            assert album.shops[0].division.name == "Div 1"
            rand_set = await RandomSet.objects.create(name="Rand 1")
            ton1 = await Tonation.objects.create(name="B-mol", rand_set=rand_set)
            await Track.objects.create(
                album=album, title="The Bird", position=1, tonation=ton1
            )
            await Track.objects.create(
                album=album,
                title="Heart don't stand a chance",
                position=2,
                tonation=ton1,
            )
            await Track.objects.create(
                album=album, title="The Waters", position=3, tonation=ton1
            )
            # order_by on related fields reverses shop / cover ordering below.
            album = (
                await Album.objects.select_related("tracks__tonation__rand_set")
                .filter(name="Malibu")
                .prefetch_related(["cover_pictures", "shops__division"])
                .order_by(
                    ["-shops__name", "-cover_pictures__artist", "shops__division__name"]
                )
                .get()
            )
            assert len(album.tracks) == 3
            assert album.tracks[0].tonation == album.tracks[2].tonation == ton1
            assert len(album.cover_pictures) == 2
            assert album.cover_pictures[0].artist == "Artist 2"
            assert len(album.shops) == 2
            assert album.shops[0].name == "Shop 2"
            assert album.shops[0].division.name == "Div 1"
            # Same combination approached from the Track side of the relation.
            track = (
                await Track.objects.select_related("album")
                .prefetch_related(["album__cover_pictures", "album__shops__division"])
                .get(title="The Bird")
            )
            assert track.album.name == "Malibu"
            assert len(track.album.cover_pictures) == 2
            assert track.album.cover_pictures[0].artist == "Artist 1"
            assert len(track.album.shops) == 2
            assert track.album.shops[0].name == "Shop 1"
            assert track.album.shops[0].division.name == "Div 1"
@pytest.mark.asyncio
async def test_prefetch_related_with_select_related_and_fields():
    """fields()/exclude_fields() should also limit columns loaded by prefetch_related:
    excluded fields come back as None while the relation objects themselves load."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            div = await Division.objects.create(name="Div 1")
            shop1 = await Shop.objects.create(name="Shop 1", division=div)
            shop2 = await Shop.objects.create(name="Shop 2", division=div)
            album = Album(name="Malibu")
            await album.save()
            await album.shops.add(shop1)
            await album.shops.add(shop2)
            await Cover.objects.create(title="Cover1", album=album, artist="Artist 1")
            await Cover.objects.create(title="Cover2", album=album, artist="Artist 2")
            rand_set = await RandomSet.objects.create(name="Rand 1")
            ton1 = await Tonation.objects.create(name="B-mol", rand_set=rand_set)
            await Track.objects.create(
                album=album, title="The Bird", position=1, tonation=ton1
            )
            await Track.objects.create(
                album=album,
                title="Heart don't stand a chance",
                position=2,
                tonation=ton1,
            )
            await Track.objects.create(
                album=album, title="The Waters", position=3, tonation=ton1
            )
            # Excluding the prefetched division's name leaves it as None.
            album = (
                await Album.objects.select_related("tracks__tonation__rand_set")
                .filter(name="Malibu")
                .prefetch_related(["cover_pictures", "shops__division"])
                .exclude_fields({"shops": {"division": {"name"}}})
                .get()
            )
            assert len(album.tracks) == 3
            assert album.tracks[0].tonation == album.tracks[2].tonation == ton1
            assert len(album.cover_pictures) == 2
            assert album.cover_pictures[0].artist == "Artist 1"
            assert len(album.shops) == 2
            assert album.shops[0].name == "Shop 1"
            assert album.shops[0].division.name is None
            # Positive field selection: only listed columns are populated,
            # everything else on the related models is None.
            album = (
                await Album.objects.select_related("tracks")
                .filter(name="Malibu")
                .prefetch_related(["cover_pictures", "shops__division"])
                .fields(
                    {
                        "name": ...,
                        "shops": {"division"},
                        "cover_pictures": {"id": ..., "title": ...},
                    }
                )
                .exclude_fields({"shops": {"division": {"name"}}})
                .get()
            )
            assert len(album.tracks) == 3
            assert len(album.cover_pictures) == 2
            assert album.cover_pictures[0].artist is None
            assert album.cover_pictures[0].title is not None
            assert len(album.shops) == 2
            assert album.shops[0].name is None
            assert album.shops[0].division is not None
            assert album.shops[0].division.name is None
collerek-ormar-c09209a/tests/test_relations/test_prefetch_related_multiple_models_relation.py 0000664 0000000 0000000 00000004732 15130200524 0033244 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class User(ormar.Model):
    # Plain user model; acts both as Session.teacher (FK) and as a student (M2M).
    ormar_config = base_ormar_config.copy(tablename="test_users")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)


class Signup(ormar.Model):
    # Explicit through model for the Session <-> User many-to-many relation.
    ormar_config = base_ormar_config.copy(tablename="test_signup")

    id: int = ormar.Integer(primary_key=True)


class Session(ormar.Model):
    # Model with two relations to the same target (User) under different names.
    ormar_config = base_ormar_config.copy(tablename="test_sessions")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, index=True)
    some_text: str = ormar.Text()
    some_other_text: Optional[str] = ormar.Text(nullable=True)
    teacher: Optional[User] = ormar.ForeignKey(
        User, nullable=True, related_name="teaching"
    )
    students: Optional[List[User]] = ormar.ManyToMany(
        User, through=Signup, related_name="attending"
    )


# Creates (and later drops) the test database schema for this module.
create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_add_students():
    """Prefetching two relations pointing at the same model (students M2M and
    teacher FK) on one query should not interfere with each other."""
    async with base_ormar_config.database:
        for user_id in [1, 2, 3, 4, 5]:
            await User.objects.create(name=f"User {user_id}")
        for name, some_text, some_other_text in [
            ("Session 1", "Some text 1", "Some other text 1"),
            ("Session 2", "Some text 2", "Some other text 2"),
            ("Session 3", "Some text 3", "Some other text 3"),
            ("Session 4", "Some text 4", "Some other text 4"),
            ("Session 5", "Some text 5", "Some other text 5"),
        ]:
            await Session(
                name=name, some_text=some_text, some_other_text=some_other_text
            ).save()
        s1 = await Session.objects.get(pk=1)
        s2 = await Session.objects.get(pk=2)
        users = {}
        # Even-numbered users join session 1, odd-numbered join session 2.
        for i in range(1, 6):
            user = await User.objects.get(pk=i)
            users[f"user_{i}"] = user
            if i % 2 == 0:
                await s1.students.add(user)
            else:
                await s2.students.add(user)
        assert len(s1.students) > 0
        assert len(s2.students) > 0
        # Reverse side of the M2M is reachable via its related_name.
        user = await User.objects.select_related("attending").get(pk=1)
        assert user.attending is not None
        assert len(user.attending) > 0
        query = Session.objects.prefetch_related(["students", "teacher"])
        sessions = await query.all()
        assert len(sessions) == 5
collerek-ormar-c09209a/tests/test_relations/test_prefetch_related_with_same_models.py 0000664 0000000 0000000 00000007575 15130200524 0031504 0 ustar 00root root 0000000 0000000 from random import randint
from typing import ForwardRef, Optional
import ormar
import pytest
from faker import Faker
from ormar.relations.relation_proxy import RelationProxy
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
fake = Faker()
class Author(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=256)


class BookAuthor(ormar.Model):
    # Through model for Book.authors.
    ormar_config = base_ormar_config.copy(tablename="book_authors")

    id: int = ormar.Integer(primary_key=True)


class BookCoAuthor(ormar.Model):
    # Through model for Book.co_authors.
    ormar_config = base_ormar_config.copy(tablename="book_co_authors")

    id: int = ormar.Integer(primary_key=True)


class Book(ormar.Model):
    # Two distinct M2M relations targeting the same Author model.
    ormar_config = base_ormar_config.copy(tablename="books")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=256)
    description: Optional[str] = ormar.String(max_length=256, nullable=True)
    authors: RelationProxy[Author] = ormar.ManyToMany(
        Author, related_name="author_books", through=BookAuthor
    )
    co_authors: RelationProxy[Author] = ormar.ManyToMany(
        Author, related_name="co_author_books", through=BookCoAuthor
    )


class SelfRef(ormar.Model):
    # Self-referencing model: both an FK and an M2M back to itself via ForwardRef.
    ormar_config = base_ormar_config.copy(tablename="selfrefs")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    main_child = ormar.ForeignKey(to=ForwardRef("SelfRef"), related_name="parent")
    children: RelationProxy["SelfRef"] = ormar.ManyToMany(ForwardRef("SelfRef"))


# Resolve the ForwardRef("SelfRef") annotations now that the class exists.
SelfRef.update_forward_refs()

create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_prefetch_related_with_same_model_relations() -> None:
    """prefetch_related and select_related over two M2M relations to the same
    model should serialize to equivalent dicts (modulo through-model fields)."""
    async with base_ormar_config.database:
        for _ in range(6):
            await Author.objects.create(name=fake.name())
        book = await Book.objects.create(name=fake.sentence(nb_words=randint(1, 4)))
        # Authors 1-2 become authors, 3-5 become co-authors.
        for i in range(1, 3):
            await book.authors.add(await Author.objects.get(id=i))
        for i in range(3, 6):
            await book.co_authors.add(await Author.objects.get(id=i))
        prefetch_result = await Book.objects.prefetch_related(
            ["authors", "co_authors"]
        ).all()
        prefetch_dict_result = [x.model_dump() for x in prefetch_result if x.id == 1][0]
        select_result = await Book.objects.select_related(
            ["authors", "co_authors"]
        ).all()
        # select_related also populates through models; exclude them so the
        # dumps are comparable with the prefetch variant.
        select_dict_result = [
            x.model_dump(
                exclude={
                    "authors": {"bookauthor": ...},
                    "co_authors": {"bookcoauthor": ...},
                }
            )
            for x in select_result
            if x.id == 1
        ][0]
        assert prefetch_dict_result == select_dict_result
@pytest.mark.asyncio
async def test_prefetch_related_with_self_referencing() -> None:
    """Self-referencing FK (main_child) and M2M (children) should load correctly
    via both select_related and prefetch_related.

    Fix: the select_related result was only printed (leftover debug output) and
    never verified — it is now asserted like the prefetch result.
    """
    async with base_ormar_config.database:
        main_child = await SelfRef.objects.create(name="MainChild")
        main = await SelfRef.objects.create(name="Main", main_child=main_child)
        child1 = await SelfRef.objects.create(name="Child1")
        child2 = await SelfRef.objects.create(name="Child2")
        await main.children.add(child1)
        await main.children.add(child2)
        select_result = await SelfRef.objects.select_related(
            ["main_child", "children"]
        ).get(name="Main")
        # Verify the join-based path instead of printing it.
        assert select_result.main_child.name == main_child.name
        assert len(select_result.children) == 2
        prefetch_result = await SelfRef.objects.prefetch_related(
            ["main_child", "children"]
        ).get(name="Main")
        assert prefetch_result.main_child.name == main_child.name
        assert len(prefetch_result.children) == 2
        assert prefetch_result.children[0].name == child1.name
        assert prefetch_result.children[1].name == child2.name
collerek-ormar-c09209a/tests/test_relations/test_python_style_relations.py 0000664 0000000 0000000 00000006141 15130200524 0027406 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)


class Post(ormar.Model):
    # M2M without an explicit through model (ormar auto-generates one) plus an
    # FK with an explicit reverse name used by the python-style accessors below.
    ormar_config = base_ormar_config.copy(tablename="posts")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories: Optional[List[Category]] = ormar.ManyToMany(Category)
    author: Optional[Author] = ormar.ForeignKey(Author, related_name="author_posts")


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Delete all rows created by a test, including the auto-generated
    through-table rows, so tests stay independent."""
    yield
    async with base_ormar_config.database:
        # The auto-created through model is reachable from the field definition.
        PostCategory = Post.ormar_config.model_fields["categories"].through
        await PostCategory.objects.delete(each=True)
        await Post.objects.delete(each=True)
        await Category.objects.delete(each=True)
        await Author.objects.delete(each=True)
@pytest.mark.asyncio
async def test_selecting_related(cleanup):
    """Relations can be addressed with python-style attribute accessors
    (e.g. Category.posts, Post.author) instead of strings."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        recent = await Category.objects.create(name="Recent")
        await post.categories.add(news)
        await post.categories.add(recent)
        assert len(await post.categories.all()) == 2
        # Loads categories and posts (2 queries) and perform the join in Python.
        categories = await Category.objects.select_related(Category.posts).all()
        assert len(categories) == 2
        assert categories[0].name == "News"
        news_posts = await news.posts.select_related(Post.author).all()
        assert news_posts[0].author == guido
        # Pagination / existence helpers on the relation proxy.
        assert (await post.categories.limit(1).all())[0] == news
        assert (await post.categories.offset(1).limit(1).all())[0] == recent
        assert await post.categories.first() == news
        assert await post.categories.exists()
        # Nested python-style prefetch: author -> posts -> categories.
        author = await Author.objects.prefetch_related(
            Author.author_posts.categories
        ).get()
        assert len(author.author_posts) == 1
        assert author.author_posts[0].title == "Hello, M2M"
        assert author.author_posts[0].categories[0].name == "News"
        assert author.author_posts[0].categories[1].name == "Recent"
        post = await Post.objects.select_related([Post.author, Post.categories]).get()
        assert len(post.categories) == 2
        assert post.categories[0].name == "News"
        assert post.categories[1].name == "Recent"
        assert post.author.first_name == "Guido"
collerek-ormar-c09209a/tests/test_relations/test_relations_default_exception.py 0000664 0000000 0000000 00000003036 15130200524 0030347 0 ustar 00root root 0000000 0000000 # type: ignore
from typing import List, Optional
import ormar
import pytest
from ormar.exceptions import ModelDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="authors")

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)


create_test_database = init_tests(base_ormar_config)
def test_fk_error():
    """Declaring a ForeignKey with a default value must raise ModelDefinitionError
    at class-creation time (defaults are not supported on relation fields)."""
    with pytest.raises(ModelDefinitionError):

        class Post(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="posts")

            id: int = ormar.Integer(primary_key=True)
            title: str = ormar.String(max_length=200)
            categories: Optional[List[Category]] = ormar.ManyToMany(Category)
            # default= on a relation field is the deliberate error under test.
            author: Optional[Author] = ormar.ForeignKey(Author, default="aa")
def test_m2m_error():
    """Declaring a ManyToMany with a default value must raise ModelDefinitionError
    at class-creation time, mirroring the ForeignKey case above."""
    with pytest.raises(ModelDefinitionError):

        class Post(ormar.Model):
            ormar_config = base_ormar_config.copy(tablename="posts")

            id: int = ormar.Integer(primary_key=True)
            title: str = ormar.String(max_length=200)
            # default= on an M2M field is the deliberate error under test.
            categories: Optional[List[Category]] = ormar.ManyToMany(
                Category, default="aa"
            )
collerek-ormar-c09209a/tests/test_relations/test_replacing_models_with_copy.py 0000664 0000000 0000000 00000002330 15130200524 0030155 0 ustar 00root root 0000000 0000000 from typing import Any, Optional, Tuple, Union
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Album(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False)
    # Plain pydantic-only fields (no ormar column) — part of what the
    # model-copy test below checks is preserved on the generated copy.
    properties: Tuple[str, Any]
    score: Union[str, int]


class Track(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="tracks")

    id: int = ormar.Integer(primary_key=True)
    album: Optional[Album] = ormar.ForeignKey(Album)
    title: str = ormar.String(max_length=100)
    position: int = ormar.Integer()
    play_count: int = ormar.Integer(nullable=True, default=0)
    is_disabled: bool = ormar.Boolean(default=False)
    # Pydantic-only field, same purpose as on Album.
    properties: Tuple[str, Any]


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_model_is_replaced_by_a_copy():
    """The reverse-relation annotation on Album must reference a generated copy
    of Track — a distinct class that still carries the exact same field set."""
    related_model = Album.model_fields["tracks"].annotation.__args__[1]
    # The annotation does not point at Track itself...
    assert related_model != Track
    # ...but the copy mirrors Track's fields one-to-one.
    assert related_model.model_fields.keys() == Track.model_fields.keys()
collerek-ormar-c09209a/tests/test_relations/test_reverse_relation_preserves_validator.py 0000664 0000000 0000000 00000002263 15130200524 0032301 0 ustar 00root root 0000000 0000000 from typing import List, Optional, Union
import ormar
import pytest_asyncio
from pydantic import field_validator
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=80)

    @field_validator("name", mode="before")
    @classmethod
    def validate_name(cls, v: Union[str, List[str]]) -> str:
        """Coerce a list of name parts into a single space-joined string
        before ormar/pydantic validation runs."""
        if isinstance(v, list):
            v = " ".join(v)
        return v


class Post(ormar.Model):
    # Reverse side of this FK should preserve Author's validator (module topic).
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    author: Optional[Author] = ormar.ForeignKey(Author)


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function", autouse=True)
async def cleanup():
    """Autouse teardown: wipe both tables after every test in this module."""
    yield
    async with base_ormar_config.database:
        await Post.objects.delete(each=True)
        await Author.objects.delete(each=True)
def test_validator():
    """The before-mode field_validator should join list input into one string."""
    name_parts = ["Test", "Author"]
    created = Author(name=name_parts)
    assert created.name == "Test Author"
collerek-ormar-c09209a/tests/test_relations/test_saving_related.py 0000664 0000000 0000000 00000003657 15130200524 0025565 0 ustar 00root root 0000000 0000000 from typing import Union
import ormar
import pytest
from ormar.exceptions import ModelPersistenceError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50, unique=True, index=True)
    code: int = ormar.Integer()


class Workshop(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="workshops")

    id: int = ormar.Integer(primary_key=True)
    topic: str = ormar.String(max_length=255, index=True)
    # Non-nullable FK: saving a Workshop requires an already-persisted Category.
    category: Union[ormar.Model, Category] = ormar.ForeignKey(
        Category, related_name="workshops", nullable=False
    )


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_model_relationship():
    """Saving and updating a model with a saved FK target keeps the relation intact."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            cat = await Category(name="Foo", code=123).save()
            ws = await Workshop(topic="Topic 1", category=cat).save()
            assert ws.id == 1
            assert ws.topic == "Topic 1"
            assert ws.category.name == "Foo"
            # In-place update should not disturb the related category.
            ws.topic = "Topic 2"
            await ws.update()
            assert ws.id == 1
            assert ws.topic == "Topic 2"
            assert ws.category.name == "Foo"
@pytest.mark.asyncio
async def test_model_relationship_with_not_saved():
    """Persisting a model whose FK target was never saved must raise
    ModelPersistenceError on both save() and objects.create()."""
    async with base_ormar_config.database:
        async with base_ormar_config.database.transaction(force_rollback=True):
            # Intentionally NOT awaited/saved — it has no primary key yet.
            cat = Category(name="Foo", code=123)
            with pytest.raises(ModelPersistenceError):
                await Workshop(topic="Topic 1", category=cat).save()
            with pytest.raises(ModelPersistenceError):
                await Workshop.objects.create(topic="Topic 1", category=cat)
collerek-ormar-c09209a/tests/test_relations/test_select_related_with_limit.py 0000664 0000000 0000000 00000010650 15130200524 0027775 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Keyword(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="keywords")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)


class KeywordPrimaryModel(ormar.Model):
    # Through model for PrimaryModel.keywords.
    ormar_config = base_ormar_config.copy(tablename="primary_models_keywords")

    id: int = ormar.Integer(primary_key=True)


class PrimaryModel(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="primary_models")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, index=True)
    some_text: str = ormar.Text()
    some_other_text: Optional[str] = ormar.Text(nullable=True)
    keywords: Optional[List[Keyword]] = ormar.ManyToMany(
        Keyword, through=KeywordPrimaryModel
    )


class SecondaryModel(ormar.Model):
    # NOTE(review): not exercised in this module's test — presumably kept for
    # schema completeness; confirm before removing.
    ormar_config = base_ormar_config.copy(tablename="secondary_models")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    primary_model: PrimaryModel = ormar.ForeignKey(
        PrimaryModel, related_name="secondary_models"
    )
@pytest.mark.asyncio
async def test_create_primary_models():
    """limit()/offset() combined with select_related: by default they apply to
    the number of main models; with limit_raw_sql=True they apply to the raw
    joined rows instead (so M2M duplicates count toward the limit)."""
    async with base_ormar_config.database:
        for name, some_text, some_other_text in [
            ("Primary 1", "Some text 1", "Some other text 1"),
            ("Primary 2", "Some text 2", "Some other text 2"),
            ("Primary 3", "Some text 3", "Some other text 3"),
            ("Primary 4", "Some text 4", "Some other text 4"),
            ("Primary 5", "Some text 5", "Some other text 5"),
            ("Primary 6", "Some text 6", "Some other text 6"),
            ("Primary 7", "Some text 7", "Some other text 7"),
            ("Primary 8", "Some text 8", "Some other text 8"),
            ("Primary 9", "Some text 9", "Some other text 9"),
            ("Primary 10", "Some text 10", "Some other text 10"),
        ]:
            await PrimaryModel(
                name=name, some_text=some_text, some_other_text=some_other_text
            ).save()
        for tag_id in [1, 2, 3, 4, 5]:
            await Keyword.objects.create(name=f"Tag {tag_id}")
        p1 = await PrimaryModel.objects.get(pk=1)
        p2 = await PrimaryModel.objects.get(pk=2)
        # Even keyword ids attach to model 1, odd ids to model 2.
        for i in range(1, 6):
            keyword = await Keyword.objects.get(pk=i)
            if i % 2 == 0:
                await p1.keywords.add(keyword)
            else:
                await p2.keywords.add(keyword)
        # Default limit: 5 distinct main models, regardless of join fan-out.
        models = await PrimaryModel.objects.select_related("keywords").limit(5).all()
        assert len(models) == 5
        assert len(models[0].keywords) == 2
        assert len(models[1].keywords) == 3
        assert len(models[2].keywords) == 0
        models2 = (
            await PrimaryModel.objects.select_related("keywords")
            .limit(5)
            .offset(3)
            .all()
        )
        assert len(models2) == 5
        assert [x.name for x in models2] != [x.name for x in models]
        assert [x.name for x in models2] == [
            "Primary 4",
            "Primary 5",
            "Primary 6",
            "Primary 7",
            "Primary 8",
        ]
        # limit_raw_sql=True limits joined rows: 5 rows collapse to 2 models.
        models3 = (
            await PrimaryModel.objects.select_related("keywords")
            .limit(5, limit_raw_sql=True)
            .all()
        )
        assert len(models3) == 2
        assert len(models3[0].keywords) == 2
        assert len(models3[1].keywords) == 3
        models4 = (
            await PrimaryModel.objects.offset(1)
            .select_related("keywords")
            .limit(5, limit_raw_sql=True)
            .all()
        )
        assert len(models4) == 3
        assert [x.name for x in models4] == ["Primary 1", "Primary 2", "Primary 3"]
        assert len(models4[0].keywords) == 1
        assert len(models4[1].keywords) == 3
        assert len(models4[2].keywords) == 0
        models5 = (
            await PrimaryModel.objects.select_related("keywords")
            .offset(2, limit_raw_sql=True)
            .limit(5)
            .all()
        )
        assert len(models5) == 3
        assert [x.name for x in models5] == ["Primary 2", "Primary 3", "Primary 4"]
        assert len(models5[0].keywords) == 3
        assert len(models5[1].keywords) == 0
        assert len(models5[2].keywords) == 0


# NOTE(review): placed after the test here, unlike the other modules where it
# follows the model definitions; module-level code still runs at import time.
create_test_database = init_tests(base_ormar_config)
collerek-ormar-c09209a/tests/test_relations/test_select_related_with_m2m_and_pk_name_set.py 0000664 0000000 0000000 00000007371 15130200524 0032547 0 ustar 00root root 0000000 0000000 # type: ignore
from datetime import date
from typing import List, Optional, Union
import ormar
import pytest
import sqlalchemy
from ormar import ModelDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Role(ormar.Model):
    # String primary key instead of an autoincrement integer (module topic).
    ormar_config = base_ormar_config.copy()

    name: str = ormar.String(primary_key=True, max_length=1000)
    # Stored under a custom column name "sort_order".
    order: int = ormar.Integer(default=0, name="sort_order")
    description: str = ormar.Text()


class Company(ormar.Model):
    ormar_config = base_ormar_config.copy()

    name: str = ormar.String(primary_key=True, max_length=1000)


class UserRoleCompany(ormar.Model):
    # Through model for User.roleforcompanies; no fields of its own.
    ormar_config = base_ormar_config.copy()


class User(ormar.Model):
    ormar_config = base_ormar_config.copy()

    registrationnumber: str = ormar.String(primary_key=True, max_length=1000)
    # Two FKs to the same model require a distinct related_name on the second.
    company: Company = ormar.ForeignKey(Company)
    company2: Company = ormar.ForeignKey(Company, related_name="secondary_users")
    name: str = ormar.Text()
    role: Optional[Role] = ormar.ForeignKey(Role)
    roleforcompanies: Optional[Union[Company, List[Company]]] = ormar.ManyToMany(
        Company, through=UserRoleCompany, related_name="role_users"
    )
    lastupdate: date = ormar.DateTime(server_default=sqlalchemy.func.now())


create_test_database = init_tests(base_ormar_config)
def test_wrong_model():
    """An invalid model definition must raise ModelDefinitionError.

    NOTE(review): presumably the trigger is the two ForeignKeys to Company
    without a distinct related_name (reverse-relation clash) — confirm against
    ormar's relation validation.
    """
    with pytest.raises(ModelDefinitionError):

        class User(ormar.Model):
            ormar_config = base_ormar_config.copy()

            registrationnumber: str = ormar.Text(primary_key=True)
            company: Company = ormar.ForeignKey(Company)
            company2: Company = ormar.ForeignKey(Company)
@pytest.mark.asyncio
async def test_create_primary_models():
    """CRUD and select_related across FK/M2M relations keyed by string primary keys."""
    async with base_ormar_config.database:
        await Role.objects.create(
            name="user", order=0, description="no administration right"
        )
        role_1 = await Role.objects.create(
            name="admin", order=1, description="standard administration right"
        )
        await Role.objects.create(
            name="super_admin", order=2, description="super administration right"
        )
        assert await Role.objects.count() == 3
        company_0 = await Company.objects.create(name="Company")
        company_1 = await Company.objects.create(name="Subsidiary Company 1")
        company_2 = await Company.objects.create(name="Subsidiary Company 2")
        company_3 = await Company.objects.create(name="Subsidiary Company 3")
        assert await Company.objects.count() == 4
        # Create and delete once to verify delete works with a string pk.
        user = await User.objects.create(
            registrationnumber="00-00000", company=company_0, name="admin", role=role_1
        )
        assert await User.objects.count() == 1
        await user.delete()
        assert await User.objects.count() == 0
        user = await User.objects.create(
            registrationnumber="00-00000",
            company=company_0,
            company2=company_3,
            name="admin",
            role=role_1,
        )
        await user.roleforcompanies.add(company_1)
        await user.roleforcompanies.add(company_2)
        # Join both FKs and the M2M in one query; reverse sides must populate too.
        users = await User.objects.select_related(
            ["company", "company2", "roleforcompanies"]
        ).all()
        assert len(users) == 1
        assert len(users[0].roleforcompanies) == 2
        assert len(users[0].roleforcompanies[0].role_users) == 1
        assert users[0].company.name == "Company"
        assert len(users[0].company.users) == 1
        assert users[0].company2.name == "Subsidiary Company 3"
        assert len(users[0].company2.secondary_users) == 1
        users = await User.objects.select_related("roleforcompanies").all()
        assert len(users) == 1
        assert len(users[0].roleforcompanies) == 2
collerek-ormar-c09209a/tests/test_relations/test_selecting_proper_table_prefix.py 0000664 0000000 0000000 00000004454 15130200524 0030662 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class User(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="test_users")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=50)


class Signup(ormar.Model):
    # Through model for Session.students.
    ormar_config = base_ormar_config.copy(tablename="test_signup")

    id: int = ormar.Integer(primary_key=True)


class Session(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="test_sessions")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, index=True)
    some_text: str = ormar.Text()
    some_other_text: Optional[str] = ormar.Text(nullable=True)
    # No related_name -> reverse side defaults to "sessions" on User.
    students: Optional[List[User]] = ormar.ManyToMany(User, through=Signup)


create_test_database = init_tests(base_ormar_config)
@pytest.mark.asyncio
async def test_list_sessions_for_user():
    """M2M membership added from the Session side is visible, correctly scoped,
    and reachable from the User side via the default reverse name."""
    async with base_ormar_config.database:
        for user_id in [1, 2, 3, 4, 5]:
            await User.objects.create(name=f"User {user_id}")
        for name, some_text, some_other_text in [
            ("Session 1", "Some text 1", "Some other text 1"),
            ("Session 2", "Some text 2", "Some other text 2"),
            ("Session 3", "Some text 3", "Some other text 3"),
            ("Session 4", "Some text 4", "Some other text 4"),
            ("Session 5", "Some text 5", "Some other text 5"),
        ]:
            await Session(
                name=name, some_text=some_text, some_other_text=some_other_text
            ).save()
        s1 = await Session.objects.get(pk=1)
        s2 = await Session.objects.get(pk=2)
        users = {}
        # Even user ids join session 1, odd ids join session 2.
        for i in range(1, 6):
            user = await User.objects.get(pk=i)
            users[f"user_{i}"] = user
            if i % 2 == 0:
                await s1.students.add(user)
            else:
                await s2.students.add(user)
        # Each session sees exactly its own students (no prefix mix-up).
        assert len(s1.students) == 2
        assert len(s2.students) == 3
        assert [x.pk for x in s1.students] == [2, 4]
        assert [x.pk for x in s2.students] == [1, 3, 5]
        user = await User.objects.select_related("sessions").get(pk=1)
        assert user.sessions is not None
        assert len(user.sessions) > 0
collerek-ormar-c09209a/tests/test_relations/test_skipping_reverse.py 0000664 0000000 0000000 00000015732 15130200524 0026152 0 ustar 00root root 0000000 0000000 from typing import List, Optional
import ormar
import pytest
import pytest_asyncio
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class Author(ormar.Model):
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    first_name: str = ormar.String(max_length=80)
    last_name: str = ormar.String(max_length=80)


class Category(ormar.Model):
    ormar_config = base_ormar_config.copy(tablename="categories")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=40)


class Post(ormar.Model):
    # skip_reverse=True: no `posts` attribute is generated on Category/Author.
    ormar_config = base_ormar_config.copy()

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=200)
    categories: Optional[List[Category]] = ormar.ManyToMany(Category, skip_reverse=True)
    author: Optional[Author] = ormar.ForeignKey(Author, skip_reverse=True)


create_test_database = init_tests(base_ormar_config)
@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Delete all rows created by a test, through-table first, so tests stay independent."""
    yield
    async with base_ormar_config.database:
        PostCategory = Post.ormar_config.model_fields["categories"].through
        await PostCategory.objects.delete(each=True)
        await Post.objects.delete(each=True)
        await Category.objects.delete(each=True)
        await Author.objects.delete(each=True)
def test_model_definition():
    """With skip_reverse the forward side of the relation works, but the reverse
    attribute does not exist on the related models."""
    category = Category(name="Test")
    author = Author(first_name="Test", last_name="Author")
    post = Post(title="Test Post", author=author)
    post.categories = category
    assert post.categories[0] == category
    assert post.author == author
    # Reverse accessors were never generated.
    with pytest.raises(AttributeError):
        assert author.posts
    with pytest.raises(AttributeError):
        assert category.posts
    # Internal relation registry does not track the skipped reverse either.
    assert "posts" not in category._orm
@pytest.mark.asyncio
async def test_assigning_related_objects(cleanup):
    """Adding/creating through the forward relation works; the reverse side
    raises AttributeError even after select_related names it."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        # Add a category to a post.
        await post.categories.add(news)
        # other way is disabled
        with pytest.raises(AttributeError):
            await news.posts.add(post)
        assert await post.categories.get_or_none(name="no exist") is None
        assert await post.categories.get_or_none(name="News") == news
        # Creating columns object from instance:
        await post.categories.create(name="Tips")
        assert len(post.categories) == 2
        post_categories = await post.categories.all()
        assert len(post_categories) == 2
        # Filtering through "posts" in queries is still allowed; only the
        # instance attribute is missing.
        category = await Category.objects.select_related("posts").get(name="News")
        with pytest.raises(AttributeError):
            assert category.posts
@pytest.mark.asyncio
async def test_quering_of_related_model_works_but_no_result(cleanup):
    """Querying/filtering across a skip_reverse relation works, but the reverse
    field never appears in model_dump(), JSON output, or the JSON schema."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        await post.categories.add(news)
        post_categories = await post.categories.all()
        assert len(post_categories) == 1
        assert "posts" not in post.model_dump().get("categories", [])[0]
        assert news == await post.categories.get(name="News")
        posts_about_python = await Post.objects.filter(categories__name="python").all()
        assert len(posts_about_python) == 0
        # relation not in dict
        category = (
            await Category.objects.select_related("posts")
            .filter(posts__author=guido)
            .get()
        )
        assert category == news
        assert "posts" not in category.model_dump()
        # relation not in json
        category2 = (
            await Category.objects.select_related("posts")
            .filter(posts__author__first_name="Guido")
            .get()
        )
        assert category2 == news
        assert "posts" not in category2.model_dump_json()
        assert "posts" not in Category.model_json_schema().get("properties")
@pytest.mark.asyncio
async def test_removal_of_the_relations(cleanup):
    """remove()/clear() work from the owning side; the reverse side raises;
    deleting the child row also removes the m2m link."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        news = await Category.objects.create(name="News")
        await post.categories.add(news)
        assert len(await post.categories.all()) == 1
        await post.categories.remove(news)
        assert len(await post.categories.all()) == 0

        # reverse access is disabled with skip_reverse
        with pytest.raises(AttributeError):
            await news.posts.add(post)
        with pytest.raises(AttributeError):
            await news.posts.remove(post)

        await post.categories.add(news)
        await post.categories.clear()
        assert len(await post.categories.all()) == 0

        # deleting the category also drops the through-table row
        await post.categories.add(news)
        await news.delete()
        assert len(await post.categories.all()) == 0
@pytest.mark.asyncio
async def test_selecting_related(cleanup):
    """Ordering and filtering through skipped reverse relations still works for
    both m2m (Category.posts) and reverse fk (Author.posts) query paths."""
    async with base_ormar_config.database:
        guido = await Author.objects.create(first_name="Guido", last_name="Van Rossum")
        guido2 = await Author.objects.create(
            first_name="Guido2", last_name="Van Rossum"
        )
        post = await Post.objects.create(title="Hello, M2M", author=guido)
        post2 = await Post.objects.create(title="Bye, M2M", author=guido2)
        news = await Category.objects.create(name="News")
        recent = await Category.objects.create(name="Recent")
        await post.categories.add(news)
        await post.categories.add(recent)
        await post2.categories.add(recent)
        assert len(await post.categories.all()) == 2
        assert (await post.categories.limit(1).all())[0] == news
        assert (await post.categories.offset(1).limit(1).all())[0] == recent
        assert await post.categories.first() == news
        assert await post.categories.exists()
        # still can order
        categories = (
            await Category.objects.select_related("posts")
            .order_by("posts__title")
            .all()
        )
        assert categories[0].name == "Recent"
        assert categories[1].name == "News"
        # still can filter
        categories = await Category.objects.filter(posts__title="Bye, M2M").all()
        assert categories[0].name == "Recent"
        assert len(categories) == 1
        # same for reverse fk
        authors = (
            await Author.objects.select_related("posts").order_by("posts__title").all()
        )
        assert authors[0].first_name == "Guido2"
        assert authors[1].first_name == "Guido"
        authors = await Author.objects.filter(posts__title="Bye, M2M").all()
        assert authors[0].first_name == "Guido2"
        assert len(authors) == 1
collerek-ormar-c09209a/tests/test_relations/test_through_relations_fail.py 0000664 0000000 0000000 00000002442 15130200524 0027320 0 ustar 00root root 0000000 0000000 # type: ignore
import ormar
import pytest
from ormar import ModelDefinitionError
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
def test_through_with_relation_fails():
    """A through model that declares its own ForeignKey relation is rejected
    with ModelDefinitionError when used in ManyToMany(through=...)."""

    class Category(ormar.Model):
        ormar_config = base_ormar_config.copy(tablename="categories")

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=40)

    class Blog(ormar.Model):
        ormar_config = base_ormar_config.copy()

        id: int = ormar.Integer(primary_key=True)
        title: str = ormar.String(max_length=200)

    class PostCategory(ormar.Model):
        ormar_config = base_ormar_config.copy(tablename="posts_x_categories")

        id: int = ormar.Integer(primary_key=True)
        sort_order: int = ormar.Integer(nullable=True)
        param_name: str = ormar.String(default="Name", max_length=200)
        # This extra relation is what makes PostCategory invalid as a through model.
        blog = ormar.ForeignKey(Blog)

    with pytest.raises(ModelDefinitionError):

        class Post(ormar.Model):
            ormar_config = base_ormar_config.copy()

            id: int = ormar.Integer(primary_key=True)
            title: str = ormar.String(max_length=200)
            categories = ormar.ManyToMany(Category, through=PostCategory)
# Module-level hook that creates (and later drops) the test database tables.
create_test_database = init_tests(base_ormar_config)
collerek-ormar-c09209a/tests/test_relations/test_weakref_checking.py 0000664 0000000 0000000 00000002034 15130200524 0026041 0 ustar 00root root 0000000 0000000 import ormar
from tests.settings import create_config
base_ormar_config = create_config()
from tests.lifespan import init_tests
class Band(ormar.Model):
    """Parent model; its reverse relation `artists` is exercised below."""

    ormar_config = base_ormar_config.copy(tablename="bands")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Artist(ormar.Model):
    """Child model pointing at Band via fk; instances are weakly referenced."""

    ormar_config = base_ormar_config.copy(tablename="artists")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    band: Band = ormar.ForeignKey(Band)
# Module-level hook that creates (and later drops) the test database tables.
create_test_database = init_tests(base_ormar_config)
def test_weakref_init():
    """Deleted child instances must be pruned from the weak reverse relation.

    NOTE(review): this relies on CPython's immediate refcount-based collection
    of `artist1`/`artist3` after `del` — statement order matters here.
    """
    band = Band(name="Band")
    artist1 = Artist(name="Artist 1", band=band)
    artist2 = Artist(name="Artist 2", band=band)
    artist3 = Artist(name="Artist 3", band=band)

    del artist1
    Artist(
        name="Artist 2", band=band
    )  # Force it to check for weakly-referenced objects
    del artist3

    band.artists  # Force it to clean
    # Only artist2 is still alive, so only it should remain in the relation.
    assert len(band.artists) == 1
    assert band.artists[0].name == artist2.name
collerek-ormar-c09209a/tests/test_signals/ 0000775 0000000 0000000 00000000000 15130200524 0020612 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_signals/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022711 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_signals/test_signals.py 0000664 0000000 0000000 00000027766 15130200524 0023705 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pydantic
import pytest
import pytest_asyncio
from ormar import (
post_bulk_update,
post_delete,
post_save,
post_update,
pre_delete,
pre_save,
pre_update,
)
from ormar.exceptions import SignalDefinitionError
from ormar.signals import SignalEmitter
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class AuditLog(ormar.Model):
    """Stores one row per emitted signal so tests can assert on the history."""

    ormar_config = base_ormar_config.copy(tablename="audits")

    id: int = ormar.Integer(primary_key=True)
    event_type: str = ormar.String(max_length=100)
    event_log: pydantic.Json = ormar.JSON()
class Cover(ormar.Model):
    """Secondary model used to test multi-sender signal registration."""

    ormar_config = base_ormar_config.copy(tablename="covers")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=100)
class Album(ormar.Model):
    """Primary signal target; `play_count`/`is_best_seller` are mutated by receivers."""

    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    is_best_seller: bool = ormar.Boolean(default=False)
    play_count: int = ormar.Integer(default=0)
    cover: Optional[Cover] = ormar.ForeignKey(Cover)
# Module-level hook that creates (and later drops) the test database tables.
create_test_database = init_tests(base_ormar_config)


@pytest_asyncio.fixture(scope="function")
async def cleanup():
    """Per-test teardown: remove every AuditLog and Album row the test created."""
    yield
    async with base_ormar_config.database:
        await AuditLog.objects.delete(each=True)
        await Album.objects.delete(each=True)
def test_passing_not_callable():
    """Registering a non-callable receiver must raise SignalDefinitionError."""
    decorator = pre_save(Album)
    with pytest.raises(SignalDefinitionError):
        decorator("wrong")
def test_passing_callable_without_kwargs():
    """Receivers must accept **kwargs; a strict signature is rejected on connect."""
    with pytest.raises(SignalDefinitionError):

        @pre_save(Album)
        def trigger(sender, instance):  # pragma: no cover
            pass
def test_invalid_signal():
    """Assigning a plain value as a signal on the emitter must be rejected."""
    signal_emitter = SignalEmitter()
    with pytest.raises(SignalDefinitionError):
        setattr(signal_emitter, "save", 1)
@pytest.mark.asyncio
async def test_signal_functions(cleanup):
    """Full lifecycle check: every pre/post save, update, delete and bulk-update
    signal writes an AuditLog row, and disconnecting stops further logging."""
    async with base_ormar_config.database:

        @pre_save(Album)
        async def before_save(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_SAVE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @post_save(Album)
        async def after_save(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"POST_SAVE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @pre_update(Album)
        async def before_update(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_UPDATE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @post_update(Album)
        async def after_update(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"POST_UPDATE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @pre_delete(Album)
        async def before_delete(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_DELETE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @post_delete(Album)
        async def after_delete(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"POST_DELETE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @post_bulk_update(Album)
        async def after_bulk_update(sender, instances, **kwargs):
            # bulk update receives the whole list of instances, log each one
            for it in instances:
                await AuditLog(
                    event_type=f"BULK_POST_UPDATE_{sender.get_name()}",
                    event_log=it.model_dump_json(),
                ).save()

        # objects.create() fires pre_save then post_save
        album = await Album.objects.create(name="Venice")
        audits = await AuditLog.objects.all()
        assert len(audits) == 2
        assert audits[0].event_type == "PRE_SAVE_album"
        assert audits[0].event_log.get("name") == album.name
        assert audits[1].event_type == "POST_SAVE_album"
        assert audits[1].event_log.get("id") == album.pk

        # instance.save() fires the same pair
        album = await Album(name="Rome").save()
        audits = await AuditLog.objects.all()
        assert len(audits) == 4
        assert audits[2].event_type == "PRE_SAVE_album"
        assert audits[2].event_log.get("name") == album.name
        assert audits[3].event_type == "POST_SAVE_album"
        assert audits[3].event_log.get("id") == album.pk

        album.is_best_seller = True
        await album.update()
        audits = await AuditLog.objects.filter(event_type__contains="UPDATE").all()
        assert len(audits) == 2
        assert audits[0].event_type == "PRE_UPDATE_album"
        assert audits[0].event_log.get("name") == album.name
        assert audits[1].event_type == "POST_UPDATE_album"
        assert audits[1].event_log.get("is_best_seller") == album.is_best_seller

        # once disconnected, updates no longer add audit entries
        album.signals.pre_update.disconnect(before_update)
        album.signals.post_update.disconnect(after_update)
        album.is_best_seller = False
        await album.update()
        audits = await AuditLog.objects.filter(event_type__contains="UPDATE").all()
        assert len(audits) == 2

        await album.delete()
        audits = await AuditLog.objects.filter(event_type__contains="DELETE").all()
        assert len(audits) == 2
        assert audits[0].event_type == "PRE_DELETE_album"
        assert (
            audits[0].event_log.get("id") == audits[1].event_log.get("id") == album.id
        )
        assert audits[1].event_type == "POST_DELETE_album"

        album.signals.pre_delete.disconnect(before_delete)
        album.signals.post_delete.disconnect(after_delete)
        album.signals.pre_save.disconnect(before_save)
        album.signals.post_save.disconnect(after_save)

        # bulk_update fires one BULK_POST_UPDATE row per instance
        albums = await Album.objects.all()
        assert len(albums)
        for album in albums:
            album.play_count = 1
        await Album.objects.bulk_update(albums)
        cnt = await AuditLog.objects.filter(event_type__contains="BULK_POST").count()
        assert cnt == len(albums)
        album.signals.bulk_post_update.disconnect(after_bulk_update)
@pytest.mark.asyncio
async def test_multiple_signals(cleanup):
    """Two receivers registered on the same signal both run on one save."""
    async with base_ormar_config.database:

        @pre_save(Album)
        async def before_save(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_SAVE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        @pre_save(Album)
        async def before_save2(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_SAVE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        album = await Album.objects.create(name="Miami")
        audits = await AuditLog.objects.all()
        # one audit row per connected receiver
        assert len(audits) == 2
        assert audits[0].event_type == "PRE_SAVE_album"
        assert audits[0].event_log.get("name") == album.name
        assert audits[1].event_type == "PRE_SAVE_album"
        assert audits[1].event_log.get("name") == album.name
        album.signals.pre_save.disconnect(before_save)
        album.signals.pre_save.disconnect(before_save2)
@pytest.mark.asyncio
async def test_static_methods_as_signals(cleanup):
    """A staticmethod can be registered as a signal receiver via the decorator."""
    async with base_ormar_config.database:

        class AlbumAuditor:
            # class-level constant used inside the receiver below
            event_type = "ALBUM_INSTANCE"

            @staticmethod
            @pre_save(Album)
            async def before_save(sender, instance, **kwargs):
                await AuditLog(
                    event_type=f"{AlbumAuditor.event_type}_SAVE",
                    event_log=instance.model_dump_json(),
                ).save()

        album = await Album.objects.create(name="Colorado")
        audits = await AuditLog.objects.all()
        assert len(audits) == 1
        assert audits[0].event_type == "ALBUM_INSTANCE_SAVE"
        assert audits[0].event_log.get("name") == album.name
        album.signals.pre_save.disconnect(AlbumAuditor.before_save)
@pytest.mark.asyncio
async def test_methods_as_signals(cleanup):
    """A bound instance method can serve as a signal receiver."""
    async with base_ormar_config.database:

        class AlbumAuditor:
            def __init__(self):
                self.event_type = "ALBUM_INSTANCE"

            async def before_save(self, sender, instance, **kwargs):
                await AuditLog(
                    event_type=f"{self.event_type}_SAVE",
                    event_log=instance.model_dump_json(),
                ).save()

        auditor = AlbumAuditor()
        # connect the bound method directly instead of using it as a decorator
        pre_save(Album)(auditor.before_save)
        album = await Album.objects.create(name="San Francisco")
        audits = await AuditLog.objects.all()
        assert len(audits) == 1
        assert audits[0].event_type == "ALBUM_INSTANCE_SAVE"
        assert audits[0].event_log.get("name") == album.name
        album.signals.pre_save.disconnect(auditor.before_save)
@pytest.mark.asyncio
async def test_multiple_senders_signal(cleanup):
    """One receiver bound to a list of models fires for each sender class."""
    async with base_ormar_config.database:

        @pre_save([Album, Cover])
        async def before_save(sender, instance, **kwargs):
            await AuditLog(
                event_type=f"PRE_SAVE_{sender.get_name()}",
                event_log=instance.model_dump_json(),
            ).save()

        cover = await Cover(title="Blue").save()
        album = await Album.objects.create(name="San Francisco", cover=cover)
        audits = await AuditLog.objects.all()
        assert len(audits) == 2
        assert audits[0].event_type == "PRE_SAVE_cover"
        assert audits[0].event_log.get("title") == cover.title
        assert audits[1].event_type == "PRE_SAVE_album"
        # nested fk is serialized into the album's audit payload
        assert audits[1].event_log.get("cover") == album.cover.model_dump(
            exclude={"albums"}
        )
        album.signals.pre_save.disconnect(before_save)
        cover.signals.pre_save.disconnect(before_save)
@pytest.mark.asyncio
async def test_modifing_the_instance(cleanup):
    """A pre_update receiver may mutate the instance before it is persisted."""
    async with base_ormar_config.database:

        @pre_update(Album)
        async def before_update(sender, instance, **kwargs):
            if instance.play_count > 50 and not instance.is_best_seller:
                instance.is_best_seller = True

        # here album.play_count and is_best_seller get default values
        album = await Album.objects.create(name="Venice")
        assert not album.is_best_seller
        assert album.play_count == 0

        album.play_count = 30
        # here a trigger is called but play_count is too low
        await album.update()
        assert not album.is_best_seller

        # above the threshold the receiver flips the flag before saving
        album.play_count = 60
        await album.update()
        assert album.is_best_seller
        album.signals.pre_update.disconnect(before_update)
@pytest.mark.asyncio
async def test_custom_signal(cleanup):
    """Arbitrary signal names can be created on the emitter and sent manually;
    they do not fire on update(), only on an explicit send()."""
    async with base_ormar_config.database:

        async def after_update(sender, instance, **kwargs):
            if instance.play_count > 50 and not instance.is_best_seller:
                instance.is_best_seller = True
            elif instance.play_count < 50 and instance.is_best_seller:
                instance.is_best_seller = False
            await instance.update()

        # accessing an unknown name on `signals` lazily creates the signal
        Album.ormar_config.signals.custom.connect(after_update)

        # here album.play_count and is_best_seller get default values
        album = await Album.objects.create(name="Venice")
        assert not album.is_best_seller
        assert album.play_count == 0

        album.play_count = 30
        # here a trigger is called but play_count is too low
        await album.update()
        assert not album.is_best_seller

        # update() alone does not fire the custom signal
        album.play_count = 60
        await album.update()
        assert not album.is_best_seller
        await Album.ormar_config.signals.custom.send(sender=Album, instance=album)
        assert album.is_best_seller

        album.play_count = 30
        await album.update()
        assert album.is_best_seller
        await Album.ormar_config.signals.custom.send(sender=Album, instance=album)
        assert not album.is_best_seller
        Album.ormar_config.signals.custom.disconnect(after_update)
collerek-ormar-c09209a/tests/test_signals/test_signals_for_relations.py 0000664 0000000 0000000 00000015655 15130200524 0026625 0 ustar 00root root 0000000 0000000 from typing import Optional
import ormar
import pydantic
import pytest
import pytest_asyncio
from ormar import (
post_relation_add,
post_relation_remove,
pre_relation_add,
pre_relation_remove,
)
from tests.lifespan import init_tests
from tests.settings import create_config
base_ormar_config = create_config()
class AuditLog(ormar.Model):
    """One row per relation-signal invocation, with the payload stored as JSON."""

    ormar_config = base_ormar_config.copy(tablename="audits")

    id: int = ormar.Integer(primary_key=True)
    event_type: str = ormar.String(max_length=100)
    event_log: pydantic.Json = ormar.JSON()
class Cover(ormar.Model):
    """Fk parent of Album; exercises reverse-fk relation signals."""

    ormar_config = base_ormar_config.copy(tablename="covers")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=100)
class Artist(ormar.Model):
    """m2m partner of Album; uses explicit db column names via `name=`."""

    ormar_config = base_ormar_config.copy(tablename="artists")

    id: int = ormar.Integer(name="artist_id", primary_key=True)
    name: str = ormar.String(name="fname", max_length=100)
class Album(ormar.Model):
    """Carries both an fk (cover) and an m2m (artists) to exercise both kinds
    of relation signals."""

    ormar_config = base_ormar_config.copy(tablename="albums")

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=100)
    cover: Optional[Cover] = ormar.ForeignKey(Cover)
    artists = ormar.ManyToMany(Artist)
# Module-level hook that creates (and later drops) the test database tables.
create_test_database = init_tests(base_ormar_config)


@pytest_asyncio.fixture(autouse=True, scope="function")
async def cleanup():
    """Autouse teardown: purge AuditLog and Album rows after every test."""
    yield
    async with base_ormar_config.database:
        await AuditLog.objects.delete(each=True)
        await Album.objects.delete(each=True)
@pytest.mark.asyncio
async def test_relation_signal_functions():
    """pre/post relation add/remove signals fire for reverse-fk and m2m relations,
    and kwargs passed to add() are forwarded to (and mutable by) receivers."""
    async with base_ormar_config.database:

        @pre_relation_add([Album, Cover, Artist])
        async def before_relation_add(
            sender, instance, child, relation_name, passed_kwargs, **kwargs
        ):
            await AuditLog.objects.create(
                event_type="RELATION_PRE_ADD",
                event_log=dict(
                    class_affected=sender.get_name(),
                    parent_id=instance.pk,
                    child_id=child.pk,
                    relation_name=relation_name,
                    kwargs=passed_kwargs,
                ),
            )
            # receivers may alter kwargs before they reach the relation code
            passed_kwargs.pop("dummy", None)

        @post_relation_add([Album, Cover, Artist])
        async def after_relation_add(
            sender, instance, child, relation_name, passed_kwargs, **kwargs
        ):
            await AuditLog.objects.create(
                event_type="RELATION_POST_ADD",
                event_log=dict(
                    class_affected=sender.get_name(),
                    parent_id=instance.pk,
                    child_id=child.pk,
                    relation_name=relation_name,
                    kwargs=passed_kwargs,
                ),
            )

        @pre_relation_remove([Album, Cover, Artist])
        async def before_relation_remove(
            sender, instance, child, relation_name, **kwargs
        ):
            await AuditLog.objects.create(
                event_type="RELATION_PRE_REMOVE",
                event_log=dict(
                    class_affected=sender.get_name(),
                    parent_id=instance.pk,
                    child_id=child.pk,
                    relation_name=relation_name,
                    kwargs=kwargs,
                ),
            )

        @post_relation_remove([Album, Cover, Artist])
        async def after_relation_remove(
            sender, instance, child, relation_name, **kwargs
        ):
            await AuditLog.objects.create(
                event_type="RELATION_POST_REMOVE",
                event_log=dict(
                    class_affected=sender.get_name(),
                    parent_id=instance.pk,
                    child_id=child.pk,
                    relation_name=relation_name,
                    kwargs=kwargs,
                ),
            )

        cover = await Cover(title="New").save()
        artist = await Artist(name="Artist").save()
        album = await Album(title="New Album").save()

        # reverse-fk add: extra kwargs (index=0) are recorded on both signals
        await cover.albums.add(album, index=0)
        log = await AuditLog.objects.get(event_type="RELATION_PRE_ADD")
        assert log.event_log.get("parent_id") == cover.pk
        assert log.event_log.get("child_id") == album.pk
        assert log.event_log.get("relation_name") == "albums"
        assert log.event_log.get("kwargs") == dict(index=0)
        log2 = await AuditLog.objects.get(event_type="RELATION_POST_ADD")
        assert log2.event_log.get("parent_id") == cover.pk
        assert log2.event_log.get("child_id") == album.pk
        assert log2.event_log.get("relation_name") == "albums"
        assert log2.event_log.get("kwargs") == dict(index=0)

        # m2m add: the pre-add receiver pops "dummy", so post-add sees no kwargs
        await album.artists.add(artist, dummy="test")
        log3 = await AuditLog.objects.filter(
            event_type="RELATION_PRE_ADD", id__gt=log2.pk
        ).get()
        assert log3.event_log.get("parent_id") == album.pk
        assert log3.event_log.get("child_id") == artist.pk
        assert log3.event_log.get("relation_name") == "artists"
        assert log3.event_log.get("kwargs") == dict(dummy="test")
        log4 = await AuditLog.objects.get(
            event_type="RELATION_POST_ADD", id__gt=log3.pk
        )
        assert log4.event_log.get("parent_id") == album.pk
        assert log4.event_log.get("child_id") == artist.pk
        assert log4.event_log.get("relation_name") == "artists"
        assert log4.event_log.get("kwargs") == dict()

        assert album.cover == cover
        assert len(album.artists) == 1

        # reverse-fk remove fires both remove signals with empty kwargs
        await cover.albums.remove(album)
        log = await AuditLog.objects.get(event_type="RELATION_PRE_REMOVE")
        assert log.event_log.get("parent_id") == cover.pk
        assert log.event_log.get("child_id") == album.pk
        assert log.event_log.get("relation_name") == "albums"
        assert log.event_log.get("kwargs") == dict()
        log2 = await AuditLog.objects.get(event_type="RELATION_POST_REMOVE")
        assert log2.event_log.get("parent_id") == cover.pk
        assert log2.event_log.get("child_id") == album.pk
        assert log2.event_log.get("relation_name") == "albums"
        assert log2.event_log.get("kwargs") == dict()

        # m2m remove fires both remove signals as well
        await album.artists.remove(artist)
        log3 = await AuditLog.objects.filter(
            event_type="RELATION_PRE_REMOVE", id__gt=log2.pk
        ).get()
        assert log3.event_log.get("parent_id") == album.pk
        assert log3.event_log.get("child_id") == artist.pk
        assert log3.event_log.get("relation_name") == "artists"
        assert log3.event_log.get("kwargs") == dict()
        log4 = await AuditLog.objects.get(
            event_type="RELATION_POST_REMOVE", id__gt=log3.pk
        )
        assert log4.event_log.get("parent_id") == album.pk
        assert log4.event_log.get("child_id") == artist.pk
        assert log4.event_log.get("relation_name") == "artists"
        assert log4.event_log.get("kwargs") == dict()

        # after both removals the album has no relations left in the db
        await album.load_all()
        assert len(album.artists) == 0
        assert album.cover is None
collerek-ormar-c09209a/tests/test_types.py 0000664 0000000 0000000 00000006710 15130200524 0020674 0 ustar 00root root 0000000 0000000 import databases
import ormar
import pytest
import sqlalchemy
from ormar.models.ormar_config import OrmarConfig
from tests.settings import DATABASE_URL
# Module-level database connection and metadata shared by all models below.
database = databases.Database(DATABASE_URL)
metadata = sqlalchemy.MetaData()
class Publisher(ormar.Model):
    """Model configured with an explicit OrmarConfig instead of a shared base."""

    ormar_config = OrmarConfig(
        metadata=metadata,
        database=database,
        tablename="publishers",
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
class Author(ormar.Model):
    """Author with default descending order by name and an m2m to Publisher."""

    ormar_config = OrmarConfig(
        metadata=metadata, database=database, tablename="authors", order_by=["-name"]
    )

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    publishers = ormar.ManyToMany(Publisher)
class Book(ormar.Model):
    """Book ordered by year ascending then ranking descending by default."""

    ormar_config = OrmarConfig(
        metadata=metadata,
        database=database,
        tablename="books",
        order_by=["year", "-ranking"],
    )

    id: int = ormar.Integer(primary_key=True)
    author = ormar.ForeignKey(Author)
    title: str = ormar.String(max_length=100)
    year: int = ormar.Integer(nullable=True)
    ranking: int = ormar.Integer(nullable=True)
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
    """Recreate all tables for this module, dropping them again afterwards."""
    db_engine = sqlalchemy.create_engine(DATABASE_URL)
    # Drop first in case a previous run left tables behind.
    metadata.drop_all(db_engine)
    metadata.create_all(db_engine)
    yield
    metadata.drop_all(db_engine)
def assert_type(book: Book):
    """Smoke-check that a fully typed Book instance can be stringified."""
    str(book)
@pytest.mark.asyncio
async def test_types() -> None:
    """Exercises typed access patterns across fk/m2m relations; the commented
    reveal_type lines document the static-typing expectations for mypy runs."""
    async with database:
        publisher = await Publisher(name="Test publisher").save()
        author = await Author.objects.create(name="Test Author")
        await author.publishers.add(publisher)
        await Author.objects.select_related("publishers").get()
        publisher2 = await Publisher.objects.select_related("authors").get()
        authors = publisher2.authors
        assert authors[0] == author
        for author in authors:
            pass
        # if TYPE_CHECKING:  # pragma: no cover
        #     reveal_type(author)  # iter of relation proxy
        book = await Book.objects.create(title="Test", author=author)
        book2 = await Book.objects.select_related("author").get()
        await Book.objects.select_related("author").all()
        await author.books.all()
        assert book.author.name == "Test Author"
        assert book2.author.name == "Test Author"
        # if TYPE_CHECKING:  # pragma: no cover
        #     reveal_type(publisher)  # model method
        #     reveal_type(publishers)  # many to many
        #     reveal_type(publishers[0])  # item in m2m list
        #     reveal_type(next(p for p in publishers))  # item in m2m iterator
        #     # getting relation without __getattribute__
        #     reveal_type(authors)  # reverse many to many  # TODO: wrong
        #     reveal_type(book2)  # queryset get
        #     reveal_type(books)  # queryset all
        #     reveal_type(book)  # queryset - create
        #     reveal_type(query)  # queryset itself
        #     reveal_type(book.author)  # fk
        #     reveal_type(author.books)  # reverse fk relation proxy  # TODO: wrong
        #     reveal_type(author)  # another test for queryset get different model
        #     reveal_type(book.author.name)  # field on related model
        #     reveal_type(author_books)  # querysetproxy result for fk  # TODO: wrong
        #     reveal_type(author_books[0])  # item in qs proxy for fk  # TODO: wrong
        assert_type(book)
collerek-ormar-c09209a/tests/test_utils/ 0000775 0000000 0000000 00000000000 15130200524 0020312 5 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_utils/__init__.py 0000664 0000000 0000000 00000000000 15130200524 0022411 0 ustar 00root root 0000000 0000000 collerek-ormar-c09209a/tests/test_utils/test_models_helpers.py 0000664 0000000 0000000 00000000722 15130200524 0024731 0 ustar 00root root 0000000 0000000 from ormar.models.helpers.models import group_related_list
def test_group_related_list():
    """Related paths are grouped by head segment with sorted nested tails."""
    related_paths = [
        "friends__least_favourite_game",
        "least_favourite_game",
        "friends",
        "favourite_game",
        "friends__favourite_game",
    ]
    grouped = group_related_list(related_paths)
    assert grouped == {
        "least_favourite_game": [],
        "favourite_game": [],
        "friends": ["favourite_game", "least_favourite_game"],
    }
collerek-ormar-c09209a/tests/test_utils/test_queryset_utils.py 0000664 0000000 0000000 00000012546 15130200524 0025034 0 ustar 00root root 0000000 0000000 from ormar.queryset.utils import (
subtract_dict,
translate_list_to_dict,
update,
update_dict_from_list,
)
from tests.settings import create_config
base_ormar_config = create_config()
def test_list_to_dict_translation():
    """Double-underscore paths expand into a nested dict with Ellipsis leaves."""
    paths = ["aa", "bb", "cc__aa", "cc__bb", "cc__aa__xx", "cc__aa__yy"]
    expected = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx": Ellipsis, "yy": Ellipsis}, "bb": Ellipsis},
    }
    assert translate_list_to_dict(paths) == expected
def test_list_to_dict_translation_with_default():
    """A custom default becomes the leaf value and is copied per leaf."""
    shallow_result = translate_list_to_dict(["aa", "aa__inner", "bb"], default={})
    assert shallow_result == {"aa": {"inner": {}}, "bb": {}}

    deep_result = translate_list_to_dict(["aa", "aa__inner", "bb"], default={"foo": {}})
    assert deep_result == {"aa": {"foo": {}, "inner": {"foo": {}}}, "bb": {"foo": {}}}

    # Mutating one leaf must not leak into the other leaves.
    deep_result["aa"]["foo"]["bar"] = 1234
    assert deep_result == {
        "aa": {"foo": {"bar": 1234}, "inner": {"foo": {}}},
        "bb": {"foo": {}},
    }
def test_updating_dict_with_list():
    """update_dict_from_list merges new path strings into an existing dict."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx": Ellipsis, "yy": Ellipsis}, "bb": Ellipsis},
    }
    new_paths = ["ee", "bb__cc", "cc__aa__xx__oo", "cc__aa__oo"]
    merged = update_dict_from_list(existing, new_paths)
    expected = {
        "aa": Ellipsis,
        "bb": {"cc": Ellipsis},
        "cc": {
            "aa": {"xx": {"oo": Ellipsis}, "yy": Ellipsis, "oo": Ellipsis},
            "bb": Ellipsis,
        },
        "ee": Ellipsis,
    }
    assert merged == expected
def test_updating_dict_inc_set_with_list():
    """Paths merged into a dict containing a set leaf convert the set to a dict."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx", "yy"}, "bb": Ellipsis},
    }
    new_paths = ["uu", "bb__cc", "cc__aa__xx__oo", "cc__aa__oo"]
    merged = update_dict_from_list(existing, new_paths)
    expected = {
        "aa": Ellipsis,
        "bb": {"cc": Ellipsis},
        "cc": {
            "aa": {"xx": {"oo": Ellipsis}, "yy": Ellipsis, "oo": Ellipsis},
            "bb": Ellipsis,
        },
        "uu": Ellipsis,
    }
    assert merged == expected
def test_updating_dict_inc_set_with_dict():
    """update() merges dict-of-set structures, promoting sets to dicts as needed."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx", "yy"}, "bb": Ellipsis},
    }
    incoming = {
        "uu": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {"aa": {"xx": {"oo": Ellipsis}, "oo": Ellipsis}},
    }
    merged = update(existing, incoming)
    expected = {
        "aa": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {
            "aa": {"xx": {"oo": Ellipsis}, "yy": Ellipsis, "oo": Ellipsis},
            "bb": Ellipsis,
        },
        "uu": Ellipsis,
    }
    assert merged == expected
def test_subtracting_dict_inc_set_with_dict():
    """subtract_dict removes branches present in the subtrahend, keeping the rest."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx", "yy"}, "bb": Ellipsis},
    }
    to_remove = {
        "uu": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {"aa": {"xx": {"oo": Ellipsis}}, "bb": Ellipsis},
    }
    remaining = subtract_dict(existing, to_remove)
    assert remaining == {"aa": Ellipsis, "cc": {"aa": {"yy": Ellipsis}}}
def test_updating_dict_inc_set_with_dict_inc_set():
    """Merging two sets under the same key yields their union."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx", "yy"}, "bb": Ellipsis},
    }
    incoming = {
        "uu": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {"aa": {"xx", "oo", "zz", "ii"}},
    }
    merged = update(existing, incoming)
    assert merged == {
        "aa": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {"aa": {"xx", "yy", "oo", "zz", "ii"}, "bb": Ellipsis},
        "uu": Ellipsis,
    }
def test_subtracting_dict_inc_set_with_dict_inc_set():
    """Subtracting sets removes common members; fully-emptied keys disappear."""
    existing = {
        "aa": Ellipsis,
        "bb": Ellipsis,
        "cc": {"aa": {"xx", "yy"}, "bb": Ellipsis},
        "dd": {"aa", "bb"},
    }
    to_remove = {
        "aa": Ellipsis,
        "bb": {"cc", "dd"},
        "cc": {"aa": {"xx", "oo", "zz", "ii"}},
        "dd": {"aa", "bb"},
    }
    remaining = subtract_dict(existing, to_remove)
    assert remaining == {"cc": {"aa": {"yy"}, "bb": Ellipsis}}
def test_subtracting_with_set_and_dict():
    """subtract_dict prunes deeply nested branches: a whole top-level model given
    as Ellipsis and a set of relation names both remove their subtrees."""
    curr_dict = {
        "translation": {
            "filters": {
                "values": Ellipsis,
                "reports": {"report": {"charts": {"chart": Ellipsis}}},
            },
            "translations": {"language": Ellipsis},
            "filtervalues": {
                "filter": {"reports": {"report": {"charts": {"chart": Ellipsis}}}}
            },
        },
        "chart": {
            "reports": {
                "report": {
                    "filters": {
                        "filter": {
                            "translation": {
                                "translations": {"language": Ellipsis},
                                "filtervalues": Ellipsis,
                            },
                            "values": {
                                "translation": {"translations": {"language": Ellipsis}}
                            },
                        }
                    }
                }
            }
        },
    }
    dict_to_update = {
        "chart": Ellipsis,
        "translation": {"filters", "filtervalues", "chartcolumns"},
    }
    test = subtract_dict(curr_dict, dict_to_update)
    # only translation.translations.language survives the subtraction
    assert test == {"translation": {"translations": {"language": Ellipsis}}}