pygeofilter-0.3.3/.github/workflows/main.yml
name: build ⚙️
on: [ push, pull_request ]
jobs:
test:
runs-on: ubuntu-24.04
strategy:
matrix:
python-version: ['3.9', '3.10', '3.11', '3.12', '3.13']
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v5
name: Setup Python ${{ matrix.python-version }}
with:
python-version: ${{ matrix.python-version }}
cache: pip
- name: Install requirements
run: |
sudo apt-get update
sudo apt-get install -y binutils gdal-bin libgdal-dev libproj-dev libsqlite3-mod-spatialite spatialite-bin
pip3 install -r requirements-test.txt
pip3 install -r requirements-dev.txt
pip3 install gdal=="`gdal-config --version`.*"
pip3 install .
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Install and run Elasticsearch 📦
uses: getong/elasticsearch-action@v1.2
with:
elasticsearch version: '8.2.2'
host port: 9200
container port: 9200
host node port: 9300
node port: 9300
discovery type: 'single-node'
- name: Install and run Solr 📦
uses: OSGeo/solr-action@main
with:
solr_version: 9.8.1
host_port: 8983
container_port: 8983
- name: Install and run OpenSearch 📦
uses: esmarkowski/opensearch-github-action@v1.0.0
with:
version: 2.18.0
security-disabled: true
port: 9209
- name: Run unit tests
run: |
pytest
# - name: run pre-commit (code formatting, lint and type checking)
# run: |
# python -m pip3 install pre-commit
# pre-commit run --all-files
pygeofilter-0.3.3/.github/workflows/publish.yml
name: publish
on:
push:
tags:
- release-*
- 'v*'
jobs:
publish:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v5
name: Setup Python
with:
python-version: '3.11'
- name: Install build dependency
run: pip3 install wheel setuptools
- name: Build package
run: python3 setup.py sdist bdist_wheel --universal
- name: Publish package
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
pygeofilter-0.3.3/.github/workflows/release-please.yml
on:
push:
branches:
- main
name: release-please
jobs:
release-please:
runs-on: ubuntu-latest
steps:
- uses: googleapis/release-please-action@v4
with:
token: ${{ secrets.PAT_WORKFLOW }}
release-type: python
pygeofilter-0.3.3/.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
.doctrees
.vscode
.idea
pygeofilter-0.3.3/.pre-commit-config.yaml
repos:
- repo: https://github.com/psf/black
rev: 25.1.0
hooks:
- id: black
language_version: python
- repo: https://github.com/PyCQA/isort
rev: 6.0.1
hooks:
- id: isort
language_version: python
- repo: https://github.com/PyCQA/flake8
rev: 7.2.0
hooks:
- id: flake8
language_version: python
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.15.0
hooks:
- id: mypy
language_version: python
args: [--install-types, --non-interactive]
# N.B.: Mypy is... a bit fragile.
# ref: https://github.com/python/mypy/issues/4008
# The issue is that we have too many evaluate.py or parser.py and mypy believe they are all the same
# when run within pre-commit
files: ^pygeofilter*
pygeofilter-0.3.3/.readthedocs.yaml
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# Optionally set requirements required to build your docs
python:
install:
- requirements: docs/requirements.txt
- requirements: requirements-test.txt
pygeofilter-0.3.3/CHANGELOG.md
# Changelog
## [0.3.3](https://github.com/geopython/pygeofilter/compare/v0.3.2...v0.3.3) (2025-12-20)
### Bug Fixes
* **sql:** make the use of `ILIKE` opt-in ([472a416](https://github.com/geopython/pygeofilter/commit/472a41663c66dfc318c269d9886816990d3de138))
## [0.3.2](https://github.com/geopython/pygeofilter/compare/v0.3.1...v0.3.2) (2025-12-19)
### Bug Fixes
* **#68:** adds missing predicate expressions in lark grammar for NOT ... ([326fe0c](https://github.com/geopython/pygeofilter/commit/326fe0cf9c4c8d62198dee5686b749fc7f3ce613))
* **cql2_json:** missing envelope in instance check ([18c382b](https://github.com/geopython/pygeofilter/commit/18c382b6632598b3ea6186f0c03a6b5a56d828cd))
## [0.3.1](https://github.com/geopython/pygeofilter/compare/v0.3.0...v0.3.1) (2024-12-31)
### Bug Fixes
* **CI:** using separate file for tracking version to help with release-please action ([1c28b7c](https://github.com/geopython/pygeofilter/commit/1c28b7c45415ecedabd01570b114902f1d8f9310))
## [0.3.0](https://github.com/geopython/pygeofilter/compare/v0.2.4...v0.3.0) (2024-12-30)
### Features
* add support for OpenSearch backend ([#111](https://github.com/geopython/pygeofilter/pull/111))
* Update lark ([#110](https://github.com/geopython/pygeofilter/pull/110))
### Bug Fixes
* Handle boolean in ecql like cql_text ([#108](https://github.com/geopython/pygeofilter/pull/108))
* Fix compatibility with i386 ([#107](https://github.com/geopython/pygeofilter/pull/107))
* add FES parser import shortcut as other filter languages ([#102](https://github.com/geopython/pygeofilter/pull/102))
### Miscellaneous Chores
* release 0.3.0 ([48de1f1](https://github.com/geopython/pygeofilter/commit/48de1f128c4956a99d6760487146636122e119a3))
## [0.2.4](https://github.com/geopython/pygeofilter/compare/v0.2.3...v0.2.4) (2024-07-10)
### Bug Fixes
* bumping version to 0.2.4 ([21bc095](https://github.com/geopython/pygeofilter/commit/21bc0957c84244b7d39dbe164f00d143d952c684))
## [0.2.3](https://github.com/geopython/pygeofilter/compare/v0.2.2...v0.2.3) (2024-07-10)
### Bug Fixes
* adding dependency for publishing packages ([249926e](https://github.com/geopython/pygeofilter/commit/249926ef2ebe264b616ce0f039a8b0e1b8626dda))
## [0.2.2](https://github.com/geopython/pygeofilter/compare/v0.2.1...v0.2.2) (2024-07-10)
### Bug Fixes
* [#85](https://github.com/geopython/pygeofilter/issues/85) ([2f1a38f](https://github.com/geopython/pygeofilter/commit/2f1a38f8bc9dfe2ebf5c318c6121d7f51029a9cf))
* Addresses [#95](https://github.com/geopython/pygeofilter/issues/95). ([d51dbb0](https://github.com/geopython/pygeofilter/commit/d51dbb0eb7a1066bd97b81cffe99da11ebf3cba4))
* Addresses [#95](https://github.com/geopython/pygeofilter/issues/95). ([2a51990](https://github.com/geopython/pygeofilter/commit/2a519904c4ac408fabb39459104efcc3e09f3a40))
* Bump pre-commit dependencies ([90f4aaa](https://github.com/geopython/pygeofilter/commit/90f4aaaafe873c69b0ccd91e897a9ff218ef5110))
* Bump pre-commit dependencies ([64f7f96](https://github.com/geopython/pygeofilter/commit/64f7f962476665d4ae4eed750099a6c887ad21ca))
* Bump pre-commit dependencies ([11f1f9a](https://github.com/geopython/pygeofilter/commit/11f1f9ab71811da758aa67b13aeb2f0cce7aaa10))
* Enable custom handling of undefined field attr in to_filter ([23f172c](https://github.com/geopython/pygeofilter/commit/23f172cf1dd1ddb19791a761f128b001e887b361))
* Enable custom handling of undefined field attr in to_filter ([f0c7e9f](https://github.com/geopython/pygeofilter/commit/f0c7e9f36d55d80e1d17917a627ae5547c80363c))
* Enable custom handling of undefined field attr in to_filter ([d829c6b](https://github.com/geopython/pygeofilter/commit/d829c6be5254a45689d8bcdb52b28b8a5ed3b5b2))
* Support prefixed attribute names in cql2-text and ecql parsing ([dbe4e9e](https://github.com/geopython/pygeofilter/commit/dbe4e9e5c0c48698f312e1cc023a43ea78391f60))
* Support prefixed attribute names in cql2-text and ecql parsing ([5318c6b](https://github.com/geopython/pygeofilter/commit/5318c6bcf6e2620d39c8bc52fa13cc40e02274ac))
* Support prefixed attribute names in cql2-text and ecql parsing ([122a5a6](https://github.com/geopython/pygeofilter/commit/122a5a6c5ba746a51bf9eb36a5d9617201d19123))
* Updating release-please to v4 ([11757ec](https://github.com/geopython/pygeofilter/commit/11757eca4a7ba71fbca575636117b6eb8b3c9e53))
### [0.2.1](https://www.github.com/geopython/pygeofilter/compare/v0.2.0...v0.2.1) (2023-02-16)
### Bug Fixes
* dt naivety ([08fb5f5](https://www.github.com/geopython/pygeofilter/commit/08fb5f5f8b0a5ee39443a6233d558bbacadb5acb))
* order of date/datetime checking in native evaluator ([d37d7c8](https://www.github.com/geopython/pygeofilter/commit/d37d7c8cb483fdb9ff53ff9f871d5a8f85a227e1))
* pinning sqlalchemy to version < 2.0.0 ([6e67239](https://www.github.com/geopython/pygeofilter/commit/6e67239eb1af9a77599bbbc8cee211c9f906d95e))
* timezone handling for dates ([6c0e5c1](https://www.github.com/geopython/pygeofilter/commit/6c0e5c17ce5dde2dc541ccd6411c55d2a86e52ec))
## [0.2.0](https://www.github.com/geopython/pygeofilter/compare/v0.1.2...v0.2.0) (2022-10-17)
### Features
* adding initial elasticsearch implmentation ([2ccfa02](https://www.github.com/geopython/pygeofilter/commit/2ccfa02d5fcf1ee1f3be76f5cf375ace2556fa6c))
### [0.1.2](https://www.github.com/geopython/pygeofilter/compare/v0.1.1...v0.1.2) (2022-04-21)
### Bug Fixes
* Allowing intervals to actually contain subnodes ([83b7c63](https://www.github.com/geopython/pygeofilter/commit/83b7c63ad9233a9ed600f061d3b8e074291dcb8c))
### [0.1.1](https://www.github.com/geopython/pygeofilter/compare/v0.1.0...v0.1.1) (2022-02-08)
### Bug Fixes
* Fixing compatibility issues with Python 3.6 type checking ([ad7ddd7](https://www.github.com/geopython/pygeofilter/commit/ad7ddd7a332f838fa284e1493f0d3cc15036ad95))
* Improved typing ([2272b3b](https://www.github.com/geopython/pygeofilter/commit/2272b3b9371ff90fe5cbc9b8f84cbf6bb5cca76a))
* Improving structure of CI for type checking ([fb755a3](https://www.github.com/geopython/pygeofilter/commit/fb755a3859baf3a07f57938da2259b5c3fb74575))
* Improving typing ([6c3584b](https://www.github.com/geopython/pygeofilter/commit/6c3584b3961fe90cc07f08f6cc8f2256112850f3))
* Improving typing on CQL2 JSON ([e0747aa](https://www.github.com/geopython/pygeofilter/commit/e0747aa2d0dbcaedd49bd9bcf30e702da68aaa37))
* more concise type checking ([87e46a2](https://www.github.com/geopython/pygeofilter/commit/87e46a2c325fb5f1c1c92408369efdf263f387db))
* mypy dependency installation (using --non-interactive) ([84a1175](https://www.github.com/geopython/pygeofilter/commit/84a11752c48773650a063a767eb97a1fa149b0ac))
* Split up Django spatial filters ([484e0b3](https://www.github.com/geopython/pygeofilter/commit/484e0b3db483db76b6456593a33ee8598f72813d))
## [0.1.0](https://www.github.com/geopython/pygeofilter/compare/v0.1.0...v0.1.0) (2021-11-18)
### Features
* Fixing release-please package name ([2b666fc](https://www.github.com/geopython/pygeofilter/commit/2b666fc5b09c2ff15fa954f035a342542aa3577f))
### Miscellaneous Chores
* release 0.1.0 ([d5e4971](https://www.github.com/geopython/pygeofilter/commit/d5e49718f7f2c7936649217b286ebad42b168a23))
## 0.1.0 (2021-11-18)
### Features
* Merge pull request [#34](https://www.github.com/geopython/pygeofilter/issues/34) from geopython/cql2_json ([5d439b2](https://www.github.com/geopython/pygeofilter/commit/5d439b277e12b883f3132d4972d2979a8aefd92e))
pygeofilter-0.3.3/CONTRIBUTING.md
# Contributing to pygeofilter
We welcome contributions to pygeofilter, in the form of issues, bug fixes, documentation or suggestions for enhancements. This document sets out our guidelines and best practices for such contributions.
It's based on the [Contributing to pygeoapi](https://github.com/geopython/pygeoapi/blob/master/CONTRIBUTING.md) guide which is based on the [Contributing to Open Source Projects
Guide](https://contribution-guide-org.readthedocs.io/).
pygeofilter has the following modes of contribution:
- GitHub Commit Access
- GitHub Pull Requests
## Code of Conduct
Contributors to this project are expected to act respectfully toward others in accordance with the [OSGeo Code of Conduct](https://www.osgeo.org/code_of_conduct).
## Submitting Bugs
### Due Diligence
Before submitting a bug, please do the following:
* Perform __basic troubleshooting__ steps:
* __Make sure you're on the latest version.__ If you're not on the most
recent version, your problem may have been solved already! Upgrading is
always the best first step.
* [__Search the issue
tracker__](https://github.com/geopython/pygeofilter/issues)
to make sure it's not a known issue.
### What to put in your bug report
Make sure your report gets the attention it deserves: bug reports with missing information may be ignored or punted back to you, delaying a fix. The below constitutes a bare minimum; more info is almost always better:
* __What version of Python are you using?__ For example, are you using Python 3.8+, PyPy 2.0?
* __What operating system are you using?__ Windows (7, 8, 10, 32-bit, 64-bit), Mac OS X (10.7.4, 10.9.0), GNU/Linux (which distribution, which version?) Again, more detail is better.
* __Which version or versions of the software are you using?__ Ideally, you've followed the advice above and are on the latest version, but please confirm this.
* __How can we recreate your problem?__ Imagine that we have never used pygeofilter before and have downloaded it for the first time. Exactly what steps do we need to take to reproduce your problem?
## Contributions and Licensing
### Contributor License Agreement
Your contribution will be under our [license](https://github.com/geopython/pygeofilter/blob/main/LICENSE) as per [GitHub's terms of service](https://help.github.com/articles/github-terms-of-service/#6-contributions-under-repository-license).
### GitHub Commit Access
* Proposals to provide developers with GitHub commit access shall be raised on the pygeofilter [discussions page](https://github.com/geopython/pygeofilter/discussions). Committers shall be added by the project admin.
* Removal of commit access shall be handled in the same manner.
### GitHub Pull Requests
* Pull requests may include copyright in the source code header by the contributor if the contribution is significant or the contributor wants to claim copyright on their contribution.
* All contributors shall be listed at https://github.com/geopython/pygeofilter/graphs/contributors
* Unclaimed copyright, by default, is assigned to the main copyright holders as specified in https://github.com/geopython/pygeofilter/blob/main/LICENSE
### Version Control Branching
* Always __make a new branch__ for your work, no matter how small. This makes it easy for others to take just that one set of changes from your repository, in case you have multiple unrelated changes floating around.
* __Don't submit unrelated changes in the same branch/pull request!__ If it is not possible to review your changes quickly and easily, we may reject your request.
* __Base your new branch off of the appropriate branch__ on the main repository:
* In general the released version of pygeofilter is based on the ``main`` (default) branch whereas development work is done under other non-default branches. Unless you are sure that your issue affects a non-default branch, __base your branch off the ``main`` one__.
* Note that depending on how long it takes for the dev team to merge your
patch, the copy of ``main`` you worked off of may get out of date!
* If you find yourself 'bumping' a pull request that's been sidelined for a while, __make sure you rebase or merge to latest ``main``__ to ensure a speedier resolution.
### Documentation
* documentation is managed in `docs/`, in reStructuredText format
* [Sphinx](https://www.sphinx-doc.org) is used to generate the documentation
* See the [reStructuredText Primer](https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html) on rST markup and syntax
### Code Formatting
* __Please follow the coding conventions and style used in the pygeofilter repository.__
* pygeofilter follows the [PEP-8](http://www.python.org/dev/peps/pep-0008/) guidelines
* maximum line length of 80 characters
* spaces, not tabs
* pygeofilter, instead of PyGeoFilter, pygeoFilter, etc.
#### **pre-commit**
The project is using [`pre-commit`](https://pre-commit.com) to automatically run code formatting and type checking on new commits. Please install `pre-commit` and enable it on your environment before pushing new commits.
```bash
# Install pre-commit
pip3 install pre-commit
# Enable pre-commit
cd /pygeofilter
pre-commit install
# Optional - run pre-commit manually
pre-commit run --all-files
```
## Suggesting Enhancements
We welcome suggestions for enhancements, but reserve the right to reject them if they do not follow future plans for pygeofilter.
pygeofilter-0.3.3/Dockerfile-3.9
FROM python:3.9-slim-bullseye
LABEL description="Test executor"
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
binutils \
libproj-dev \
gdal-bin \
libgdal-dev \
libsqlite3-mod-spatialite \
spatialite-bin \
build-essential \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir /app
WORKDIR /app
COPY requirements-test.txt .
COPY requirements-dev.txt .
RUN pip3 install -r requirements-test.txt
RUN pip3 install -r requirements-dev.txt
RUN gdal_version=$(gdal-config --version) && \
pip install pygdal=="$gdal_version.*"
COPY pygeofilter pygeofilter
COPY tests tests
COPY README.md .
COPY setup.py .
RUN pip3 install -e .
RUN chmod +x tests/execute-tests.sh
CMD ["tests/execute-tests.sh"]
pygeofilter-0.3.3/LICENSE
MIT License
Copyright (c) 2021 geopython
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
pygeofilter-0.3.3/MANIFEST.in
recursive-include pygeofilter *.py *.lark
global-include *.lark
include README.md
include LICENSE
include requirements.txt
pygeofilter-0.3.3/README.md
# pygeofilter
pygeofilter is a pure Python parser implementation of OGC filtering standards.
[PyPI](https://badge.fury.io/py/pygeofilter)
[Build Status](https://github.com/geopython/pygeofilter/actions)
[Documentation](https://pygeofilter.readthedocs.io/en/latest/?badge=latest)
## Features
* Parsing of several filter encoding standards
* [CQL as defined in CSW 2.0](https://portal.ogc.org/files/?artifact_id=20555)
* [CQL JSON as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-json-schema)
* [JSON Filter Expressions (JFE)](https://github.com/tschaub/ogcapi-features/tree/json-array-expression/extensions/cql/jfe)
* Soon:
* [CQL Text as defined in OGC API - Features - Part 3: Filtering and the Common Query Language (CQL)](https://portal.ogc.org/files/96288#cql-bnf)
* [FES](http://docs.opengeospatial.org/is/09-026r2/09-026r2.html)
* Several backends included
* [Django](https://www.djangoproject.com/)
* [SQLAlchemy](https://www.sqlalchemy.org/)
* [(Geo)Pandas](https://pandas.pydata.org/)
* Native Python objects
## Installation
The package can be installed via PIP:
```bash
pip3 install pygeofilter
```
Some features require additional dependencies; currently this only affects the backends. To install them, list the relevant extras:
```bash
# for the Django backend
pip3 install pygeofilter[backend-django]
# for the sqlalchemy backend
pip3 install pygeofilter[backend-sqlalchemy]
# for the native backend
pip3 install pygeofilter[backend-native]
```
## Usage
pygeofilter can be used at various levels. It provides parsers for several filtering languages, such as ECQL or CQL-JSON, and each parser lives in its own sub-package:
```python
>>> from pygeofilter.parsers.ecql import parse as parse_ecql
>>> filters = parse_ecql(filter_expression)
>>> from pygeofilter.parsers.cql_json import parse as parse_json
>>> filters = parse_json(filter_expression)
```
Each parser creates an abstract syntax tree (AST) representation of that filter expression and thus unifies all possible languages to a single common denominator. All possible nodes are defined as classes in the `pygeofilter.ast` module.
### Inspection
The easiest way to inspect the resulting AST is to use the `get_repr` function, which returns a
nice string representation of what was parsed:
```python
>>> ast = pygeofilter.parsers.ecql.parse('id = 10')
>>> print(pygeofilter.ast.get_repr(ast))
ATTRIBUTE id = LITERAL 10.0
>>>
>>>
>>> filter_expr = '(number BETWEEN 5 AND 10 AND string NOT LIKE \'%B\') OR INTERSECTS(geometry, LINESTRING(0 0, 1 1))'
>>> print(pygeofilter.ast.get_repr(pygeofilter.parsers.ecql.parse(filter_expr)))
(
(
ATTRIBUTE number BETWEEN 5 AND 10
) AND (
ATTRIBUTE string NOT LIKE '%B'
)
) OR (
INTERSECTS(ATTRIBUTE geometry, Geometry(geometry={'type': 'LineString', 'coordinates': ((0.0, 0.0), (1.0, 1.0))}))
)
```
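The same node classes can also be constructed programmatically instead of being parsed from a filter string. Below is a minimal sketch, assuming the `pygeofilter.ast` dataclasses accept their operands positionally in the order shown by the reprs above:

```python
from pygeofilter import ast

# hand-built equivalent of parse("id = 10 AND name = 'foo'")
node = ast.And(
    ast.Equal(ast.Attribute('id'), 10),
    ast.Equal(ast.Attribute('name'), 'foo'),
)

print(ast.get_repr(node))
```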
### Evaluation
A parsed AST can then be evaluated and transformed into filtering mechanisms in the required context. Usually this is a language such as SQL or an object-relational mapper (ORM) interfacing a data store of some kind.
There are a number of pre-defined backends available to which parsed expressions can be applied. For the moment these include:
* Django
* sqlalchemy
* (Geo)Pandas
* Elasticsearch
* OpenSearch
* Pure Python object filtering
The usage of each backend is described in its own documentation.
pygeofilter provides mechanisms to help build such an evaluator (the included backends use them as well). The `Evaluator` class allows you to walk through an AST depth-first and build the filters for the API in question. Only handled node classes are evaluated; unsupported ones raise an exception.
Consider this example:
```python
from pygeofilter import ast
from pygeofilter.backends.evaluator import Evaluator, handle
from myapi import filters # <- this is where the filters are created.
# of course, this can also be done in the
# evaluator itself
# Evaluators must derive from the base class `Evaluator` to work
class MyAPIEvaluator(Evaluator):
# you can use any constructor as you need
def __init__(self, field_mapping=None, mapping_choices=None):
self.field_mapping = field_mapping
self.mapping_choices = mapping_choices
# specify the handled classes in the `handle` decorator to mark
# this function as the handler for that node class(es)
@handle(ast.Not)
def not_(self, node, sub):
return filters.negate(sub)
# multiple classes can be declared for the same handler function
@handle(ast.And, ast.Or)
def combination(self, node, lhs, rhs):
return filters.combine((lhs, rhs), node.op.value)
# handle all sub-classes, like ast.Equal, ast.NotEqual,
# ast.LessThan, ast.GreaterThan, ...
@handle(ast.Comparison, subclasses=True)
def comparison(self, node, lhs, rhs):
return filters.compare(
lhs,
rhs,
node.op.value,
self.mapping_choices
)
@handle(ast.Between)
def between(self, node, lhs, low, high):
return filters.between(
lhs,
low,
high,
node.not_
)
@handle(ast.Like)
def like(self, node, lhs):
return filters.like(
lhs,
node.pattern,
node.nocase,
node.not_,
self.mapping_choices
)
@handle(ast.In)
def in_(self, node, lhs, *options):
return filters.contains(
lhs,
options,
node.not_,
self.mapping_choices
)
def adopt(self, node, *sub_args):
# a "catch-all" function for node classes that are not
# handled elsewhere. Use with caution and raise exceptions
# yourself when a node class is not supported.
...
# ...further ast handlings removed for brevity
```
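Putting such an evaluator to work is then a matter of parsing an expression and walking it. The following is only a usage sketch: the `myapi.filters` module above is illustrative, and it assumes the `Evaluator` base class exposes an `evaluate` method that performs the depth-first walk and dispatches to the registered handlers:

```python
from pygeofilter.parsers.ecql import parse

# parse a filter expression and translate it using the custom evaluator
ast_root = parse("number BETWEEN 5 AND 10 AND string NOT LIKE '%B'")
evaluator = MyAPIEvaluator(field_mapping=None, mapping_choices=None)
api_filter = evaluator.evaluate(ast_root)  # whatever `myapi.filters` builds
```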
### Command line utility
pygeofilter also includes a command-line utility (`pygeofilter`). Current functionality includes testing filter parsing into the native AST. Examples:
```bash
# display installed version
pygeofilter --version
# parse subcommand functionality
pygeofilter parse
Usage: pygeofilter parse [OPTIONS] {cql_json|cql2_json|cql2_text|ecql|fes|jfe}
QUERY
Parse a query into an abstract syntax tree
Options:
-v, --verbosity [ERROR|WARNING|INFO|DEBUG]
Verbosity
--help Show this message and exit.
# parse a CQL2 text string into AST
pygeofilter parse cql2_text "title = 'birds'"
Parsing cql2_text query into AST
Equal(lhs=ATTRIBUTE title, rhs='birds')
# parse a FES filter (XML) into AST
pygeofilter parse fes '<fes:Filter xmlns:fes="http://www.opengis.net/fes/2.0"><fes:PropertyIsEqualTo><fes:ValueReference>title</fes:ValueReference><fes:Literal>birds</fes:Literal></fes:PropertyIsEqualTo></fes:Filter>'
Parsing fes query into AST
Equal(lhs=ATTRIBUTE title, rhs='birds')
```
## Testing
For testing, several requirements must be satisfied. These can be installed via pip:
```bash
pip3 install -r requirements-dev.txt
pip3 install -r requirements-test.txt
```
GDAL must also be available in the local environment.
The functionality can be tested using `pytest`.
```bash
python -m pytest
```
### Docker
To execute tests with Docker Compose:
```
./execute-tests.sh
```
## Backends
The following backends are shipped with `pygeofilter`. Some require additional dependencies; refer to the [installation](#installation) section for further details.
### Django
For Django there is a default backend implementation, where all the filters are translated to the
Django ORM. In order to use this integration, we need two dictionaries, one mapping the available
fields to the Django model fields, and one to map the fields that use `choices`. Consider the
following example models:
```python
from django.contrib.gis.db import models
optional = dict(null=True, blank=True)
class Record(models.Model):
identifier = models.CharField(max_length=256, unique=True, null=False)
geometry = models.GeometryField()
float_attribute = models.FloatField(**optional)
int_attribute = models.IntegerField(**optional)
str_attribute = models.CharField(max_length=256, **optional)
datetime_attribute = models.DateTimeField(**optional)
choice_attribute = models.PositiveSmallIntegerField(choices=[
(1, 'ASCENDING'),
(2, 'DESCENDING'),],
**optional)
class RecordMeta(models.Model):
record = models.ForeignKey(Record, on_delete=models.CASCADE, related_name='record_metas')
float_meta_attribute = models.FloatField(**optional)
int_meta_attribute = models.IntegerField(**optional)
str_meta_attribute = models.CharField(max_length=256, **optional)
datetime_meta_attribute = models.DateTimeField(**optional)
choice_meta_attribute = models.PositiveSmallIntegerField(choices=[
(1, 'X'),
(2, 'Y'),
(3, 'Z')],
**optional)
```
Now we can specify the field mappings and mapping choices to be used when applying the filters:
```python
FIELD_MAPPING = {
'identifier': 'identifier',
'geometry': 'geometry',
'floatAttribute': 'float_attribute',
'intAttribute': 'int_attribute',
'strAttribute': 'str_attribute',
'datetimeAttribute': 'datetime_attribute',
'choiceAttribute': 'choice_attribute',
# meta fields
'floatMetaAttribute': 'record_metas__float_meta_attribute',
'intMetaAttribute': 'record_metas__int_meta_attribute',
'strMetaAttribute': 'record_metas__str_meta_attribute',
'datetimeMetaAttribute': 'record_metas__datetime_meta_attribute',
'choiceMetaAttribute': 'record_metas__choice_meta_attribute',
}
MAPPING_CHOICES = {
'choiceAttribute': dict(Record._meta.get_field('choice_attribute').choices),
'choiceMetaAttribute': dict(RecordMeta._meta.get_field('choice_meta_attribute').choices),
}
```
Finally we are able to connect the CQL AST to the Django database models. We also provide factory
functions to parse the timestamps, durations, geometries and envelopes, so that they can be used
with the ORM layer:
```python
from pygeofilter.backends.django import to_filter
from pygeofilter.parsers.ecql import parse
cql_expr = 'strMetaAttribute LIKE \'%parent%\' AND datetimeAttribute BEFORE 2000-01-01T00:00:01Z'
ast = parse(cql_expr)
filters = to_filter(ast, FIELD_MAPPING, MAPPING_CHOICES)
qs = Record.objects.filter(**filters)
```
### SQL
`pygeofilter` provides a rudimentary way to create an SQL `WHERE` clause from an AST. The following example shows this usage in conjunction with the OGR `ExecuteSQL` function:
```python
from osgeo import ogr
from pygeofilter.backends.sql import to_sql_where
from pygeofilter.parsers.ecql import parse
FIELD_MAPPING = {
'str_attr': 'str_attr',
'maybe_str_attr': 'maybe_str_attr',
'int_attr': 'int_attr',
'float_attr': 'float_attr',
'date_attr': 'date_attr',
'datetime_attr': 'datetime_attr',
'point_attr': 'GEOMETRY',
}
FUNCTION_MAP = {
'sin': 'sin'
}
# parse the expression
ast = parse('int_attr > 6')
# open an OGR DataSource
data = ogr.Open(...)
# create the WHERE clause, field and function mappings must be provided
where = to_sql_where(ast, FIELD_MAPPING, FUNCTION_MAP)
# filter the DataSource to get a result Layer
layer = data.ExecuteSQL(f"""
SELECT id, str_attr, maybe_str_attr, int_attr, float_attr, date_attr, datetime_attr, GEOMETRY
FROM layer
WHERE {where}
""", None, "SQLite")
```
Note that it is vital to specify the `SQLite` dialect as this is the one used internally.
:warning: Input values are *not* sanitized or separated from the generated SQL text. This is due to compatibility with the OGR API, which does not allow passing the SQL and its arguments separately.
### Optimization
This is a special kind of backend, as the result of the AST evaluation is actually a new AST. The purpose of this backend is to eliminate static branches of the AST, potentially reducing the cost of an actual evaluation for filtering values.
What parts of an AST can be optimized:
- Arithmetic operations of purely static operands
- All predicates (spatial, temporal, array, `like`, `between`, `in`) if all of the operands are already static
- Functions, when passed in a special lookup table and all arguments are static
- `And` and `Or` combinators can be eliminated if either branch can be predicted
What cannot be optimized are branches that contain references to attributes or functions not passed in the dictionary.
The following example shows how a static computation can be optimized to a static value, replacing the whole branch of the AST:
```python
>>> import math
>>> from pygeofilter import ast
>>> from pygeofilter.parsers.ecql import parse
>>> from pygeofilter.backends.optimize import optimize
>>>
>>> root = parse('attr < sin(3.7) - 5')
>>> optimized_root = optimize(root, {'sin': math.sin})
>>> print(ast.get_repr(root))
ATTRIBUTE attr < (
(
sin (3.7)
) - 5
)
>>> print(ast.get_repr(optimized_root))
ATTRIBUTE attr < -5.529836140908493
```
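Branches that cannot be resolved are simply left in place. As a small sketch (assuming `optimize` also accepts an empty lookup table), omitting `sin` from the function map means the call cannot be evaluated, so the right-hand branch of the comparison is expected to stay unchanged:

```python
>>> # without `sin` in the lookup table the function call stays unresolved
>>> print(ast.get_repr(optimize(root, {})))
```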
pygeofilter-0.3.3/SECURITY.md
# pygeofilter Security Policy
## Reporting
Security/vulnerability reports **should not** be submitted through GitHub issues or public discussions, but instead please send your report
to **geopython-security nospam @ lists.osgeo.org** - (remove the blanks and 'nospam').
## Supported Versions
The pygeofilter developer team will release patches for security vulnerabilities for the following versions:
| Version | Supported |
| ------- | ------------------ |
| latest stable version | :white_check_mark: |
| previous versions | :x: |
pygeofilter-0.3.3/debian/changelog
pygeofilter (0.0.3-0~focal0) focal; urgency=low
* Initial packaging.
-- Angelos Tzotsos Tue, 12 Oct 2021 13:00:00 +0300
pygeofilter-0.3.3/debian/compat
9
pygeofilter-0.3.3/debian/control
Source: pygeofilter
Maintainer: Fabian Schindler
Uploaders: Angelos Tzotsos
Section: python
Priority: optional
Build-Depends: debhelper (>= 9),
python3-setuptools,
dh-python,
dpkg-dev (>= 1.16),
autoconf,
python3-all,
python3-all-dev
Standards-Version: 3.9.3
Homepage: https://github.com/geopython/pygeofilter
Package: python3-pygeofilter
Architecture: any
Section: web
Depends: ${shlibs:Depends},
${misc:Depends},
python3,
python3-click
Description: This package contains the pygeofilter library
.
pygeofilter is a pure Python parser implementation of OGC filtering standards.
pygeofilter-0.3.3/debian/copyright
This package was debianized by Angelos Tzotsos on
Tue, 12 Oct 2021 13:00:00 +0300.
It was downloaded from:
https://github.com/geopython/pygeofilter
Copyright:
Copyright (c) 2021 geopython
License:
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies of this Software or works derived from this Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
pygeofilter-0.3.3/debian/rules
#!/usr/bin/make -f
# -*- makefile -*-
# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1
export PYBUILD_NAME=pygeofilter
%:
dh $@ --with python3 --buildsystem pybuild
override_dh_auto_test:
@echo "nocheck set, not running tests"
pygeofilter-0.3.3/debian/source/format
3.0 (quilt)
pygeofilter-0.3.3/docker-compose.test.yml
services:
elasticsearch:
image: elasticsearch:8.2.2
ports:
- 9200:9200
- 9300:9300
environment:
discovery.type: single-node
xpack.security.enabled: false
xpack.security.http.ssl.enabled: false
ES_JAVA_OPTS: -Xms1g -Xmx1g
healthcheck:
test: ["CMD-SHELL", "curl -s http://localhost:9200/_cluster/health | grep -q '\"status\":\"green\"'"]
interval: 30s
timeout: 10s
retries: 3
tester:
build:
dockerfile: ./Dockerfile-3.9
environment:
PYGEOFILTER_ELASTIC_HOST: elasticsearch
depends_on:
elasticsearch:
condition: service_healthy
pygeofilter-0.3.3/docs/.gitignore
api
pygeofilter-0.3.3/docs/Makefile
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
pygeofilter-0.3.3/docs/conf.py
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(".."))
# -- Project information -----------------------------------------------------
project = "pygeofilter"
copyright = "2021, Fabian Schindler"
author = "Fabian Schindler"
# The short X.Y version
version = ""
# The full version, including alpha/beta/rc tags
release = "0.0.3"
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinxcontrib.apidoc",
"m2r2",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
# The master toctree document.
master_doc = "index"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = "pygeofilterdoc"
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"pygeofilter.tex",
"pygeofilter Documentation",
"Fabian Schindler",
"manual",
),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "pygeofilter", "pygeofilter Documentation", [author], 1)]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"pygeofilter",
"pygeofilter Documentation",
author,
"pygeofilter",
"One line description of project.",
"Miscellaneous",
),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ["search.html"]
# -- Extension configuration -------------------------------------------------
intersphinx_mapping = {
"python": ("https://python.readthedocs.org/en/latest/", None),
"django": ("https://django.readthedocs.org/en/latest/", None),
}
# apidoc configs:
apidoc_module_dir = "../pygeofilter"
apidoc_output_dir = "api"
# apidoc_excluded_paths = ['tests']
# apidoc_separate_modules = True
# apidoc_module_first = True
pygeofilter-0.3.3/docs/contributing.rst 0000664 0000000 0000000 00000000041 15121461241 0020227 0 ustar 00root root 0000000 0000000 .. mdinclude:: ../CONTRIBUTING.md pygeofilter-0.3.3/docs/index.rst 0000664 0000000 0000000 00000000330 15121461241 0016630 0 ustar 00root root 0000000 0000000 .. mdinclude:: ../README.md
.. toctree::
:maxdepth: 2
:caption: Contents:
license
contributing
api/modules
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
pygeofilter-0.3.3/docs/license.rst
License
=======
.. include:: ../LICENSE
   :literal:
pygeofilter-0.3.3/docs/make.bat
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.https://www.sphinx-doc.org
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
:end
popd
pygeofilter-0.3.3/docs/requirements.txt
sphinxcontrib-apidoc
m2r2
pygeofilter-0.3.3/examples/cql2.ipynb
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"id": "fe8453fa",
"metadata": {},
"outputs": [],
"source": [
"from pygeofilter.parsers.cql2_json import parse\n",
"from pygeofilter.backends.cql2_json import to_cql2\n",
"import json\n",
"import traceback\n",
"from lark import lark, logger, v_args\n",
"from pygeofilter.cql2 import BINARY_OP_PREDICATES_MAP\n"
]
},
{
"cell_type": "code",
"execution_count": 2,
"id": "b960603d",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"And(lhs=And(lhs=And(lhs=Equal(lhs=ATTRIBUTE collection, rhs='landsat8_l1tp'), rhs=LessEqual(lhs=ATTRIBUTE gsd, rhs=30)), rhs=LessEqual(lhs=ATTRIBUTE eo:cloud_cover, rhs=10)), rhs=GreaterEqual(lhs=ATTRIBUTE datetime, rhs=datetime.datetime(2021, 4, 8, 4, 39, 23, tzinfo=)))"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"from pygeofilter.parsers.cql2_text import parse as cql2_parse\n",
"cql2_parse(\"collection = 'landsat8_l1tp' AND gsd <= 30 AND eo:cloud_cover <= 10 AND datetime >= TIMESTAMP('2021-04-08T04:39:23Z')\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"id": "c5f47281",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Example 1\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 2\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 3\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 4\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 5\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 6\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 7\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 8\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 9\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 10\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 11\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n",
"Example 12\n",
"*******parsed trees match***************\n",
"*******reconstructed json matches*******\n",
"____________________________________________________________\n"
]
}
],
"source": [
"from pygeofilter.parsers.cql2_text import parse as text_parse\n",
"from pygeofilter.parsers.cql2_json import parse as json_parse\n",
"from pygeofilter.backends.cql2_json import to_cql2\n",
"import orjson\n",
"import json\n",
"import pprint\n",
"def pp(j):\n",
" print(orjson.dumps(j))\n",
"with open('tests/parsers/cql2_json/fixtures.json') as f:\n",
" examples = json.load(f)\n",
"\n",
"for k, v in examples.items():\n",
" parsed_text = None\n",
" parsed_json = None\n",
" print (k)\n",
" t=v['text'].replace('filter=','')\n",
" j=v['json']\n",
" # print('\\t' + t)\n",
" # pp(orjson.loads(j))\n",
" # print('*****')\n",
" try:\n",
" parsed_text=text_parse(t)\n",
" parsed_json=json_parse(j)\n",
" if parsed_text == parsed_json:\n",
" print('*******parsed trees match***************')\n",
" else:\n",
" print(parsed_text)\n",
" print('-----')\n",
" print(parsed_json)\n",
" if parsed_json is None or parsed_text is None:\n",
" raise Exception\n",
" if to_cql2(parsed_text) == to_cql2(parsed_json):\n",
" print('*******reconstructed json matches*******')\n",
" else:\n",
" pp(to_cql2(parsed_text))\n",
" print('-----')\n",
" pp(to_cql2(parsed_json))\n",
" except Exception as e:\n",
" print(parsed_text)\n",
" print(parsed_json)\n",
" print(j)\n",
" traceback.print_exc(f\"Error: {e}\")\n",
" pass\n",
" print('____________________________________________________________')\n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ac0bb004",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "pygeofilter",
"language": "python",
"name": "pygeofilter"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.10"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
pygeofilter-0.3.3/examples/test-solr-queries.py
# ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Magnar Martinsen
#
# ------------------------------------------------------------------------------
# Copyright (C) 2025 Norwegian Meteorological Institute
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
"""Sample script to test Solr queries"""
from pygeofilter.backends.solr import to_filter
from pygeofilter.parsers.ecql import parse
# AND
print('Testing AND')
ast = parse("title = 'test' AND description = 'test2'")
print('AST AND: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter AND: ', solr_filter)
print('\n')
# OR
print('Testing OR')
ast = parse("title = 'test' OR description = 'test2'")
print('AST OR: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter OR: ', solr_filter)
print('\n')
# =
print('Testing Equals =')
ast = parse("int_attribute = 5")
print('AST =: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter =: ', solr_filter)
print('\n')
# <>
print('Testing NOT EQUAL <>')
ast = parse("int_attribute <> 0.0")
print('AST <>: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter <>: ', solr_filter)
print('\n')
# <
print('Testing LessThan <')
ast = parse("float_attribute < 6")
print('AST <: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter <: ', solr_filter)
print('\n')
# >
print('Testing GreaterThan >')
ast = parse("float_attribute > 6")
print('AST >: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter >: ', solr_filter)
print('\n')
# <=
print('Testing LessEqual <=')
ast = parse("int_attribute <= 6")
print('AST <=: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter <=: ', solr_filter)
print('\n')
# >=
print('Testing GreaterEqual >=')
ast = parse("float_attribute >= 8")
print('AST >=: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter >=: ', solr_filter)
print('\n')
# Combination AND
print('Testing Combination AND')
ast = parse("int_attribute = 5 AND float_attribute < 6.0")
print('AST Combination AND: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter Combination AND: ', solr_filter)
print('\n')
# Combination OR
print('Testing Combination OR')
ast = parse("int_attribute = 6 OR float_attribute < 6.0")
print('AST Combination OR: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter Combination OR: ', solr_filter)
print('\n')
# Between
print('Testing BETWEEN')
ast = parse("float_attribute BETWEEN -1 AND 1")
print('AST BETWEEN: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter BETWEEN: ', solr_filter)
print('\n')
# NOT Between
print('Testing NOT BETWEEN')
ast = parse("int_attribute NOT BETWEEN 4 AND 6")
print('AST NOT BETWEEN: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter NOT BETWEEN: ', solr_filter)
print('\n')
# IS_NULL
print('Testing IS_NULL')
ast = parse("maybe_str_attribute IS NULL")
print('AST IS_NULL: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter IS_NULL: ', solr_filter)
print('\n')
# IS_NOT_NULL
print('Testing IS_NOT_NULL')
ast = parse("maybe_str_attribute IS NOT NULL")
print('AST IS_NOT_NULL: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter IS_NOT_NULL: ', solr_filter)
print('\n')
# IS_IN
print('Testing IN')
ast = parse("int_attribute IN ( 1, 2, 3, 4, 5 )")
print('AST IN: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter IN: ', solr_filter)
print('\n')
# IS_NOT_IN
print('Testing NOT IN')
ast = parse("int_attribute NOT IN ( 1, 2, 3, 4, 5 )")
print('AST NOT IN: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter NOT IN: ', solr_filter)
print('\n')
# LIKE
print('Testing LIKE')
ast = parse("str_attribute LIKE 'this is a test'")
print('AST LIKE: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter LIKE: ', solr_filter)
print('\n')
# LIKE %
print('Testing LIKE %')
ast = parse("str_attribute LIKE 'this is % test'")
print('AST LIKE %: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter LIKE %: ', solr_filter)
print('\n')
# NOT LIKE %
print('Testing NOT LIKE %')
ast = parse("str_attribute NOT LIKE '% another test'")
print('AST NOT LIKE %: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter NOT LIKE %: ', solr_filter)
print('\n')
# NOT LIKE .
print('Testing NOT LIKE .')
ast = parse("str_attribute NOT LIKE 'this is . test'")
print('AST NOT LIKE .: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter NOT LIKE .: ', solr_filter)
print('\n')
# ILIKE .
print('Testing ILIKE .')
ast = parse("str_attribute ILIKE 'THIS IS . TEST'")
print('AST ILIKE .: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter ILIKE .: ', solr_filter)
print('\n')
# ILIKE %
print('Testing ILIKE %')
ast = parse("str_attribute ILIKE 'THIS IS % TEST'")
print('AST ILIKE %: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter ILIKE %: ', solr_filter)
print('\n')
# EXISTS
print('Testing EXISTS')
ast = parse("extra_attr EXISTS")
print('AST EXISTS: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter EXISTS: ', solr_filter)
print('\n')
# DOES-NOT-EXIST
print('Testing DOES-NOT-EXIST')
ast = parse("extra_attr DOES-NOT-EXIST")
print('AST DOES-NOT-EXIST: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter DOES-NOT-EXIST: ', solr_filter)
print('\n')
# Testing temporal BEFORE
print('Testing datetime attribute BEFORE')
ast = parse("datetime_attribute BEFORE 2000-01-01T00:00:05.00Z")
print('AST BEFORE:', ast)
solr_filter = to_filter(ast)
print('datetime attribute BEFORE: ', solr_filter)
print('\n')
# Testing temporal AFTER
print('Testing datetime attribute AFTER')
ast = parse("datetime_attribute AFTER 2000-01-01T00:00:05.00Z")
print('AST AFTER:', ast)
solr_filter = to_filter(ast)
print('datetime attribute AFTER: ', solr_filter)
print('\n')
# Testing temporal DISJOINT (kept commented out)
# print('Testing datetime attribute DISJOINT')
# ast = ast.TimeDisjoint(
# ast.Attribute("datetime_attribute"),
# [
# parse_datetime("2000-01-01T00:00:05.00Z"),
# parse_datetime("2000-01-01T00:00:15.00Z"),
# ],
# )
# print('AST DISJOINT:', ast)
# solr_filter = to_filter(ast)
# print('datetime attribute DISJOINT: ', solr_filter)
# print('\n')
# Test spatial Intersects
print('Testing Spatial Intersects')
ast = parse("INTERSECTS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")
print('AST Spatial Intersects:', ast)
solr_filter = to_filter(ast)
print('Spatial Intersects: ', solr_filter)
print('\n')
# Test spatial Disjoint
print('Testing Spatial Disjoint')
ast = parse("DISJOINT(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")
print('AST Spatial Disjoint:', ast)
solr_filter = to_filter(ast)
print('Spatial Disjoint: ', solr_filter)
print('\n')
# Test spatial Within
print('Testing Spatial Within')
ast = parse("WITHIN(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")
print('AST Spatial Within:', ast)
solr_filter = to_filter(ast)
print('Spatial Within: ', solr_filter)
print('\n')
# Test spatial Contains
print('Testing Spatial Contains')
ast = parse("CONTAINS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")
print('AST Spatial Contains:', ast)
solr_filter = to_filter(ast)
print('Spatial Contains: ', solr_filter)
print('\n')
# Test spatial Equals
print('Testing Spatial Equals')
ast = parse("EQUALS(geometry, ENVELOPE (0.0 1.0 0.0 1.0))")
print('AST Spatial Equals:', ast)
solr_filter = to_filter(ast)
print('Spatial Equals: ', solr_filter)
print('\n')
# Test spatial BBOX
print('Testing Spatial BBOX')
ast = parse("BBOX(center, 2, 2, 3, 3)")
print('AST Spatial BBOX:', ast)
solr_filter = to_filter(ast)
print('Spatial BBOX: ', solr_filter)
print('\n')
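# Combined expression (illustrative addition, not part of the original sample
# script): the predicates exercised above can be mixed freely in a single ECQL
# expression and translated in one call.
print('Testing combined expression')
ast = parse("int_attribute = 5 AND str_attribute LIKE 'this%' AND BBOX(center, 2, 2, 3, 3)")
print('AST combined: ', ast)
solr_filter = to_filter(ast)
print('SOLR filter combined: ', solr_filter)
print('\n')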
pygeofilter-0.3.3/execute-tests.sh 0000775 0000000 0000000 00000000240 15121461241 0017200 0 ustar 00root root 0000000 0000000 #!/bin/bash
pushd "$(dirname "$0")"
dco="docker compose -f docker-compose.test.yml"
$dco build
$dco run --rm tester pytest
exit_code=$?
$dco down
exit $exit_code
pygeofilter-0.3.3/pygeofilter/ 0000775 0000000 0000000 00000000000 15121461241 0016374 5 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/__init__.py 0000664 0000000 0000000 00000003004 15121461241 0020502 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2019 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
from .version import __version__
__all__ = ["__version__"]
pygeofilter-0.3.3/pygeofilter/ast.py 0000664 0000000 0000000 00000043063 15121461241 0017543 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2019 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
from dataclasses import dataclass
from enum import Enum
from typing import ClassVar, List, Optional, Union
from . import values
AstType = Union["Node", values.ValueType, list]
ScalarAstType = Union["Node", int, float]
SpatialAstType = Union["Node", values.SpatialValueType]
TemporalAstType = Union["Node", values.TemporalValueType]
ArrayAstType = Union["Node", List[AstType]]
class Node:
"""The base class for all nodes that make up the AST of a CQL filter."""
inline: bool = False
def get_sub_nodes(self) -> List[AstType]:
"""Get a list of the sub-nodes of this node.
:return: a list of all sub-nodes
:rtype: list[Node]
"""
return []
def get_template(self) -> str:
"""Get a template string (using the ``.format`` method)
to represent the current node and sub-nodes. The template string
must provide a template replacement for each sub-node reported by
:func:`~pygeofilter.ast.Node.get_sub_nodes`.
:return: the template to render
"""
raise NotImplementedError
def __eq__(self, other) -> bool:
if not isinstance(other, self.__class__):
return False
self_dict = {
k: v.__geo_interface__ if hasattr(v, "__geo_interface__") else v
for k, v in self.__dict__.items()
}
other_dict = {
k: v.__geo_interface__ if hasattr(v, "__geo_interface__") else v
for k, v in other.__dict__.items()
}
return self_dict == other_dict
class Condition(Node):
"""The base class for all nodes representing a condition"""
pass
class Not(Condition):
"""
Node class to represent a negation condition.
:ivar sub_node: the condition node to be negated
:type sub_node: Node
"""
def __init__(self, sub_node: Node):
self.sub_node = sub_node
def get_sub_nodes(self) -> List[AstType]:
"""Returns the sub-node for the negated condition."""
return [self.sub_node]
def get_template(self) -> str:
return "NOT {}"
class CombinationOp(Enum):
AND = "AND"
OR = "OR"
@dataclass
class Combination(Condition):
"""Node class to represent a condition to combine two other conditions
using either AND or OR.
"""
lhs: Node
rhs: Node
op: ClassVar[CombinationOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{{}} {self.op.name} {{}}"
@classmethod
def from_items(cls, first, *rest) -> Node:
result = first
for item in rest:
result = cls(result, item)
return result
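# Illustrative note (not from the original source): ``from_items`` left-folds
# its arguments, so e.g. ``And.from_items(a, b, c)`` produces ``And(And(a, b), c)``.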
@dataclass
class And(Combination):
op: ClassVar[CombinationOp] = CombinationOp.AND
@dataclass
class Or(Combination):
op: ClassVar[CombinationOp] = CombinationOp.OR
class Predicate(Node):
"""The base class for all nodes representing a predicate"""
pass
class ComparisonOp(Enum):
EQ = "="
NE = "<>"
LT = "<"
LE = "<="
GT = ">"
GE = ">="
@dataclass
class Comparison(Predicate):
"""Node class to represent a comparison predicate: to compare two
expressions using a comparison operation.
"""
lhs: ScalarAstType
rhs: ScalarAstType
op: ClassVar[ComparisonOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{{}} {self.op.value} {{}}"
@dataclass
class Equal(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.EQ
@dataclass
class NotEqual(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.NE
@dataclass
class LessThan(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.LT
@dataclass
class LessEqual(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.LE
@dataclass
class GreaterThan(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.GT
@dataclass
class GreaterEqual(Comparison):
op: ClassVar[ComparisonOp] = ComparisonOp.GE
@dataclass
class Between(Predicate):
"""Node class to represent a BETWEEN predicate: to check whether an
expression value lies within a range.
"""
lhs: Node
low: ScalarAstType
high: ScalarAstType
not_: bool
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.low, self.high]
def get_template(self) -> str:
return f"{{}} {'NOT ' if self.not_ else ''}BETWEEN {{}} AND {{}}"
@dataclass
class Like(Predicate):
"""Node class to represent a wildcard string matching predicate."""
lhs: Node
pattern: str
nocase: bool
wildcard: str
singlechar: str
escapechar: str
not_: bool
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs]
def get_template(self) -> str:
return (
f"{{}} {'NOT ' if self.not_ else ''}"
f"{'I' if self.nocase else ''}LIKE '{self.pattern}'"
# TODO wildcard, singlechar, escapechar
)
@dataclass
class In(Predicate):
"""Node class to represent a list membership (IN) predicate."""
lhs: AstType
sub_nodes: List[AstType]
not_: bool
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs] + list(self.sub_nodes)
def get_template(self) -> str:
return (
f"{{}} {'NOT ' if self.not_ else ''}IN "
f"{', '.join(['{}'] * len(self.sub_nodes))}"
)
@dataclass
class IsNull(Predicate):
"""Node class to represent a null check (IS NULL) predicate."""
lhs: AstType
not_: bool
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs]
def get_template(self) -> str:
return f"{{}} IS {('NOT ' if self.not_ else '')}NULL"
@dataclass
class Exists(Predicate):
lhs: AstType
not_: bool
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs]
def get_template(self) -> str:
return f"{{}} {('DOES-NOT-EXIST' if self.not_ else 'EXISTS')}"
@dataclass
class Include(Predicate):
not_: bool
def get_template(self) -> str:
return "EXCLUDE" if self.not_ else "INCLUDE"
# https://portal.ogc.org/files/96288#enhanced-temporal-operators
# BEFORE <======> <-----> AFTER
# MEETS <----------> METBY
# TOVERLAPS <--------------> OVERLAPPEDBY
# BEGINS <------------------> BEGUNBY
# DURING <----------------------> TCONTAINS
# TENDS <----------> ENDEDBY
# TEQUALS <------> TEQUALS
# DISJOINT: If a proper interval T1 is intervalDisjoint another proper
# interval T2,then the beginning of T1 is after the end of T2, or the end of
# T1 is before the beginning of T2, i.e. the intervals do not overlap in any
# way, but their ordering relationship is not known.
# https://github.com/geotools/geotools/blob/main/modules/library/cql/ECQL.md#temporal-predicate
# BEFORE_OR_DURING <----->
# DURING_OR_AFTER <----->
class TemporalComparisonOp(Enum):
DISJOINT = "DISJOINT"
AFTER = "AFTER"
BEFORE = "BEFORE"
BEGINS = "BEGINS"
BEGUNBY = "BEGUNBY"
TCONTAINS = "TCONTAINS"
DURING = "DURING"
ENDEDBY = "ENDEDBY"
ENDS = "ENDS"
TEQUALS = "TEQUALS"
MEETS = "MEETS"
METBY = "METBY"
TOVERLAPS = "TOVERLAPS"
OVERLAPPEDBY = "OVERLAPPEDBY"
BEFORE_OR_DURING = "BEFORE OR DURING"
DURING_OR_AFTER = "DURING OR AFTER"
@dataclass
class TemporalPredicate(Predicate):
"""Node class to represent temporal predicate."""
lhs: TemporalAstType
rhs: TemporalAstType
op: ClassVar[TemporalComparisonOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{{}} {self.op} {{}}"
@dataclass
class TimeDisjoint(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DISJOINT
@dataclass
class TimeAfter(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.AFTER
@dataclass
class TimeBefore(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEFORE
@dataclass
class TimeBegins(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEGINS
@dataclass
class TimeBegunBy(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEGUNBY
@dataclass
class TimeContains(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TCONTAINS
@dataclass
class TimeDuring(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DURING
@dataclass
class TimeEndedBy(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.ENDEDBY
@dataclass
class TimeEnds(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.ENDS
@dataclass
class TimeEquals(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TEQUALS
@dataclass
class TimeMeets(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.MEETS
@dataclass
class TimeMetBy(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.METBY
@dataclass
class TimeOverlaps(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.TOVERLAPS
@dataclass
class TimeOverlappedBy(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.OVERLAPPEDBY
@dataclass
class TimeBeforeOrDuring(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.BEFORE_OR_DURING
@dataclass
class TimeDuringOrAfter(TemporalPredicate):
op: ClassVar[TemporalComparisonOp] = TemporalComparisonOp.DURING_OR_AFTER
class ArrayComparisonOp(Enum):
AEQUALS = "AEQUALS"
ACONTAINS = "ACONTAINS"
ACONTAINEDBY = "ACONTAINEDBY"
AOVERLAPS = "AOVERLAPS"
@dataclass
class ArrayPredicate(Predicate):
"""Node class to represent array predicates."""
lhs: ArrayAstType
rhs: ArrayAstType
op: ClassVar[ArrayComparisonOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{{}} {self.op} {{}}"
@dataclass
class ArrayEquals(ArrayPredicate):
op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.AEQUALS
@dataclass
class ArrayContains(ArrayPredicate):
op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.ACONTAINS
@dataclass
class ArrayContainedBy(ArrayPredicate):
op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.ACONTAINEDBY
@dataclass
class ArrayOverlaps(ArrayPredicate):
op: ClassVar[ArrayComparisonOp] = ArrayComparisonOp.AOVERLAPS
class SpatialComparisonOp(Enum):
INTERSECTS = "INTERSECTS"
DISJOINT = "DISJOINT"
CONTAINS = "CONTAINS"
WITHIN = "WITHIN"
TOUCHES = "TOUCHES"
CROSSES = "CROSSES"
OVERLAPS = "OVERLAPS"
EQUALS = "EQUALS"
@dataclass
class SpatialComparisonPredicate(Predicate):
"""Node class to represent spatial relation predicate."""
lhs: SpatialAstType
rhs: SpatialAstType
op: ClassVar[SpatialComparisonOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{self.op.name}({{}}, {{}})"
@dataclass
class GeometryIntersects(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.INTERSECTS
@dataclass
class GeometryDisjoint(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.DISJOINT
@dataclass
class GeometryContains(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.CONTAINS
@dataclass
class GeometryWithin(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.WITHIN
@dataclass
class GeometryTouches(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.TOUCHES
@dataclass
class GeometryCrosses(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.CROSSES
@dataclass
class GeometryOverlaps(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.OVERLAPS
@dataclass
class GeometryEquals(SpatialComparisonPredicate):
op: ClassVar[SpatialComparisonOp] = SpatialComparisonOp.EQUALS
@dataclass
class Relate(Predicate):
"""Node class to represent a spatial RELATE predicate using an intersection pattern."""
lhs: SpatialAstType
rhs: SpatialAstType
pattern: str
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"RELATE({{}}, {{}}, '{self.pattern}')"
class SpatialDistanceOp(Enum):
DWITHIN = "DWITHIN"
BEYOND = "BEYOND"
@dataclass
class SpatialDistancePredicate(Predicate):
"""Node class to represent a spatial distance predicate."""
lhs: SpatialAstType
rhs: SpatialAstType
distance: float
units: str
op: ClassVar[SpatialDistanceOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{self.op.name}({{}}, {{}}, {self.distance}, '{self.units}')"
@dataclass
class DistanceWithin(SpatialDistancePredicate):
op: ClassVar[SpatialDistanceOp] = SpatialDistanceOp.DWITHIN
@dataclass
class DistanceBeyond(SpatialDistancePredicate):
op: ClassVar[SpatialDistanceOp] = SpatialDistanceOp.BEYOND
@dataclass
class BBox(Predicate):
"""Node class to represent a bounding box predicate."""
lhs: SpatialAstType
minx: float
miny: float
maxx: float
maxy: float
crs: Optional[str] = None
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs]
def get_template(self) -> str:
return (
f"BBOX({{}}, {self.minx}, {self.miny}, {self.maxx}, "
f"{self.maxy}, {repr(self.crs)})"
)
class Expression(Node):
"""The base class for all nodes representing expressions"""
pass
class Attribute(Expression):
"""Node class to represent attribute lookup expressions
:ivar name: the name of the attribute to be accessed
:type name: str
"""
inline = True
def __init__(self, name):
self.name = name
def __repr__(self):
return f"ATTRIBUTE {self.name}"
class ArithmeticOp(Enum):
ADD = "+"
SUB = "-"
MUL = "*"
DIV = "/"
@dataclass
class Arithmetic(Expression):
"""Node class to represent arithmetic operation expressions with two
sub-expressions and an operator.
"""
lhs: ScalarAstType
rhs: ScalarAstType
op: ClassVar[ArithmeticOp]
def get_sub_nodes(self) -> List[AstType]:
return [self.lhs, self.rhs]
def get_template(self) -> str:
return f"{{}} {self.op.value} {{}}"
@dataclass
class Add(Arithmetic):
op: ClassVar[ArithmeticOp] = ArithmeticOp.ADD
@dataclass
class Sub(Arithmetic):
op: ClassVar[ArithmeticOp] = ArithmeticOp.SUB
@dataclass
class Mul(Arithmetic):
op: ClassVar[ArithmeticOp] = ArithmeticOp.MUL
@dataclass
class Div(Arithmetic):
op: ClassVar[ArithmeticOp] = ArithmeticOp.DIV
@dataclass
class Function(Expression):
"""Node class to represent function invocations."""
name: str
arguments: List[AstType]
def get_sub_nodes(self) -> List[AstType]:
return self.arguments
def get_template(self) -> str:
return f"{self.name} ({', '.join(['{}'] * len(self.arguments))})"
def indent(text: str, amount: int, ch: str = " ") -> str:
padding = amount * ch
return "".join(padding + line for line in text.splitlines(True))
def get_repr(node: Node, indent_amount: int = 0, indent_incr: int = 4) -> str:
"""Get a debug representation of the given AST node. ``indent_amount``
and ``indent_incr`` are for the recursive call and don't need to be
passed.
"""
sub_nodes = node.get_sub_nodes()
template = node.get_template()
args = []
for sub_node in sub_nodes:
if isinstance(sub_node, Node) and not sub_node.inline:
args.append(
"(\n{}\n)".format(
indent(
get_repr(sub_node, indent_amount + indent_incr, indent_incr),
indent_amount + indent_incr,
)
)
)
else:
args.append(repr(sub_node))
return template.format(*args)
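# Illustrative usage sketch (not part of the original module): building a
# small AST by hand and rendering it for debugging.
#
#     >>> node = Not(Equal(Attribute("height"), 10))
#     >>> print(get_repr(node))
#
# This renders the ``NOT {}`` template with the nested comparison
# (``ATTRIBUTE height = 10``) indented inside parentheses.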
pygeofilter-0.3.3/pygeofilter/backends/ 0000775 0000000 0000000 00000000000 15121461241 0020146 5 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/backends/__init__.py 0000664 0000000 0000000 00000000000 15121461241 0022245 0 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/backends/cql2_json/ 0000775 0000000 0000000 00000000000 15121461241 0022040 5 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/backends/cql2_json/__init__.py 0000664 0000000 0000000 00000000065 15121461241 0024152 0 ustar 00root root 0000000 0000000 from .evaluate import to_cql2
__all__ = ["to_cql2"]
pygeofilter-0.3.3/pygeofilter/backends/cql2_json/evaluate.py 0000664 0000000 0000000 00000010340 15121461241 0024216 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler ,
# David Bitner
#
# ------------------------------------------------------------------------------
# Copyright (C) 2021 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
import json
from datetime import date, datetime
from typing import Dict, Optional
from ... import ast, values
from ...cql2 import get_op
from ..evaluator import Evaluator, handle
def json_serializer(obj):
if isinstance(obj, (datetime, date)):
return obj.isoformat()
if hasattr(obj, "name"):
return obj.name
raise TypeError(f"{obj} with type {type(obj)} is not serializable.")
class CQL2Evaluator(Evaluator):
def __init__(
self,
attribute_map: Optional[Dict[str, str]],
function_map: Optional[Dict[str, str]],
):
self.attribute_map = attribute_map
self.function_map = function_map
@handle(
ast.Condition,
ast.Comparison,
ast.TemporalPredicate,
ast.SpatialComparisonPredicate,
ast.Arithmetic,
ast.ArrayPredicate,
subclasses=True,
)
def comparison(self, node, *args):
op = get_op(node)
return {"op": op, "args": [*args]}
@handle(ast.Between)
def between(self, node, lhs, low, high):
return {"op": "between", "args": [lhs, [low, high]]}
@handle(ast.Like)
def like(self, node, *subargs):
return {"op": "like", "args": [subargs[0], node.pattern]}
@handle(ast.IsNull)
def isnull(self, node, arg):
ret = {"op": "isNull", "args": [arg]}
if node.not_:
ret = {"op": "not", "args": [ret]}
return ret
@handle(ast.Function)
def function(self, node, *args):
name = node.name.lower()
if name == "lower":
ret = {"lower": args[0]}
elif name == "upper":
ret = {"upper": args[0]}
else:
ret = {"function": name, "args": [*args]}
return ret
@handle(ast.In)
def in_(self, node, lhs, *options):
return {"op": "in", "args": [lhs, options]}
@handle(ast.Attribute)
def attribute(self, node: ast.Attribute):
return {"property": node.name}
@handle(values.Interval)
def interval(self, node: values.Interval, start, end):
return {"interval": [start, end]}
@handle(datetime)
def datetime(self, node: datetime):
return {"timestamp": node.isoformat()}
@handle(*values.LITERALS)
def literal(self, node):
return node
@handle(values.Geometry)
def geometry(self, node: values.Geometry):
return node.__geo_interface__
@handle(values.Envelope)
def envelope(self, node: values.Envelope):
return node.__geo_interface__
def to_cql2(
root: ast.Node,
field_mapping: Optional[Dict[str, str]] = None,
function_map: Optional[Dict[str, str]] = None,
) -> str:
return json.dumps(
CQL2Evaluator(field_mapping, function_map).evaluate(root),
default=json_serializer,
)
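# Illustrative usage sketch (assuming the ECQL parser shipped with
# pygeofilter): converting a parsed expression to a CQL2 JSON string.
#
#     >>> from pygeofilter.parsers.ecql import parse
#     >>> to_cql2(parse("height > 10"))
#
# yields a JSON document of the form
# ``{"op": "...", "args": [{"property": "height"}, 10]}``.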
pygeofilter-0.3.3/pygeofilter/backends/django/ 0000775 0000000 0000000 00000000000 15121461241 0021410 5 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/backends/django/__init__.py 0000664 0000000 0000000 00000000071 15121461241 0023517 0 ustar 00root root 0000000 0000000 from .evaluate import to_filter
__all__ = ["to_filter"]
pygeofilter-0.3.3/pygeofilter/backends/django/evaluate.py 0000664 0000000 0000000 00000012656 15121461241 0023602 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2019 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
import json
from django.contrib.gis.geos import GEOSGeometry, Polygon
from ... import ast, values
from ..evaluator import Evaluator, handle
from . import filters
class DjangoFilterEvaluator(Evaluator):
def __init__(self, field_mapping, mapping_choices):
self.field_mapping = field_mapping
self.mapping_choices = mapping_choices
@handle(ast.Not)
def not_(self, node, sub):
return filters.negate(sub)
@handle(ast.And, ast.Or)
def combination(self, node, lhs, rhs):
return filters.combine((lhs, rhs), node.op.value)
@handle(ast.Comparison, subclasses=True)
def comparison(self, node, lhs, rhs):
return filters.compare(lhs, rhs, node.op.value, self.mapping_choices)
@handle(ast.Between)
def between(self, node, lhs, low, high):
return filters.between(lhs, low, high, node.not_)
@handle(ast.Like)
def like(self, node, lhs):
return filters.like(
lhs, node.pattern, node.nocase, node.not_, self.mapping_choices
)
@handle(ast.In)
def in_(self, node, lhs, *options):
return filters.contains(lhs, options, node.not_, self.mapping_choices)
@handle(ast.IsNull)
def null(self, node, lhs):
return filters.null(lhs, node.not_)
# @handle(ast.ExistsPredicateNode)
# def exists(self, node, lhs):
# if self.use_getattr:
# result = hasattr(self.obj, node.lhs.name)
# else:
# result = lhs in self.obj
# if node.not_:
# result = not result
# return result
@handle(ast.TemporalPredicate, subclasses=True)
def temporal(self, node, lhs, rhs):
return filters.temporal(
lhs,
rhs,
node.op.value,
)
@handle(ast.SpatialComparisonPredicate, subclasses=True)
def spatial_operation(self, node, lhs, rhs):
return filters.spatial(
lhs,
rhs,
node.op.name,
)
@handle(ast.Relate)
def spatial_pattern(self, node, lhs, rhs):
return filters.spatial_relate(
lhs,
rhs,
pattern=node.pattern,
)
@handle(ast.SpatialDistancePredicate, subclasses=True)
def spatial_distance(self, node, lhs, rhs):
return filters.spatial_distance(
lhs,
rhs,
node.op.value,
distance=node.distance,
units=node.units,
)
@handle(ast.BBox)
def bbox(self, node, lhs):
return filters.bbox(lhs, node.minx, node.miny, node.maxx, node.maxy, node.crs)
@handle(ast.Attribute)
def attribute(self, node):
return filters.attribute(node.name, self.field_mapping)
@handle(ast.Arithmetic, subclasses=True)
def arithmetic(self, node, lhs, rhs):
return filters.arithmetic(lhs, rhs, node.op.value)
# TODO: map functions
# @handle(ast.FunctionExpressionNode)
# def function(self, node, *arguments):
# return self.function_map[node.name](*arguments)
@handle(*values.LITERALS)
def literal(self, node):
return filters.literal(node)
@handle(values.Interval)
def interval(self, node, start, end):
return filters.literal((start, end))
@handle(values.Geometry)
def geometry(self, node):
return GEOSGeometry(json.dumps(node.__geo_interface__))
@handle(values.Envelope)
def envelope(self, node):
return Polygon.from_bbox((node.x1, node.y1, node.x2, node.y2))
def to_filter(root, field_mapping=None, mapping_choices=None):
"""Helper function to translate ECQL AST to Django Query expressions.
:param root: the abstract syntax tree
:param field_mapping: a dict mapping from the filter name to the Django
field lookup.
:param mapping_choices: a dict mapping field lookups to choices.
:type root: :class:`Node`
:returns: a Django query object
:rtype: :class:`django.db.models.Q`
"""
return DjangoFilterEvaluator(field_mapping, mapping_choices).evaluate(root)
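# Illustrative usage sketch (``MyModel`` is a hypothetical Django model with a
# ``title`` field; the ECQL parser shipped with pygeofilter is assumed):
#
#     >>> from pygeofilter.parsers.ecql import parse
#     >>> MyModel.objects.filter(to_filter(parse("title = 'foo'")))
#
# The returned ``Q`` object can be combined with other query expressions as usual.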
pygeofilter-0.3.3/pygeofilter/backends/django/filters.py 0000664 0000000 0000000 00000042367 15121461241 0023446 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2019 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
from datetime import datetime, timedelta
from functools import reduce
from operator import add, and_, mul, or_, sub, truediv
from typing import Dict, List, Optional, Union
from django.contrib.gis.gdal import SpatialReference
from django.contrib.gis.geos import Polygon
from django.contrib.gis.measure import D
from django.db.models import F, Q, Value
from django.db.models.expressions import Expression
ArithmeticType = Union[Expression, F, Value, int, float]
# ------------------------------------------------------------------------------
# Filters
# ------------------------------------------------------------------------------
def combine(sub_filters: List[Q], combinator: str = "AND") -> Q:
"""Combine filters using a logical combinator"""
op = and_ if combinator == "AND" else or_
return reduce(lambda acc, q: op(acc, q) if acc else q, sub_filters)
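# Illustrative example (not from the original source):
# ``combine([Q(a=1), Q(b=2)], "AND")`` reduces to ``Q(a=1) & Q(b=2)``.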
def negate(sub_filter: Q) -> Q:
"""Negate a filter, opposing its meaning."""
return ~sub_filter
OP_TO_COMP = {"<": "lt", "<=": "lte", ">": "gt", ">=": "gte", "<>": None, "=": "exact"}
INVERT_COMP: Dict[Optional[str], str] = {
"lt": "gt",
"lte": "gte",
"gt": "lt",
"gte": "lte",
}
def compare(
lhs: Union[F, Value],
rhs: Union[F, Value],
op: str,
mapping_choices: Optional[Dict[str, Dict[str, str]]] = None,
) -> Q:
"""Compare a filter with an expression using a comparison operation
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param rhs: the filter expression
:type rhs: :class:`django.db.models.F`
:param op: a string denoting the operation. one of ``"<"``, ``"<="``,
``">"``, ``">="``, ``"<>"``, ``"="``
:type op: str
:param mapping_choices: a dict to lookup potential choices for a
certain field.
:type mapping_choices: dict[str, str]
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
comp = OP_TO_COMP[op]
# if the left hand side is not a field reference, the comparison
# can be inverted to check whether the right hand side is a field
# reference.
if not isinstance(lhs, F):
lhs, rhs = rhs, lhs
comp = INVERT_COMP.get(comp, comp)
# if neither lhs nor rhs is a field, we have to fail here
if not isinstance(lhs, F):
raise ValueError(f"Unable to compare non-field {lhs}")
field_name = lhs.name
if mapping_choices and field_name in mapping_choices:
try:
if isinstance(rhs, str):
rhs = mapping_choices[field_name][rhs]
elif hasattr(rhs, "value"):
rhs = Value(mapping_choices[field_name][rhs.value])
except KeyError as e:
raise AssertionError("Invalid field value %s" % e)
if comp:
return Q(**{"%s__%s" % (lhs.name, comp): rhs})
return ~Q(**{field_name: rhs})
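# Illustrative example (not from the original source):
# ``compare(F("height"), Value(10), ">")`` builds ``Q(height__gt=Value(10))``;
# if the field reference is on the right hand side instead, the operands are
# swapped and the comparison operator inverted first.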
def between(
lhs: F, low: Union[F, Value], high: Union[F, Value], not_: bool = False
) -> Q:
"""Create a filter to match elements that have a value within a certain
range.
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param low: the lower value of the range
:type low:
:param high: the upper value of the range
:type high:
:param not_: whether the predicate shall be negated (NOT BETWEEN)
:type not_: bool
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
q = Q(**{"%s__range" % lhs.name: (low, high)})
return ~q if not_ else q
def like(
lhs: F,
pattern: str,
nocase: bool = False,
not_: bool = False,
mapping_choices: Optional[Dict[str, Dict[str, str]]] = None,
) -> Q:
"""Create a filter to filter elements according to a string attribute
using wildcard expressions.
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param pattern: the wildcard pattern: a string containing any number of '%'
characters as wildcards.
:type pattern: str
:param nocase: whether the lookup shall be done case insensitively or not
:type nocase: bool
:param not_: whether the match shall be negated (NOT LIKE)
:type not_: bool
:param mapping_choices: a dict to lookup potential choices for a
certain field.
:type mapping_choices: dict[str, str]
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
parts = pattern.split("%")
length = len(parts)
if mapping_choices and lhs.name in mapping_choices:
# special case when choices are given for the field:
# compare statically and use 'in' operator to check if contained
cmp_av = [
(a, a.lower() if nocase else a) for a in mapping_choices[lhs.name].keys()
]
for idx, part in enumerate(parts):
if not part:
continue
cmp_p = part.lower() if nocase else part
if idx == 0 and length > 1: # startswith
cmp_av = [a for a in cmp_av if a[1].startswith(cmp_p)]
elif idx == 0: # exact matching
cmp_av = [a for a in cmp_av if a[1] == cmp_p]
elif idx == length - 1: # endswith
cmp_av = [a for a in cmp_av if a[1].endswith(cmp_p)]
else: # middle
cmp_av = [a for a in cmp_av if cmp_p in a[1]]
q = Q(
**{"%s__in" % lhs.name: [mapping_choices[lhs.name][a[0]] for a in cmp_av]}
)
else:
i = "i" if nocase else ""
q = None
for idx, part in enumerate(parts):
if not part:
continue
if idx == 0 and length > 1: # startswith
new_q = Q(**{"%s__%s" % (lhs.name, "%sstartswith" % i): part})
elif idx == 0: # exact matching
new_q = Q(**{"%s__%s" % (lhs.name, "%sexact" % i): part})
elif idx == length - 1: # endswith
new_q = Q(**{"%s__%s" % (lhs.name, "%sendswith" % i): part})
else: # middle
new_q = Q(**{"%s__%s" % (lhs.name, "%scontains" % i): part})
q = q & new_q if q else new_q
return ~q if not_ else q
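# Illustrative example (not from the original source): without mapping
# choices, ``like(F("title"), "fo%ar")`` splits the pattern on '%' and
# combines ``Q(title__startswith="fo") & Q(title__endswith="ar")``.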
def contains(
lhs: F,
items: List[Union[F, Value]],
not_: bool = False,
mapping_choices: Optional[Dict[str, Dict[str, str]]] = None,
) -> Q:
"""Create a filter to match elements attribute to be in a list of choices.
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param items: a list of choices
:type items: list
:param not_: whether the membership test shall be negated (NOT IN)
:type not_: bool
:param mapping_choices: a dict to lookup potential choices for a
certain field.
:type mapping_choices: dict[str, str]
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
if mapping_choices is not None and lhs.name in mapping_choices:
def map_value(
item: Union[str, Value], choices: Dict[str, str]
) -> Union[str, Value]:
try:
if isinstance(item, str):
item = choices[item]
elif isinstance(item, Value):
item = Value(choices[item.value])
except KeyError as e:
raise AssertionError("Invalid field value %s" % e)
return item
items = [map_value(item, mapping_choices[lhs.name]) for item in items]
q = Q(**{"%s__in" % lhs.name: items})
return ~q if not_ else q
def null(lhs: F, not_: bool = False) -> Q:
"""Create a filter to match elements whose attribute is (not) null
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param not_: whether the check shall be negated (IS NOT NULL)
:type not_: bool
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
return Q(**{"%s__isnull" % lhs.name: not not_})
def temporal(lhs: F, time_or_period: Value, op: str) -> Q:
"""Create a temporal filter for the given temporal attribute.
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param time_or_period: the time instant or time span to use as a filter
:type time_or_period: :class:`datetime.datetime` or a tuple of two
datetimes or a tuple of one datetime and one
:class:`datetime.timedelta`
:param op: the comparison operation. one of ``"BEFORE"``,
``"BEFORE OR DURING"``, ``"DURING"``, ``"DURING OR AFTER"``,
``"AFTER"``.
:type op: str
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
assert op in ("BEFORE", "BEFORE OR DURING", "DURING", "DURING OR AFTER", "AFTER")
time_or_period = time_or_period.value
low: Union[datetime, timedelta, None] = None
high: Union[datetime, timedelta, None] = None
if op in ("BEFORE", "AFTER"):
assert isinstance(time_or_period, datetime)
if op == "BEFORE":
high = time_or_period
else:
low = time_or_period
else:
low, high = time_or_period
low = low.value if isinstance(low, Value) else low
high = high.value if isinstance(high, Value) else high
assert isinstance(low, datetime) or isinstance(high, datetime)
if isinstance(low, timedelta) and isinstance(high, datetime):
low = high - low
if isinstance(low, datetime) and isinstance(high, timedelta):
high = low + high
if low and high:
return Q(**{"%s__range" % lhs.name: (low, high)})
elif low:
return Q(**{"%s__gte" % lhs.name: low})
else:
return Q(**{"%s__lte" % lhs.name: high})
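# Illustrative example (not from the original source): with a datetime value
# ``dt``, ``temporal(F("acquired"), Value(dt), "BEFORE")`` builds
# ``Q(acquired__lte=dt)``, while ``"AFTER"`` builds ``Q(acquired__gte=dt)``.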
def time_interval(
time_or_period: Value,
containment: str = "overlaps",
begin_time_field: str = "begin_time",
end_time_field: str = "end_time",
) -> Q:
"""Create a filter matching a time slice or interval against the given
begin/end time fields."""
gt_op = "__gte"
lt_op = "__lte"
is_slice = len(time_or_period) == 1
if is_slice:
value = time_or_period[0]
else:
low, high = time_or_period
if is_slice or (high == low and containment == "overlaps"):
return Q(
**{
begin_time_field + "__lte": time_or_period[0],
end_time_field + "__gte": time_or_period[0],
}
)
elif high == low:
return Q(**{begin_time_field + "__gte": low, end_time_field + "__lte": low})
else:
q = Q()
# check if the temporal bounds must be strictly contained
if containment == "contains":
if high is not None:
q &= Q(**{end_time_field + lt_op: high})
if low is not None:
q &= Q(**{begin_time_field + gt_op: low})
# or just overlapping
else:
if high is not None:
q &= Q(**{begin_time_field + lt_op: high})
if low is not None:
q &= Q(**{end_time_field + gt_op: low})
return q
UNITS_LOOKUP = {"kilometers": "km", "meters": "m"}
INVERT_SPATIAL_OP = {
"WITHIN": "CONTAINS",
"CONTAINS": "WITHIN",
}
def spatial(
lhs: Union[F, Value],
rhs: Union[F, Value],
op: str,
pattern: Optional[str] = None,
distance: Optional[float] = None,
units: Optional[str] = None,
) -> Q:
"""Create a spatial filter for the given spatial attribute.
:param lhs: the field to compare
:type lhs: :class:`django.db.models.F`
:param rhs: the time instant or time span to use as a filter
:type rhs:
:param op: the comparison operation. one of ``"INTERSECTS"``,
``"DISJOINT"``, `"CONTAINS"``, ``"WITHIN"``,
``"TOUCHES"``, ``"CROSSES"``, ``"OVERLAPS"``,
``"EQUALS"``, ``"RELATE"``, ``"DWITHIN"``, ``"BEYOND"``
:type op: str
:param pattern: the spatial relation pattern
:type pattern: str
:param distance: the distance value for distance based lookups:
``"DWITHIN"`` and ``"BEYOND"``
:type distance: float
:param units: the units the distance is expressed in
:type units: str
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
assert op in (
"INTERSECTS",
"DISJOINT",
"CONTAINS",
"WITHIN",
"TOUCHES",
"CROSSES",
"OVERLAPS",
"EQUALS",
"RELATE",
"DWITHIN",
"BEYOND",
)
# if the left hand side is not a field reference, the comparison
# can be inverted to check whether the right hand side is a field
# reference.
if not isinstance(lhs, F):
lhs, rhs = rhs, lhs
op = INVERT_SPATIAL_OP.get(op, op)
# if neither lhs nor rhs is a field, we have to fail here
if not isinstance(lhs, F):
raise ValueError(f"Unable to compare non-field {lhs}")
return Q(**{"%s__%s" % (lhs.name, op.lower()): rhs})
def spatial_relate(lhs: Union[F, Value], rhs: Union[F, Value], pattern: str) -> Q:
if not isinstance(lhs, F):
# TODO: cannot yet invert pattern -> raise
raise ValueError(f"Unable to compare non-field {lhs}")
return Q(**{"%s__relate" % lhs.name: (rhs, pattern)})
def spatial_distance(
lhs: Union[F, Value], rhs: Union[F, Value], op: str, distance: float, units: str
) -> Q:
if not isinstance(lhs, F):
lhs, rhs = rhs, lhs
# if neither lhs nor rhs is a field, we have to fail here
if not isinstance(lhs, F):
raise ValueError(f"Unable to compare non-field {lhs}")
# TODO: maybe use D.unit_attname(units)
d = D(**{UNITS_LOOKUP[units]: distance})
if op == "DWITHIN":
return Q(**{"%s__distance_lte" % lhs.name: (rhs, d, "spheroid")})
return Q(**{"%s__distance_gte" % lhs.name: (rhs, d, "spheroid")})
def bbox(
lhs: F,
minx: float,
miny: float,
maxx,
maxy: float,
crs: Optional[str] = None,
bboverlaps: bool = True,
) -> Q:
"""Create a bounding box filter for the given spatial attribute.
:param lhs: the field to compare
:param minx: the lower x part of the bbox
:type minx: float
:param miny: the lower y part of the bbox
:type miny: float
:param maxx: the upper x part of the bbox
:type maxx: float
:param maxy: the upper y part of the bbox
:type maxy: float
:param crs: the CRS the bbox is expressed in
:type crs: str
:type lhs: :class:`django.db.models.F`
:return: a comparison expression object
:rtype: :class:`django.db.models.Q`
"""
box = Polygon.from_bbox((minx, miny, maxx, maxy))
if crs:
box.srid = SpatialReference(crs).srid
box.transform(4326)
if bboverlaps:
return Q(**{"%s__bboverlaps" % lhs.name: box})
return Q(**{"%s__intersects" % lhs.name: box})
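# Illustrative example (not from the original source):
# ``bbox(F("geom"), 0, 0, 10, 10)`` builds
# ``Q(geom__bboverlaps=Polygon.from_bbox((0, 0, 10, 10)))``.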
def attribute(name: str, field_mapping: Optional[Dict[str, str]] = None) -> F:
"""Create an attribute lookup expression using a field mapping dictionary.
:param name: the field filter name
:type name: str
:param field_mapping: the dictionary to use as a lookup.
:rtype: :class:`django.db.models.F`
"""
if field_mapping:
field = field_mapping.get(name, name)
else:
field = name
return F(field)
def literal(value) -> Value:
return Value(value)
OP_TO_FUNC = {"+": add, "-": sub, "*": mul, "/": truediv}
def arithmetic(lhs: ArithmeticType, rhs: ArithmeticType, op: str) -> ArithmeticType:
"""Create an arithmetic filter
:param lhs: left hand side of the arithmetic expression. either a
scalar or a field lookup or another type of expression
:param rhs: same as `lhs`
:param op: the arithmetic operation. one of ``"+"``, ``"-"``, ``"*"``,
``"/"``
:rtype: :class:`django.db.models.F`
"""
func = OP_TO_FUNC[op]
return func(lhs, rhs)
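# Illustrative example (not from the original source):
# ``arithmetic(F("width"), Value(2), "*")`` applies ``operator.mul`` and thus
# returns the combined expression ``F("width") * Value(2)``.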
pygeofilter-0.3.3/pygeofilter/backends/elasticsearch/ 0000775 0000000 0000000 00000000000 15121461241 0022760 5 ustar 00root root 0000000 0000000 pygeofilter-0.3.3/pygeofilter/backends/elasticsearch/__init__.py 0000664 0000000 0000000 00000003057 15121461241 0025076 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2022 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
"""Elasticsearch backend for pygeofilter."""
from .evaluate import to_filter
__all__ = ["to_filter"]
pygeofilter-0.3.3/pygeofilter/backends/elasticsearch/evaluate.py 0000664 0000000 0000000 00000023541 15121461241 0025145 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2022 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
"""
Elasticsearch filter evaluator.
Uses elasticsearch-dsl package to create filter objects.
"""
# pylint: disable=E1130,C0103,W0223
from datetime import date, datetime
from typing import Dict, Optional, Union
from elasticsearch_dsl import Q
from packaging.version import Version
from ... import ast, values
from ..evaluator import Evaluator, handle
from .util import like_to_wildcard
VERSION_7_10_0 = Version("7.10.0")
COMPARISON_OP_MAP = {
ast.ComparisonOp.LT: "lt",
ast.ComparisonOp.LE: "lte",
ast.ComparisonOp.GT: "gt",
ast.ComparisonOp.GE: "gte",
}
ARITHMETIC_OP_MAP = {
ast.ArithmeticOp.ADD: "+",
ast.ArithmeticOp.SUB: "-",
ast.ArithmeticOp.MUL: "*",
ast.ArithmeticOp.DIV: "/",
}
class ElasticSearchDSLEvaluator(Evaluator):
"""A filter evaluator for Elasticsearch DSL."""
def __init__(
self,
attribute_map: Optional[Dict[str, str]] = None,
version: Optional[Version] = None,
):
self.attribute_map = attribute_map
self.version = version or Version("7.1.0")
@handle(ast.Not)
def not_(self, _, sub):
"""Inverts a filter object."""
return ~sub
@handle(ast.And)
def and_(self, _, lhs, rhs):
"""Joins two filter objects with an `and` operator."""
return lhs & rhs
@handle(ast.Or)
def or_(self, _, lhs, rhs):
"""Joins two filter objects with an `or` operator."""
return lhs | rhs
@handle(ast.Equal, ast.NotEqual)
def equality(self, node, lhs, rhs):
"""Creates a match filter."""
q = Q("match", **{lhs: rhs})
if node.op == ast.ComparisonOp.NE:
q = ~q
return q
@handle(ast.LessThan, ast.LessEqual, ast.GreaterThan, ast.GreaterEqual)
def comparison(self, node, lhs, rhs):
"""Creates a `range` filter."""
return Q("range", **{lhs: {COMPARISON_OP_MAP[node.op]: rhs}})
@handle(ast.Between)
def between(self, node: ast.Between, lhs, low, high):
"""Creates a `range` filter."""
q = Q("range", **{lhs: {"gte": low, "lte": high}})
if node.not_:
q = ~q
return q
@handle(ast.Like)
def like(self, node: ast.Like, lhs):
"""Transforms the provided LIKE pattern to an Elasticsearch wildcard
pattern. Thus, this only works properly on "wildcard" fields.
The case-insensitivity flag is ignored when the Elasticsearch version is below 7.10.0.
"""
pattern = like_to_wildcard(
node.pattern, node.wildcard, node.singlechar, node.escapechar
)
expr: Dict[str, Union[str, bool]] = {
"value": pattern,
}
if self.version >= VERSION_7_10_0:
expr["case_insensitive"] = node.nocase
q = Q("wildcard", **{lhs: expr})
if node.not_:
q = ~q
return q
@handle(ast.In)
def in_(self, node, lhs, *options):
"""Creates a `terms` filter."""
q = Q("terms", **{lhs: options})
if node.not_:
q = ~q
return q
@handle(ast.IsNull)
def null(self, node: ast.IsNull, lhs):
"""Performs a null check, by using the `exists` query on the given
field.
"""
q = Q("exists", field=lhs)
if not node.not_:
q = ~q
return q
@handle(ast.Exists)
def exists(self, node: ast.Exists, lhs):
"""Performs an existence check, by using the `exists` query on the
given field.
"""
q = Q("exists", field=lhs)
if node.not_:
q = ~q
return q
@handle(ast.TemporalPredicate, subclasses=True)
def temporal(self, node: ast.TemporalPredicate, lhs, rhs):
"""Creates a filter to match the given temporal predicate"""
op = node.op
if isinstance(rhs, (date, datetime)):
low = high = rhs
else:
low, high = rhs
query = "range"
not_ = False
predicate: Dict[str, Union[date, datetime, str]]
if op == ast.TemporalComparisonOp.DISJOINT:
not_ = True
predicate = {"gte": low, "lte": high}
elif op == ast.TemporalComparisonOp.AFTER:
predicate = {"gt": high}
elif op == ast.TemporalComparisonOp.BEFORE:
predicate = {"lt": low}
elif (
op == ast.TemporalComparisonOp.TOVERLAPS
or op == ast.TemporalComparisonOp.OVERLAPPEDBY
):
predicate = {"gte": low, "lte": high}
elif op == ast.TemporalComparisonOp.BEGINS:
query = "term"
predicate = {"value": low}
elif op == ast.TemporalComparisonOp.BEGUNBY:
query = "term"
predicate = {"value": high}
elif op == ast.TemporalComparisonOp.DURING:
predicate = {"gt": low, "lt": high, "relation": "WITHIN"}
elif op == ast.TemporalComparisonOp.TCONTAINS:
predicate = {"gt": low, "lt": high, "relation": "CONTAINS"}
# elif op == ast.TemporalComparisonOp.ENDS:
# pass
# elif op == ast.TemporalComparisonOp.ENDEDBY:
# pass
# elif op == ast.TemporalComparisonOp.TEQUALS:
# pass
# elif op == ast.TemporalComparisonOp.BEFORE_OR_DURING:
# pass
# elif op == ast.TemporalComparisonOp.DURING_OR_AFTER:
# pass
else:
raise NotImplementedError(f"Unsupported temporal operator: {op}")
q = Q(
query,
**{lhs: predicate},
)
if not_:
q = ~q
return q
@handle(
ast.GeometryIntersects,
ast.GeometryDisjoint,
ast.GeometryWithin,
ast.GeometryContains,
)
def spatial_comparison(self, node: ast.SpatialComparisonPredicate, lhs: str, rhs):
"""Creates a geo_shape query for the given spatial comparison
predicate.
"""
return Q(
"geo_shape",
**{
lhs: {
"shape": rhs,
"relation": node.op.value.lower(),
},
},
)
@handle(ast.BBox)
def bbox(self, node: ast.BBox, lhs):
"""Performs a geo_shape query for the given bounding box.
Ignores CRS parameter, as it is not supported by Elasticsearch.
"""
return Q(
"geo_shape",
**{
lhs: {
"shape": self.envelope(
values.Envelope(node.minx, node.maxx, node.miny, node.maxy)
),
"relation": "intersects",
},
},
)
@handle(ast.Attribute)
def attribute(self, node: ast.Attribute):
"""Attribute mapping from filter fields to elasticsearch fields.
If an attribute mapping is provided, it is used to look up the
field name from there.
"""
if self.attribute_map is not None:
return self.attribute_map[node.name]
return node.name
# @handle(ast.Arithmetic, subclasses=True)
# def arithmetic(self, node: ast.Arithmetic, lhs, rhs):
# op = ARITHMETIC_OP_MAP[node.op]
# return f"({lhs} {op} {rhs})"
# @handle(ast.Function)
# def function(self, node, *arguments):
# func = self.function_map[node.name]
# return f"{func}({','.join(arguments)})"
@handle(*values.LITERALS)
def literal(self, node):
"""Literal values are directly passed to elasticsearch-dsl"""
return node
@handle(values.Geometry)
def geometry(self, node: values.Geometry):
"""Geometry values are converted to a GeoJSON object"""
return node.geometry
@handle(values.Envelope)
def envelope(self, node: values.Envelope):
"""Envelope values are converted to a GeoJSON Elasticsearch
extension object."""
return {
"type": "envelope",
"coordinates": [
[
min(node.x1, node.x2),
max(node.y1, node.y2),
],
[
max(node.x1, node.x2),
min(node.y1, node.y2),
],
],
}
def to_filter(
root,
attribute_map: Optional[Dict[str, str]] = None,
version: Optional[str] = None,
):
"""Shorthand function to convert a pygeofilter AST to an Elasticsearch
filter structure.
"""
return ElasticSearchDSLEvaluator(
attribute_map, Version(version) if version else None
).evaluate(root)
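# Illustrative usage sketch (assuming the ECQL parser shipped with
# pygeofilter):
#
#     >>> from pygeofilter.parsers.ecql import parse
#     >>> to_filter(parse("int_attribute <= 6"), version="8.2.0")
#
# returns the elasticsearch-dsl query ``Q("range", int_attribute={"lte": 6})``.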
pygeofilter-0.3.3/pygeofilter/backends/elasticsearch/util.py 0000664 0000000 0000000 00000004321 15121461241 0024307 0 ustar 00root root 0000000 0000000 # ------------------------------------------------------------------------------
#
# Project: pygeofilter
# Authors: Fabian Schindler
#
# ------------------------------------------------------------------------------
# Copyright (C) 2022 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ------------------------------------------------------------------------------
"""General utilities for the Elasticsearch backend."""
import re
def like_to_wildcard(
value: str, wildcard: str, single_char: str, escape_char: str = "\\"
) -> str:
"""Adapts a "LIKE" pattern to create an elasticsearch "wildcard"
pattern.
"""
x_wildcard = re.escape(wildcard)
x_single_char = re.escape(single_char)
if escape_char == "\\":
x_escape_char = "\\\\\\\\"
else:
x_escape_char = re.escape(escape_char)
if wildcard != "*":
value = re.sub(
f"(?