pax_global_header00006660000000000000000000000064151271426400014514gustar00rootroot0000000000000052 comment=6bd0a3d619c8d422abf1f1a6b08a002ba242180b jg-rp-python-jsonpath-830094f/000077500000000000000000000000001512714264000161555ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/.github/000077500000000000000000000000001512714264000175155ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/.github/FUNDING.yml000066400000000000000000000000161512714264000213270ustar00rootroot00000000000000github: jg-rp jg-rp-python-jsonpath-830094f/.github/workflows/000077500000000000000000000000001512714264000215525ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/.github/workflows/lint.yaml000066400000000000000000000006351512714264000234100ustar00rootroot00000000000000name: lint on: [push, pull_request] jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade hatch - run: hatch run lint jg-rp-python-jsonpath-830094f/.github/workflows/tests-no-regex.yaml000066400000000000000000000007331512714264000253250ustar00rootroot00000000000000name: test-no-regex on: [push, pull_request] jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 with: submodules: true - name: Set up Python 3.11 uses: actions/setup-python@v4 with: python-version: "3.11" - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade hatch - run: hatch -e no-regex run test jg-rp-python-jsonpath-830094f/.github/workflows/tests.yaml000066400000000000000000000025661512714264000236110ustar00rootroot00000000000000name: tests on: [push, pull_request] jobs: tests: name: ${{ matrix.python-version }} on ${{ startsWith(matrix.os, 'macos-') && 'macOS' || startsWith(matrix.os, 'windows-') && 'Windows' || 'Linux' }} runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: os: [ubuntu-latest, windows-latest, macos-latest] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] exclude: - os: macos-latest python-version: "3.8" - os: windows-latest python-version: "3.8" - os: macos-latest python-version: "3.9" - os: windows-latest python-version: "3.9" - os: macos-latest python-version: "3.10" - os: windows-latest python-version: "3.10" - os: macos-latest python-version: "3.11" - os: windows-latest python-version: "3.11" steps: - uses: actions/checkout@v3 with: submodules: true - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade hatch - name: Test run: hatch run test jg-rp-python-jsonpath-830094f/.github/workflows/typing.yaml000066400000000000000000000006411512714264000237510ustar00rootroot00000000000000name: typing on: [push, pull_request] jobs: lint: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Set up Python 3.11 uses: actions/setup-python@v4 with: python-version: "3.11" - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade hatch - run: hatch run typing jg-rp-python-jsonpath-830094f/.gitignore000066400000000000000000000020571512714264000201510ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python env/ build/ 
develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover .hypothesis/ .pytest_cache/ cov.xml # Translations *.mo *.pot # Documentation docs/_build/ site/ # PyBuilder target/ # IPython Notebook .ipynb_checkpoints # pyenv .python-version # dotenv .env # virtualenv .venv/ venv/ ENV/ # Rope project settings .ropeproject # IDE .vscode/ .idea/ *.iml *.ipr .junie # Dev utils dev.py benchmark.py profile_.py # Test fixtures comparison_regression_suite.yaml cts.json # System .DS_Store # Rob-specific generated_temp/jg-rp-python-jsonpath-830094f/.gitmodules000066400000000000000000000003541512714264000203340ustar00rootroot00000000000000[submodule "tests/cts"] path = tests/cts url = https://github.com/jsonpath-standard/jsonpath-compliance-test-suite.git [submodule "tests/nts"] path = tests/nts url = https://github.com/jg-rp/jsonpath-compliance-normalized-paths.git jg-rp-python-jsonpath-830094f/CHANGELOG.md000066400000000000000000000576141512714264000200030ustar00rootroot00000000000000# Python JSONPath Change Log ## Version 2.0.2 **Fixes** - Fixed parsing of non-standard JSONPath regular expression literals containing an escaped solidus (`/`). This affected queries using the regex operator `=~`, like `$.some[?(@.thing =~ /fo\/[a-z]/)]`, not standard `match` and `search` functions. See [#124](https://github.com/jg-rp/python-jsonpath/issues/124). ## Version 2.0.1 **Fixes** - Fixed JSON pointers with negative indices. Previously, negative indices were resolved against array-like values, but the JSON Pointer specification (RFC 6901) does not permit negative array indexes. We now raise a `JSONPointerIndexError` when a JSON Pointer attempts to resolve an array element using a negative index. For users who require negative indices in JSON Pointers, you can set `JSONPointer.min_int_index` to a suitably negative integer, like `JSONPointer.min_int_index = -(2**53) + 1`. See [#116](https://github.com/jg-rp/python-jsonpath/pull/116). - Fixed the JSON Patch `add` operation. Previously, a `JSONPatchError` was raised when pointing to an array index equal to the array's length. Now we append to arrays in such cases. See [#117](https://github.com/jg-rp/python-jsonpath/issues/117). ## Version 2.0.0 **JSONPath syntax changes** These breaking changes affect the **default configuration** of Python JSONPath. Version 2 also introduces a new _strict mode_, which enforces full compliance with [RFC 9535](https://datatracker.ietf.org/doc/html/rfc9535). See [optional dependencies](https://jg-rp.github.io/python-jsonpath/#optional-dependencies) and the [syntax guide](https://jg-rp.github.io/python-jsonpath/syntax/) for details. - **Bracket notation** - unquoted property names are no longer treated as quoted names. - Before: `$[foo]`, `$['foo']`, and `$["foo"]` were equivalent. - Now: `$[foo]` is a _singular query selector_. With an implicit root identifier, `$.a[b]` is equivalent to `$.a[$.b]`. See [Singular query selector](https://jg-rp.github.io/python-jsonpath/syntax/#singular-query-selector). - **Filter expressions** - float literals must follow the RFC. 
- `.1` is now invalid (use `0.1`) - `1.` is now invalid (use `1.0`) - **Slice selectors** - indexes and steps must follow the RFC. - Leading zeros and negative zero are no longer valid and raise `JSONPathSyntaxError`. - **Dot notation** - no whitespace is allowed between `.` or `..` and the following name. Whitespace before the dot is still permitted. **JSONPath function extension changes** - Added the `startswith(value, prefix)` function extension. Returns `True` if both arguments are strings and `prefix` is a prefix of `value`. See the [filter functions](https://jg-rp.github.io/python-jsonpath/functions/#startswith) documentation. - Reimplemented the non-standard `keys()` function extension. It used to be a simple Python function, `jsonpath.function_extensions.keys`. Now it is a "well-typed" class, `jsonpath.function_extensions.Keys`. See the [filter functions](https://jg-rp.github.io/python-jsonpath/functions/#keys) documentation. - Added `cache_capacity`, `debug` and `thread_safe` arguments to `jsonpath.function_extensions.Match` and `jsonpath.function_extensions.Search` constructors. **JSONPath features** - Added the [Keys filter selector](https://jg-rp.github.io/python-jsonpath/syntax/#keys-filter-selector). - Added the [Singular query selector](https://jg-rp.github.io/python-jsonpath/syntax/#singular-query-selector). - Match and search function extensions now use the [`regex`](https://pypi.org/project/regex/) package (if installed) instead of `re`. See [optional dependencies](https://jg-rp.github.io/python-jsonpath/#optional-dependencies). - Added the `strict` argument to all [convenience functions](https://jg-rp.github.io/python-jsonpath/convenience/), the CLI and the `JSONPathEnvironment` constructor. When `strict=True`, all non-standard extensions and relaxed parsing rules are disabled. - Added class variable `JSONPathEnvironment.max_recursion_depth` to control the maximum recursion depth of descendant segments. - Improved exception messages (prettier, more informative). **Python API changes** - Renamed class variable `JSONPathEnvironment.fake_root_token` to `JSONPathEnvironment.pseudo_root_token`. **Low level API changes** These only affect projects customizing the JSONPath lexer or parser. - The tokens produced by the JSONPath lexer have changed. Previously we broadly skipped some punctuation and whitespace. Now the parser can make better choices about when to accept whitespace and do a better job of enforcing dots. - We've change the internal representation of compiled JSONPath queries. We now model segments and selectors explicitly and use terminology that matches RFC 9535. ## Version 1.3.2 **Fixes** - Fixed JSONPath filter context data in embedded JSONPath queries. We were failing to pass on said context data when resolving embedded queries. See [#103](https://github.com/jg-rp/python-jsonpath/issues/103). ## Version 1.3.1 **Fixes** - Fixed the non-standard JSON Patch operation, `addap`. Previously it was behaving like `addne`. See [#81](https://github.com/jg-rp/python-jsonpath/pull/81). - Fixed JSON Patch ops that operate on mappings and have a target that looks like an int. We now ensure the target is a string. See [#82](https://github.com/jg-rp/python-jsonpath/pull/82). ## Version 1.3.0 **Fixes** - Fixed `jsonpath.JSONPathMatch.path`. It is now a "normalized path" following section 2.7 of RFC 9535. - Fixed normalized slice indexes. We were failing to normalize some indexes given a negative step. 
**Other changes** - `jsonpath.match.NodeList` is now re-exported as `jsonpath.NodeList`. - Added `jsonpath.NodeList.paths()`, which returns a list of normalized paths, one for each node in the list. - Serialization of compiled JSONPath queries (instances of `jsonpath.JSONPath`) has changed. String literals inside filter selectors are now serialized using the canonical format, as described in section 2.7 of RFC 9535, and parentheses in filter selectors are kept to a minimum. ## Version 1.2.2 **Fixes** - Fixed parsing of bare name selectors that start with a reserved word. See [issue #72](https://github.com/jg-rp/python-jsonpath/issues/72). **Changes** - We've dropped support for Python 3.7, which was end of life in June 2023. ## Version 1.2.1 **Fixes** - Fixed the string representation regex literals in filter expressions. See [issue #70](https://github.com/jg-rp/python-jsonpath/issues/70). ## Version 1.2.0 **Fixes** - Fixed handling of JSONPath literals in filter expressions. We now raise a `JSONPathSyntaxError` if a filter expression literal is not part of a comparison, membership or function expression. See [jsonpath-compliance-test-suite#81](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite/pull/81). - Fixed parsing of number literals including an exponent. Upper case 'e's are now allowed. - Fixed handling of trailing commas in bracketed selection lists. We now raise a `JSONPathSyntaxError` in such cases. **Compliance** - Skipped tests for invalid escape sequences. The JSONPath spec is more strict than Python's JSON decoder when it comes to parsing `\u` escape sequences in string literals. We are adopting a policy of least surprise. The assertion is that most people will expect the JSONPath parser to behave the same as Python's JSON parser. See [jsonpath-compliance-test-suite #87](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite/pull/87). - Skipped tests for invalid integer and float literals. Same as above. We are deliberately choosing to match Python's int and float parsing behavior. See [jsonpath-compliance-test-suite #89](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite/pull/89). - Skipped tests for incorrect casing `true`, `false` and `null` literals. **Features** - Allow JSONPath filter expression membership operators (`contains` and `in`) to operate on object/mapping data as well as arrays/sequences. See [#55](https://github.com/jg-rp/python-jsonpath/issues/55). - Added a `select()` method to the JSONPath [query iterator interface](https://jg-rp.github.io/python-jsonpath/query/), generating a projection of each JSONPath match by selecting a subset of its values. - Added the `query()` method to the `JSONPath` class. Get a query iterator from an already compiled path. - Added the `addne` and `addap` operations to [JSONPatch](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.JSONPatch). `addne` (add if not exists) is like the standard `add` operation, but only adds object keys/values if the key does not exist. `addap` (add or append) is like the standard `add` operation, but assumes an index of `-` if the target index can not be resolved. ## Version 1.1.1 **Fixes** - Fixed evaluation of JSONPath singular queries when they appear in a logical expression and without a comparison operator. Previously we were evaluating logical expressions with the value held by the single element node list, now we treat such filter queries as existence tests. See [#57](https://github.com/jg-rp/python-jsonpath/issues/57). 
## Version 1.1.0 **Fixes** - Fixed logical operator precedence in JSONPath filter expressions. Previously, logical _or_ (`||`) and logical _and_ (`&&`) had equal precedence. Now `&&` binds more tightly than `||`, as per RFC 9535. - Fixed JSONPath bracketed selector list evaluation order. Previously we were iterating nodes for every list item, now we exhaust all matches for the first item before moving on to the next item. **Features** - Added the "query API", a fluent, chainable interface for manipulating `JSONPathMatch` iterators ([docs](https://jg-rp.github.io/python-jsonpath/query/), [source](https://github.com/jg-rp/python-jsonpath/blob/7665105de1501a5b2172f63a88db6d08b2b1702d/jsonpath/fluent_api.py#L17)). ## Version 1.0.0 [RFC 9535](https://datatracker.ietf.org/doc/html/rfc9535) (JSONPath: Query Expressions for JSON) is now out, replacing the [draft IETF JSONPath base](https://datatracker.ietf.org/doc/html/draft-ietf-jsonpath-base-21). **Breaking Changes** - The undocumented `keys` function extension is no longer enabled by default. A new, well-typed `keys` function is planned for the future. **Fixes** - The lexer now sorts environment-controlled tokens by their length in descending order. This allows one custom token to be a prefix of another. **Features** - Added the non-standard "fake root" identifier, which defaults to `^` and can be customized with the `fake_root_token` attribute on a `JSONPathEnvironment` subclass. Using the fake root identifier is equivalent to the standard root identifier (`$`), but wraps the target JSON value in an array, so the root value can be conditionally selected using a filter. - Non-standard environment-controlled tokens can now be disabled by setting them to the empty string. ## Version 0.10.3 **Breaking Changes** - Changed the exception raised when attempting to compare a non-singular filter query from `JSONPathSyntaxError` to `JSONPathTypeError`. **Fixes** - Fixed handling of relative and root queries when used as arguments to filter functions. Previously, when those queries resulted in an empty node list, we were converting them to an empty regular list before passing it to functions that accept _ValueType_ arguments. Now, in such cases, we convert empty node lists to the special result _Nothing_, which is required by the spec. - Fixed well-typedness checks on JSONPath logical expressions (those that involve `&&` or `||`) and non-singular filter queries. Previously we were erroneously applying the checks for comparison expressions to logical expressions too. Now non-singular queries in logical expressions act as an existence test. See [#45] (https://github.com/jg-rp/python-jsonpath/issues/45). ## Version 0.10.2 **Fixes** - Fixed precedence of the logical not operator in JSONPath filter expressions. Previously, logical _or_ and logical _and_ had priority over _not_. See [#41](https://github.com/jg-rp/python-jsonpath/issues/41). ## Version 0.10.1 **Hot fix** - Fixed priority of JSONPath lexer rules. Previously, standard short tokens (like `*` and `?`) had a higher priority than environment-controlled tokens (like `JSONPathEnvironment.keys_selector_token`), making it impossible to incorporate short token characters into longer environment-controlled tokens. ## Version 0.10.0 **Breaking Changes** - We now enforce JSONPath filter expression "well-typedness" by default. 
That is, filter expressions are checked at compile time according to the [IETF JSONPath Draft function extension type system](https://datatracker.ietf.org/doc/html/draft-ietf-jsonpath-base-21#section-2.4.1) and rules regarding non-singular query usage. If an expression is deemed to not be well-typed, a `JSONPathTypeError` is raised. This can be disabled in Python JSONPath by setting the `well_typed` argument to `JSONPathEnvironment` to `False`, or using `--no-type-checks` on the command line. See [#33](https://github.com/jg-rp/python-jsonpath/issues/33). - The JSONPath lexer and parser have been refactored to accommodate [#30](https://github.com/jg-rp/python-jsonpath/issues/30). As a result, the tokens generated by the lexer and the ATS built by the parser have changed significantly. In the unlikely event that anyone is customizing the lexer or parser through subclassing, please [open an issue](https://github.com/jg-rp/python-jsonpath/issues) and I'll provide more details. - Changed the normalized representation of JSONPath string literals to use double quotes instead of single quotes. - Changed the normalized representation of JSONPath filter expressions to not include parentheses unless the expression includes one or more logical operators. - The built-in implementation of the standard `length()` filter function is now a class and is renamed to `jsonpath.function_extensions.Length`. - The built-in implementation of the standard `value()` filter function is now a class and is renamed to `jsonpath.function_extensions.Value`. **Fixes** - We no longer silently ignore invalid escape sequences in JSONPath string literals. For example, `$['\"']` used to be OK, it now raises a `JSONPathSyntaxError`. See [#31](https://github.com/jg-rp/python-jsonpath/issues/31). - Fixed parsing of JSONPath integer literals that use scientific notation. Previously we raised a `JSONPathSyntaxError` for literals such as `1e2`. - Fixed parsing of JSONPath comparison and logical expressions as filter function arguments. Previously we raised a `JSONPathSyntaxError` if a comparison or logical expression appeared as a filter function argument. Note that none of the built-in, standard filter functions accept arguments of `LogicalType`. - Fixed parsing of nested JSONPath filter functions, where a function is used as an argument to another. - Fixed JSONPath bracketed segments. We now handle an arbitrary number of filter selectors alongside name, index, slice and wildcard selectors, separated by commas. See [#30](https://github.com/jg-rp/python-jsonpath/issues/30). ## Version 0.9.0 **Breaking Changes** - `CompoundJSONPath` instances are no longer updated in-place when using `.union()` and `.intersection()`. Instead, a new `CompoundJSONPath` is returned. `CompoundJSONPath.paths` is now a tuple instead of a list. **Fixes** - Fixed a bug with the parsing of JSON Pointers. When given an arbitrary string without slashes, `JSONPointer` would resolve to the document root. The empty string is the only valid pointer that should resolve to the document root. We now raise a `JSONPointerError` in such cases. See [#27](https://github.com/jg-rp/python-jsonpath/issues/27). - Fixed handling of JSON documents containing only a top-level string. **Features** - Added a command line interface, exposing JSONPath, JSON Pointer and JSON Patch features ([docs](https://jg-rp.github.io/python-jsonpath/cli/), [source](https://github.com/jg-rp/python-jsonpath/blob/main/jsonpath/cli.py)). 
- Added `JSONPointer.parent()`, a method that returns the parent of the pointer, as a new `JSONPointer` ([docs](https://jg-rp.github.io/python-jsonpath/pointers/#parent)). - Implemented `JSONPointer.__truediv__()` to allow creation of child pointers from an existing pointer using the slash (`/`) operator ([docs](https://jg-rp.github.io/python-jsonpath/pointers/#slash-operator)). - Added `JSONPointer.join()`, a method for creating child pointers. This is equivalent to using the slash (`/`) operator for each argument given to `join()` ([docs](https://jg-rp.github.io/python-jsonpath/pointers/#joinparts)). - Added `JSONPointer.exists()`, a method that returns `True` if a the pointer can be resolved against some data, or `False` otherwise ([docs](https://jg-rp.github.io/python-jsonpath/pointers/#existsdata)). - Added the `RelativeJSONPointer` class for building new `JSONPointer` instances from Relative JSON Pointer syntax ([docs](https://jg-rp.github.io/python-jsonpath/pointers/#torel), [API](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.RelativeJSONPointer)). - Added support for a non-standard index/property pointer using `#`. This is to support Relative JSON Pointer's use of hash (`#`) when building `JSONPointer` instances from relative JSON Pointers. - Added the `unicode_escape` argument to `JSONPathEnvironment`. When `True` (the default), UTF-16 escaped sequences found in JSONPath string literals will be decoded. ## Version 0.8.1 **Fixes** - Fixed the string representation of a `JSONPointer` when built using `JSONPointer.from_parts()` and pointing to the document root. See [#21](https://github.com/jg-rp/python-jsonpath/issues/21). ## Version 0.8.0 **Breaking changes** - Changed the `JSONPathMatch.parts` representation of the non-standard _keys_ selector (default `~`) to be `~` followed by the key name. It used to be two "parts", `~` and key index. - All `FilterExpression` subclasses must now implement `children()` and `set_children()`. These methods facilitate filter expression inspection and caching. **Fixes** - Changed `findall()` and `finditer()` to accept `data` arguments of any `io.IOBase` subclass, not just `TextIO`. **Features** - Added the `JSONPointer` class and methods for converting a `JSONPathMatch` to a `JSONPointer`. `JSONPointer` is compliant with [RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901) ([docs](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.JSONPointer)). - Added the `JSONPatch` class. `JSONPatch` implements [RFC 6902](https://datatracker.ietf.org/doc/html/rfc6902) ([docs](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.JSONPatch)). - Added `jsonpath.pointer.resolve()`, a convenience function for resolving a JSON Pointer ([docs](https://jg-rp.github.io/python-jsonpath/quickstart/#pointerresolvepointer-data)). - Added `jsonpath.patch.apply()`, a convenience function for applying a JSON Patch ([docs](https://jg-rp.github.io/python-jsonpath/quickstart/#patchapplypatch-data)). - Added `jsonpath.match()`, a convenience function returning a `JSONPathMatch` instance for the first match of a path, or `None` if there were no matches ([docs](https://jg-rp.github.io/python-jsonpath/quickstart/#matchpath-data)). - Added filter expression caching. Controlled with the `filter_caching` argument to `JSONPathEnvironment`, filter expression caching is enabled by default. See [#14] - All selectors now use `env.match_class` to instantiate new `JSONPathMatch` objects. This allows for subclassing of `JSONPathMatch`. 
- Added `jsonpath.filter.walk()` for the benefit of filter expression static analysis. ## Version 0.7.1 **Fixes** - Fixed a bug with the filter context selector (default `_`) when it's used as a filter function argument. ## Version 0.7.0 **Breaking changes** - `JSONPathIndexError` now requires a `token` parameter. It used to be optional. - Filter expressions that resolve JSON paths (like `SelfPath` and `RootPath`) now return a `NodeList`. The node list must then be explicitly unpacked by `JSONPathEnvironment.compare()` and any filter function that has a `with_node_lists` attribute set to `True`. This is done for the benefit of the `count()` filter function and standards compliance. **Features** - `missing` is now an allowed alias of `undefined` when using the `isinstance()` filter function. **IETF JSONPath Draft compliance** - The built-in `count()` filter function is now compliant with the standard, operating on a "nodelist" instead of node values. ## Version 0.6.0 **Breaking changes** - The "extra filter context" identifier now defaults to `_`. Previously it defaulted to `#`, but it has been decided that `#` is better suited as a current key/property or index identifier. **Features** - Added a non-standard keys/properties selector ([docs](https://jg-rp.github.io/python-jsonpath/syntax/#keys-or), [source](https://github.com/jg-rp/python-jsonpath/blob/main/jsonpath/selectors.py#L193)). - Added a non-standard `typeof()` filter function. `type()` is an alias for `typeof()` ([docs](https://jg-rp.github.io/python-jsonpath/functions/#typeof), [source](https://github.com/jg-rp/python-jsonpath/blob/main/jsonpath/function_extensions/typeof.py)). - Added a non-standard `isinstance()` filter function. `is()` is an alias for `isinstance()` ([docs](https://jg-rp.github.io/python-jsonpath/functions/#isinstance), [source](https://github.com/jg-rp/python-jsonpath/blob/main/jsonpath/function_extensions/is_instance.py)). - Added a current key/property or index identifier. When filtering a mapping, `#` will hold key associated with the current node (`@`). When filtering a sequence, `#` will hold the current index. See [docs](https://jg-rp.github.io/python-jsonpath/syntax/#filters-expression). **IETF JSONPath Draft compliance** - Don't allow leading zeros in index selectors. We now raise a `JSONPathSyntaxError`. - Validate the built-in `count()` function's argument is array-like. ## Version 0.5.0 **Features** - Added the built-in `match` filter function. - Added the built-in `search` filter function. - Added the built-in `value` filter function. - Pass the current environment to filter function validation. - Added support for the wildcard selector in selector segment lists. **Fixes** - Fixed a bug where the current object identifier (`@`) would evaluate to `undefined` when a filter is applied to an array of strings. - Compound paths that have a trailing `|` or `&` now raise a `JSONPathSyntaxError`. **IETF JSONPath Draft compliance** - Removed support for dotted index selectors. - Raise a `JSONPathSyntaxError` for unescaped whitespace and control characters. - Raise a `JSONPathSyntaxError` for empty selector segments. - Raise a `JSONPathIndexError` if an index selector is out of range. - Raise a `JSONPathSyntaxError` for too many colons in a slice selector. - Raise a `JSONPathIndexError` if a slice selector argument is out of range. - Allow nested filters. 
## Version 0.4.0 **IETF JSONPath Draft compliance** - **Behavioral change.** When applied to a JSON object, filters now have an implicit preceding wildcard selector and the "current" (`@`) object is set to each of the object's values. This is now consistent with applying filters to arrays and adheres to the IETF JSONPath Internet Draft. ## Version 0.3.0 **IETF JSONPath Draft compliance** - Added support for function extensions. - Added the built-in `length()` function. - Added the built-in `count()` function. `count()` is an alias for `length()`. - Support filters without parentheses. - Adhere to IETF JSONPath draft escaping in quoted property selectors. - Handle UTF-16 surrogate pairs in quoted property selectors. **Features** - Added the built-in `keys()` function. - Added `parent` and `children` properties to `JSONPathMatch`. Now we can traverse the "document tree" after finding matches. - Added a `parts` property to `JSONPathMatch`. `parts` is a tuple of `int`s, `slice`s and `str`s that can be used with `JSONPathEnvironment.getitem()` to get the matched object from the original data structure, or equivalent data structures. It is the keys, indices and slices that make up a concrete path. ## Version 0.2.0 **Fixes** - Fixed a bug with `CompoundJSONPath.finditer()` and the intersection operator (`&`). The intersection operation was returning just the left hand results. ## Version 0.1.0 First release jg-rp-python-jsonpath-830094f/CONTRIBUTING.md000066400000000000000000000042061512714264000204100ustar00rootroot00000000000000# Contributing to Python JSONPath Hi. Your contributions and questions are always welcome. Feel free to ask questions, report bugs or request features on the [issue tracker](https://github.com/jg-rp/python-jsonpath/issues) or on [Github Discussions](https://github.com/jg-rp/python-jsonpath/discussions). Pull requests are welcome too. **Table of contents** - [Development](#development) - [Documentation](#documentation) - [Style Guides](#style-guides) ## Development The [JSONPath Compliance Test Suite](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite) and [JSONPath Normalized Path Test Suite](https://github.com/jg-rp/jsonpath-compliance-normalized-paths) are included in this repository as Git [submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules). Clone this project and initialize the submodules with something like: ```shell $ git clone git@github.com:jg-rp/python-jsonpath.git $ cd python-jsonpath $ git submodule update --init ``` We use [hatch](https://hatch.pypa.io/latest/) to manage project dependencies and development environments. Run tests with the _test_ script. ```shell $ hatch run test ``` Lint with [ruff](https://beta.ruff.rs/docs/). ```shell $ hatch run lint ``` Typecheck with [Mypy](https://mypy.readthedocs.io/en/stable/). ```shell $ hatch run typing ``` Check coverage with pytest-cov. ```shell $ hatch run cov ``` Or generate an HTML coverage report. ```shell $ hatch run cov-html ``` Then open `htmlcov/index.html` in your browser. ## Style Guides ### Git Commit Messages There are no hard rules for git commit messages, although you might like to indicate the type of commit by starting the message with `docs:`, `chore:`, `feat:`, `fix:` or `refactor:`, for example. ### Python Style We use [Ruff](https://docs.astral.sh/ruff/) to lint and format all Python files. Ruff is configured to: - follow [Black](https://github.com/psf/black), with its default configuration. 
- expect [Google style docstrings](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). - enforce Python imports according to [isort](https://pycqa.github.io/isort/) with `force-single-line = true`. jg-rp-python-jsonpath-830094f/LICENSE.txt000066400000000000000000000021151512714264000177770ustar00rootroot00000000000000MIT License Copyright (c) 2023-present James Prior Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.jg-rp-python-jsonpath-830094f/README.md000066400000000000000000000121461512714264000174400ustar00rootroot00000000000000

# Python JSONPath

A flexible JSONPath engine for Python.
We follow RFC 9535 and test against the JSONPath Compliance Test Suite.


---

**Table of Contents**

- [Install](#install)
- [Links](#links)
- [Related projects](#related-projects)
- [Examples](#examples)
- [License](#license)

## Install

Install Python JSONPath using [pip](https://pip.pypa.io/en/stable/getting-started/):

```
pip install python-jsonpath
```

Or [Pipenv](https://pipenv.pypa.io/en/latest/):

```
pipenv install -u python-jsonpath
```

Or from [conda-forge](https://anaconda.org/conda-forge/python-jsonpath):

```
conda install -c conda-forge python-jsonpath
```

## Links

- Documentation: https://jg-rp.github.io/python-jsonpath/
- JSONPath Syntax: https://jg-rp.github.io/python-jsonpath/syntax/
- Change log: https://github.com/jg-rp/python-jsonpath/blob/main/CHANGELOG.md
- PyPI: https://pypi.org/project/python-jsonpath
- Source code: https://github.com/jg-rp/python-jsonpath
- Issue tracker: https://github.com/jg-rp/python-jsonpath/issues

## Related projects

- [JSONPath RFC 9535](https://github.com/jg-rp/python-jsonpath-rfc9535) - A minimal, slightly cleaner Python implementation of RFC 9535. If you're not interested in JSONPath syntax beyond that defined in RFC 9535, you might choose [jsonpath-rfc9535](https://pypi.org/project/jsonpath-rfc9535/) over [python-jsonpath](https://pypi.org/project/python-jsonpath/). jsonpath-rfc9535 also includes utilities for verifying and testing the [JSONPath Compliance Test Suite](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite), most notably the nondeterministic behavior of some JSONPath selectors.
- [JSON P3](https://github.com/jg-rp/json-p3) - RFC 9535 implemented in TypeScript. JSON P3 does not include all the non-standard features of Python JSONPath, but does define some optional [extra syntax](https://jg-rp.github.io/json-p3/guides/jsonpath-extra).
- [Ruby JSON P3](https://github.com/jg-rp/ruby-json-p3) - RFC 9535, RFC 6901 and RFC 6902 implemented in Ruby.

## Examples

### JSONPath

```python
import jsonpath

data = {
    "users": [
        {"name": "Sue", "score": 100},
        {"name": "John", "score": 86},
        {"name": "Sally", "score": 84},
        {"name": "Jane", "score": 55},
    ]
}

user_names = jsonpath.findall("$.users[?@.score < 100].name", data)
print(user_names)  # ['John', 'Sally', 'Jane']
```

### JSON Pointer

We include an [RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901) compliant implementation of JSON Pointer. See JSON Pointer [quick start](https://jg-rp.github.io/python-jsonpath/quickstart/#pointerresolvepointer-data), [guide](https://jg-rp.github.io/python-jsonpath/pointers/) and [API reference](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.JSONPointer).

```python
from jsonpath import pointer

data = {
    "users": [
        {"name": "Sue", "score": 100},
        {"name": "John", "score": 86},
        {"name": "Sally", "score": 84},
        {"name": "Jane", "score": 55},
    ]
}

sue_score = pointer.resolve("/users/0/score", data)
print(sue_score)  # 100

jane_score = pointer.resolve(["users", 3, "score"], data)
print(jane_score)  # 55
```

### JSON Patch

We also include an [RFC 6902](https://datatracker.ietf.org/doc/html/rfc6902) compliant implementation of JSON Patch.
See JSON Patch [quick start](https://jg-rp.github.io/python-jsonpath/quickstart/#patchapplypatch-data) and [API reference](https://jg-rp.github.io/python-jsonpath/api/#jsonpath.JSONPatch) ```python from jsonpath import patch patch_operations = [ {"op": "add", "path": "/some/foo", "value": {"foo": {}}}, {"op": "add", "path": "/some/foo", "value": {"bar": []}}, {"op": "copy", "from": "/some/other", "path": "/some/foo/else"}, {"op": "add", "path": "/some/foo/bar/-", "value": 1}, ] data = {"some": {"other": "thing"}} patch.apply(patch_operations, data) print(data) # {'some': {'other': 'thing', 'foo': {'bar': [1], 'else': 'thing'}}} ``` ## License `python-jsonpath` is distributed under the terms of the [MIT](https://spdx.org/licenses/MIT.html) license. jg-rp-python-jsonpath-830094f/docs/000077500000000000000000000000001512714264000171055ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/docs/advanced.md000066400000000000000000000234701512714264000212020ustar00rootroot00000000000000# Advanced Usage ## Filter Variables Arbitrary variables can be made available to [filter selectors](syntax.md#filter-selector) using the `filter_context` argument to [`findall()`](quickstart.md#findallpath-data) and [`finditer()`](quickstart.md#finditerpath-data). `filter_context` should be a [mapping](https://docs.python.org/3/library/typing.html#typing.Mapping) of strings to JSON-like objects, like lists, dictionaries, strings and integers. Filter context variables are selected using a filter query starting with the _filter context identifier_, which defaults to `_` and has usage similar to `$` and `@`. ```python import jsonpath data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } user_names = jsonpath.findall( "$.users[?@.score < _.limit].name", data, filter_context={"limit": 100}, ) ``` ## Function Extensions Add, remove or replace [filter functions](functions.md) by updating the [`function_extensions`](api.md#jsonpath.JSONPathEnvironment.function_extensions) attribute of a [`JSONPathEnvironment`](api.md#jsonpath.JSONPathEnvironment). It is a regular Python dictionary mapping filter function names to any [callable](https://docs.python.org/3/library/typing.html#typing.Callable), like a function or class with a `__call__` method. ### Type System for Function Expressions [Section 2.4.1](https://datatracker.ietf.org/doc/html/rfc9535#name-type-system-for-function-ex) of RFC 9535 defines a type system for function expressions and requires that we check that filter expressions are well-typed. With that in mind, you are encouraged to implement custom filter functions by extending [`jsonpath.function_extensions.FilterFunction`](api.md#jsonpath.function_extensions.FilterFunction), which forces you to be explicit about the [types](api.md#jsonpath.function_extensions.ExpressionType) of arguments the function extension accepts and the type of its return value. !!! info [`FilterFunction`](api.md#jsonpath.function_extensions.FilterFunction) was new in Python JSONPath version 0.10.0. Prior to that we did not enforce function expression well-typedness. 
To use any arbitrary [callable](https://docs.python.org/3/library/typing.html#typing.Callable) as a function extension - or if you don't want built-in filter functions to raise a `JSONPathTypeError` for function expressions that are not well-typed - set [`well_typed`](api.md#jsonpath.JSONPathEnvironment.well_typed) to `False` when constructing a [`JSONPathEnvironment`](api.md#jsonpath.JSONPathEnvironment). ### Example As an example, we'll add a `min()` filter function, which will return the minimum of a sequence of values. If any of the values are not comparable, we'll return the special `undefined` value instead. ```python from typing import Iterable import jsonpath from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction class MinFilterFunction(FilterFunction): """A JSONPath function extension returning the minimum of a sequence.""" arg_types = [ExpressionType.VALUE] return_type = ExpressionType.VALUE def __call__(self, value: object) -> object: if not isinstance(value, Iterable): return jsonpath.UNDEFINED try: return min(value) except TypeError: return jsonpath.UNDEFINED env = jsonpath.JSONPathEnvironment() env.function_extensions["min"] = MinFilterFunction() example_data = {"foo": [{"bar": [4, 5]}, {"bar": [1, 5]}]} print(env.findall("$.foo[?min(@.bar) > 1]", example_data)) ``` Now, when we use `env.findall()`, `env.finditer()` or `env.compile()`, our `min` function will be available for use in filter expressions. ```text $..products[?@.price == min($..products.price)] ``` ### Built-in Functions The [built-in functions](functions.md) can be removed from a `JSONPathEnvironment` by deleting the entry from `function_extensions`. ```python import jsonpath env = jsonpath.JSONPathEnvironment() del env.function_extensions["keys"] ``` Or aliased with an additional entry. ```python import jsonpath env = jsonpath.JSONPathEnvironment() env.function_extensions["properties"] = env.function_extensions["keys"] ``` Alternatively, you could subclass `JSONPathEnvironment` and override the `setup_function_extensions` method. ```python from typing import Iterable import jsonpath class MyEnv(jsonpath.JSONPathEnvironment): def setup_function_extensions(self) -> None: super().setup_function_extensions() self.function_extensions["properties"] = self.function_extensions["keys"] self.function_extensions["min"] = min_filter def min_filter(obj: object) -> object: if not isinstance(obj, Iterable): return jsonpath.UNDEFINED try: return min(obj) except TypeError: return jsonpath.UNDEFINED env = MyEnv() ``` ### Compile Time Validation Calls to [type-aware](#type-system-for-function-expressions) function extension are validated at JSONPath compile-time automatically. If [`well_typed`](api.md#jsonpath.JSONPathEnvironment.well_typed) is set to `False` or a custom function extension does not inherit from [`FilterFunction`](api.md#jsonpath.function_extensions.FilterFunction), its arguments can be validated by implementing the function as a class with a `__call__` method, and a `validate` method. `validate` will be called after parsing the function, giving you the opportunity to inspect its arguments and raise a `JSONPathTypeError` should any arguments be unacceptable. If defined, `validate` must take a reference to the current environment, an argument list and the token pointing to the start of the function call. 
```python
def validate(
    self,
    env: JSONPathEnvironment,
    args: List[FilterExpression],
    token: Token,
) -> List[FilterExpression]:
```

It should return an argument list, either the same as the input argument list, or a modified version of it. See the implementation of the built-in [`match` function](https://github.com/jg-rp/python-jsonpath/blob/main/jsonpath/function_extensions/match.py) for an example.

## Custom Environments

Python JSONPath can be customized by subclassing [`JSONPathEnvironment`](api.md#jsonpath.JSONPathEnvironment) and overriding class attributes and/or methods, then using the `findall()`, `finditer()` and `compile()` methods of that subclass.

### Identifier Tokens

The default identifier tokens, like `$` and `@`, can be changed by setting attributes on a `JSONPathEnvironment`. This example sets the root token (default `$`) to be `^`.

```python
from jsonpath import JSONPathEnvironment

class MyJSONPathEnvironment(JSONPathEnvironment):
    root_token = "^"

data = {
    "users": [
        {"name": "Sue", "score": 100},
        {"name": "John", "score": 86},
        {"name": "Sally", "score": 84},
        {"name": "Jane", "score": 55},
    ],
    "limit": 100,
}

env = MyJSONPathEnvironment()
user_names = env.findall(
    "^.users[?@.score < ^.limit].name",
    data,
)
```

This table shows all available identifier token attributes.

| attribute            | default |
| -------------------- | ------- |
| filter_context_token | `_`     |
| keys_token           | `#`     |
| root_token           | `$`     |
| self_token           | `@`     |

### Logical Operator Tokens

By default, we accept both Python and C-style logical operators in filter expressions. That is, `not` and `!` are equivalent, `and` and `&&` are equivalent, and `or` and `||` are equivalent. You can change this using class attributes on a [`Lexer`](custom_api.md#jsonpath.lex.Lexer) subclass and setting the `lexer_class` attribute on a `JSONPathEnvironment`. This example changes all three logical operators to strictly match the JSONPath spec.

```python
from jsonpath import JSONPathEnvironment
from jsonpath import Lexer

class MyLexer(Lexer):
    logical_not_pattern = r"!"
    logical_and_pattern = r"&&"
    logical_or_pattern = r"\|\|"

class MyJSONPathEnvironment(JSONPathEnvironment):
    lexer_class = MyLexer

env = MyJSONPathEnvironment()
env.compile("$.foo[?@.a > 0 && @.b < 100]")  # OK
env.compile("$.foo[?@.a > 0 and @.b < 100]")  # JSONPathSyntaxError
```

### Keys Selector

The non-standard keys selector is used to retrieve the keys/properties from a JSON Object or Python mapping. It defaults to `~` and can be changed using the `keys_selector_token` attribute on a [`JSONPathEnvironment`](./api.md#jsonpath.JSONPathEnvironment) subclass. This example changes the keys selector to `*~`.

```python
from jsonpath import JSONPathEnvironment

class MyJSONPathEnvironment(JSONPathEnvironment):
    keys_selector_token = "*~"

data = {
    "users": [
        {"name": "Sue", "score": 100},
        {"name": "John", "score": 86},
        {"name": "Sally", "score": 84},
        {"name": "Jane", "score": 55},
    ],
    "limit": 100,
}

env = MyJSONPathEnvironment()
print(env.findall("$.users[0].*~", data))  # ['name', 'score']
```

### Array Index Limits

Python JSONPath limits the minimum and maximum JSON array or Python sequence indices (including slice steps) allowed in a JSONPath query. The default minimum allowed index is `-(2**53) + 1` and the maximum is `(2**53) - 1`. When a limit is reached, a `JSONPathIndexError` is raised. You can change the minimum and maximum allowed indices using the `min_int_index` and `max_int_index` attributes on a [`JSONPathEnvironment`](./api.md#jsonpath.JSONPathEnvironment) subclass.
```python from jsonpath import JSONPathEnvironment class MyJSONPathEnvironment(JSONPathEnvironment): min_int_index = -100 max_int_index = 100 env = MyJSONPathEnvironment() query = env.compile("$.users[999]") # jsonpath.exceptions.JSONPathIndexError: index out of range, line 1, column 8 ``` jg-rp-python-jsonpath-830094f/docs/api.md000066400000000000000000000011021512714264000201720ustar00rootroot00000000000000# API Reference ::: jsonpath.JSONPathEnvironment handler: python ::: jsonpath.JSONPathMatch handler: python ::: jsonpath.JSONPath handler: python ::: jsonpath.CompoundJSONPath handler: python ::: jsonpath.Query handler: python ::: jsonpath.Projection handler: python ::: jsonpath.function_extensions.FilterFunction handler: python ::: jsonpath.function_extensions.ExpressionType handler: python ::: jsonpath.JSONPointer handler: python ::: jsonpath.RelativeJSONPointer handler: python ::: jsonpath.JSONPatch handler: python jg-rp-python-jsonpath-830094f/docs/async.md000066400000000000000000000045241512714264000205510ustar00rootroot00000000000000# Async API Largely motivated by its integration with [Python Liquid](https://jg-rp.github.io/liquid/jsonpath/introduction), Python JSONPath offers an asynchronous API that allows for items in a target data structure to be "fetched" lazily. [`findall_async()`](api.md#jsonpath.JSONPathEnvironment.findall_async) and [`finditer_async()`](api.md#jsonpath.JSONPathEnvironment.finditer_async) are [asyncio](https://docs.python.org/3/library/asyncio.html) equivalents to [`findall()`](api.md#jsonpath.JSONPathEnvironment.findall) and [`finditer()`](api.md#jsonpath.JSONPathEnvironment.finditer). By default, any class implementing the [mapping](https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping) or [sequence](https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence) interfaces, and a `__getitem_async__()` method, will have `__getitem_async__()` awaited instead of calling `__getitem__()` when resolving mapping keys or sequence indices. ## Example In this example, showing a lazy-loading collections of `Player` objects, only the "A" team's players are fetched from the database, and only when they are first accessed. ```python from collections import abc from dataclasses import dataclass from typing import Dict from typing import Iterator from typing import List import jsonpath @dataclass class Player: name: str pid: int rank: int class LazyPlayers(abc.Mapping[str, Player]): def __init__(self, names: List[str]): self.names = names self.cached_players: Dict[str, Player] = {} def __len__(self) -> int: return len(self.names) def __iter__(self) -> Iterator[str]: return iter(self.names) def __getitem__(self, k: str) -> Player: if self.cached_players is None: # Blocking IO here self.cached_players = get_stuff_from_database() return self.cached_players[k] async def __getitem_async__(self, k: str) -> Player: if self.cached_players is None: # Do async IO here. 
self.cached_players = await get_stuff_from_database_async() return self.cached_players[k] data = { "teams": { "A Team": LazyPlayers(["Sue", "Bob"]), "B Team": LazyPlayers(["Sally", "Frank"]), } } best_a_team_players = jsonpath.findall_async("$.teams['A Team'][?rank >= 8]", data) ``` jg-rp-python-jsonpath-830094f/docs/cli.md000066400000000000000000000244571512714264000202120ustar00rootroot00000000000000# Command Line Interface **_New in version 0.9.0_** Python JSONPath includes a script called `json`, exposing [JSONPath](quickstart.md#findallpath-data), [JSON Pointer](quickstart.md#pointerresolvepointer-data) and [JSON Patch](quickstart.md#patchapplypatch-data) features on the command line. Use the `--version` argument to check the current version of Python JSONPath, and the `--help` argument to display command information. ```console $ json --version python-jsonpath, version 0.9.0 ``` ```console $ json --help usage: json [-h] [--debug] [--pretty] [-v] [--no-unicode-escape] COMMAND ... JSONPath, JSON Pointer and JSON Patch utilities. positional arguments: COMMAND path Find objects in a JSON document given a JSONPath. pointer Resolve a JSON Pointer against a JSON document. patch Apply a JSON Patch to a JSON document. optional arguments: -h, --help show this help message and exit --debug Show stack traces. (default: False) --pretty Add indents and newlines to output JSON. (default: False) -v, --version Show the version and exit. --no-unicode-escape Disable decoding of UTF-16 escape sequence within paths and pointers. (default: False) Use [json COMMAND --help] for command specific help. Usage Examples: Find objects in source.json matching a JSONPath, write them to result.json. $ json path -q "$.foo['bar'][?@.baz > 1]" -f source.json -o result.json Resolve a JSON Pointer against source.json, pretty print the result to stdout. $ json --pretty pointer -p "/foo/bar/0" -f source.json Apply JSON Patch patch.json to JSON from stdin, output to result.json. $ cat source.json | json patch /path/to/patch.json -o result.json ``` Use `json COMMAND --help` for command specific help. ```console $ json path --help usage: json path [-h] (-q QUERY | -r PATH_FILE) [-f FILE] [-o OUTPUT] Find objects in a JSON document given a JSONPath. optional arguments: -h, --help show this help message and exit -q QUERY, --query QUERY JSONPath query string. -r PATH_FILE, --path-file PATH_FILE Text file containing a JSONPath query. -f FILE, --file FILE File to read the target JSON document from. Defaults to reading from the standard input stream. -o OUTPUT, --output OUTPUT File to write resulting objects to, as a JSON array. Defaults to the standard output stream. --no-type-checks Disables filter expression well-typedness checks. --strict Compile and evaluate JSONPath expressions with strict compliance with RFC 9535. ``` ## Global Options These arguments apply to any subcommand and must be listed before the command. ### `--debug` Enable debugging. Display full stack traces, if available, when errors occur. Without the `--debug` option, the following example shows a short "json path syntax error" message. ```console $ json path -q "$.1" -f /tmp/source.json json path syntax error: unexpected token '1', line 1, column 2 ``` With the `--debug` option, we get the stack trace triggered by `JSONPathSyntaxError`. 
```console $ json --debug path -q "$.1" -f /tmp/source.json Traceback (most recent call last): File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/bin/json", line 8, in sys.exit(main()) File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/lib/python3.9/site-packages/jsonpath/cli.py", line 338, in main args.func(args) File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/lib/python3.9/site-packages/jsonpath/cli.py", line 234, in handle_path_command path = jsonpath.compile(args.query or args.path_file.read()) File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/lib/python3.9/site-packages/jsonpath/env.py", line 148, in compile _path: Union[JSONPath, CompoundJSONPath] = JSONPath( File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/lib/python3.9/site-packages/jsonpath/path.py", line 49, in __init__ self.selectors = tuple(selectors) File "/home/james/.local/share/virtualenvs/jsonpath_cli-8Tb3e-ir/lib/python3.9/site-packages/jsonpath/parse.py", line 256, in parse raise JSONPathSyntaxError( jsonpath.exceptions.JSONPathSyntaxError: unexpected token '1', line 1, column 2 ``` ### `--pretty` Enable pretty formatting when outputting JSON. Adds newlines and indentation to output specified with the `-o` or `--output` option. Without the `--pretty` option, the following example output is on one line. ```console $ json pointer -p "/categories/1/products/0" -f /tmp/source.json {"title": "Cap", "description": "Baseball cap", "price": 15.0} ``` With the `--pretty` option, we get nicely formatted JSON output. ```console $ json --pretty pointer -p "/categories/1/products/0" -f /tmp/source.json { "title": "Cap", "description": "Baseball cap", "price": 15.0 } ``` ### `--no-unicode-escape` Disable decoding of UTF-16 escape sequences, including surrogate paris. This can improve performance if you know your paths and pointers don't contain UTF-16 escape sequences. ```console $ json --no-unicode-escape path -q "$.price_cap" -f /tmp/source.json ``` ## Commands One of the subcommands `path`, `pointer` or `patch` must be specified, depending on whether you want to search a JSON document with a JSONPath, resolve a JSON Pointer against a JSON document or apply a JSON Patch to a JSON Document. ### `path` Find objects in a JSON document given a JSONPath. One of `-q`/`--query` or `-r`/`--path-file` must be given. `-q` being a JSONPath given on the command line as a string, `-r` being the path to a file containing a JSONPath. ``` json path [-h] (-q QUERY | -r PATH_FILE) [-f FILE] [-o OUTPUT] ``` #### `-q` / `--query` The JSONPath as a string. ```console $ json path -q "$.price_cap" -f /tmp/source.json ``` ```console $ json path --query "$.price_cap" -f /tmp/source.json ``` #### `-r` / `--path-file` The path to a file containing a JSONPath. ```console $ json path -r /tmp/path.txt -f /tmp/source.json ``` ```console $ json path --path-file /tmp/path.txt -f /tmp/source.json ``` #### `-f` / `--file` The path to a file containing the target JSON document. If omitted or a hyphen (`-`), the target JSON document will be read from the standard input stream. ```console $ json path -q "$.price_cap" -f /tmp/source.json ``` ```console $ json path -q "$.price_cap" --file /tmp/source.json ``` #### `-o` / `--output` The path to a file to write resulting objects to, as a JSON array. If omitted or a hyphen (`-`) is given, results will be written to the standard output stream. 
```console $ json path -q "$.price_cap" -f /tmp/source.json -o result.json ``` ```console $ json path -q "$.price_cap" -f /tmp/source.json --output result.json ``` #### `--no-type-checks` _New in version 0.10.0_ Disables JSONPath filter expression well-typedness checks. The well-typedness of a filter expression is defined by RFC 9535. #### `--strict` _New in version 2.0.0_ Compile and evaluate JSONPath expressions with strict compliance with RFC 9535. ### `pointer` Resolve a JSON Pointer against a JSON document. One of `-p`/`--pointer` or `-r`/`--pointer-file` must be given. `-p` being a JSON Pointer given on the command line as a string, `-r` being the path to a file containing a JSON Pointer. ``` json pointer [-h] (-p POINTER | -r POINTER_FILE) [-f FILE] [-o OUTPUT] [-u] ``` #### `-p` / `--pointer` An RFC 6901 formatted JSON Pointer string. ```console $ json pointer -p "/categories/0/name" -f /tmp/source.json ``` ```console $ json pointer --pointer "/categories/0/name" -f /tmp/source.json ``` #### `-r` / `--pointer-file` The path to a file containing a JSON Pointer. ```console $ json pointer -r /tmp/pointer.txt -f /tmp/source.json ``` ```console $ json pointer --pointer-file /tmp/pointer.txt -f /tmp/source.json ``` #### `-f` / `--file` The path to a file containing the target JSON document. If omitted or a hyphen (`-`), the target JSON document will be read from the standard input stream. ```console $ json pointer -p "/categories/0/name" -f /tmp/source.json ``` ```console $ json pointer -p "/categories/0/name" --file /tmp/source.json ``` #### `-o` / `--output` The path to a file to write the resulting object to. If omitted or a hyphen (`-`) is given, results will be written to the standard output stream. ```console $ json pointer -p "/categories/0/name" -f /tmp/source.json -o result.json ``` ```console $ json pointer -p "/categories/0/name" -f /tmp/source.json --output result.json ``` #### `-u` / `--uri-decode` Enable URI decoding of the JSON Pointer. In this example, we would look for a property called "hello world" in the root of the target document. ```console $ json pointer -p "/hello%20world" -f /tmp/source.json -u ``` ```console $ json pointer -p "/hello%20world" -f /tmp/source.json --uri-decode ``` ### `patch` Apply a JSON Patch to a JSON document. Unlike `path` and `pointer` commands, a patch can't be given as a string argument. `PATCH` is a positional argument that should be a file path to a JSON Patch document or a hyphen (`-`), which means the patch document will be read from the standard input stream. ``` json patch [-h] [-f FILE] [-o OUTPUT] [-u] PATCH ``` These examples read the patch from `patch.json` and the document to modify from `target.json` ```console $ json patch /tmp/patch.json -f /tmp/target.json ``` ```console $ cat /tmp/patch.json | json patch - -f /tmp/target.json ``` #### `-f` / `--file` The path to a file containing the target JSON document. If omitted or a hyphen (`-`), the target JSON document will be read from the standard input stream. ```console $ json patch /tmp/patch.json -f /tmp/target.json ``` ```console $ json patch /tmp/patch.json --file /tmp/target.json ``` #### `-o` / `--output` The path to a file to write the resulting object to. If omitted or a hyphen (`-`) is given, results will be written to the standard output stream. 
```console $ json patch /tmp/patch.json -f /tmp/target.json -o result.json ``` ```console $ json patch /tmp/patch.json -f /tmp/target.json --output result.json ``` #### `-u` / `--uri-decode` Enable URI decoding of JSON Pointers in the patch document. ```console $ json patch /tmp/patch.json -f /tmp/target.json -u ``` ```console $ json patch /tmp/patch.json -f /tmp/target.json --uri-decode ``` jg-rp-python-jsonpath-830094f/docs/convenience.md000066400000000000000000000010711512714264000217220ustar00rootroot00000000000000# Convenience Functions These package-level functions use the default [JSONPathEnvironment](api.md#jsonpath.JSONPathEnvironment), `jsonpath.DEFAULT_ENV` when `strict=False`, or the preconfigured strict environment, `jsonpath.STRICT_ENV` when `strict=True`. ::: jsonpath.compile handler: python ::: jsonpath.findall handler: python ::: jsonpath.finditer handler: python ::: jsonpath.findall_async handler: python ::: jsonpath.finditer_async handler: python ::: jsonpath.match handler: python ::: jsonpath.query handler: python jg-rp-python-jsonpath-830094f/docs/css/000077500000000000000000000000001512714264000176755ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/docs/css/style.css000066400000000000000000000016171512714264000215540ustar00rootroot00000000000000/* Indentation. */ div.doc-contents:not(.first) { padding-left: 25px; border-left: .05rem solid var(--md-typeset-table-color); } /* Mark external links as such. */ a.autorefs-external::after { /* https://primer.style/octicons/arrow-up-right-24 */ background-image: url('data:image/svg+xml,'); content: ' '; display: inline-block; position: relative; top: 0.1em; margin-left: 0.2em; margin-right: 0.1em; height: 1em; width: 1em; border-radius: 100%; background-color: var(--md-typeset-a-color); } a.autorefs-external:hover::after { background-color: var(--md-accent-fg-color); } jg-rp-python-jsonpath-830094f/docs/custom_api.md000066400000000000000000000004471512714264000215770ustar00rootroot00000000000000# Low Level API Reference ::: jsonpath.token.Token handler: python ::: jsonpath.filter.FilterExpression handler: python ::: jsonpath.lex.Lexer handler: python ## jsonpath.parse.Parser TODO: ## jsonpath.selectors.JSONPathSelector TODO: ## jsonpath.stream.TokenStream TODO: jg-rp-python-jsonpath-830094f/docs/exceptions.md000066400000000000000000000021211512714264000216040ustar00rootroot00000000000000# Exceptions Each of the following exceptions has a `token` property, referencing the [`Token`](custom_api.md#jsonpath.token.Token) that caused the error. You can use [`Token.position()`](custom_api.md#jsonpath.token.Token.position) to get the token's line and column number. 
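For example, a minimal sketch of catching a syntax error and reporting where it occurred (this assumes `position()` returns a `(line, column)` pair, as suggested above):

```python
import jsonpath

try:
    jsonpath.compile("$.store[?@.price >]")  # missing right-hand operand
except jsonpath.JSONPathSyntaxError as err:
    # `err.token` is the Token that triggered the error.
    line, column = err.token.position()  # assumed to be a (line, column) tuple
    print(f"syntax error on line {line}, column {column}: {err}")
```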
::: jsonpath.JSONPathError handler: python ::: jsonpath.JSONPathSyntaxError handler: python ::: jsonpath.JSONPathTypeError handler: python ::: jsonpath.JSONPathIndexError handler: python ::: jsonpath.JSONPathNameError handler: python ::: jsonpath.JSONPointerError handler: python ::: jsonpath.JSONPointerResolutionError handler: python ::: jsonpath.JSONPointerIndexError handler: python ::: jsonpath.JSONPointerKeyError handler: python ::: jsonpath.JSONPointerTypeError handler: python ::: jsonpath.RelativeJSONPointerError handler: python ::: jsonpath.RelativeJSONPointerIndexError handler: python ::: jsonpath.RelativeJSONPointerSyntaxError handler: python ::: jsonpath.JSONPatchError handler: python ::: jsonpath.JSONPatchTestFailure handler: python jg-rp-python-jsonpath-830094f/docs/functions.md000066400000000000000000000134101512714264000214360ustar00rootroot00000000000000# Filter Functions A filter function is a named function that can be called as part of a [filter selector](syntax.md#filter-selector). Here we describe built in filters. You can [define your own function extensions](advanced.md#function-extensions) too. !!! note If you pass `strict=True` when calling [`findall()`](convenience.md#jsonpath.findall), [`finditer()`](convenience.md#jsonpath.finditer), etc., Only standard functions - those defined by RFC 9535 - will be enabled. The standard functions are `count`, `length`, `match`, `search` and `value`. ## `count()` ```text count(obj: object) -> Optional[int] ``` Return the number of items in _obj_. If the object does not respond to Python's `len()` function, `None` is returned. ``` $.categories[?count(@.products.*) >= 2] ``` ## `isinstance()` **_New in version 0.6.0_** ```text isinstance(obj: object, t: str) -> bool ``` Return `True` if the type of _obj_ matches _t_. This function allows _t_ to be one of several aliases for the real Python "type". Some of these aliases follow JavaScript/JSON semantics. | type | aliases | | --------------------- | ------------------------------------ | | UNDEFINED | "undefined", "missing" | | None | "null", "nil", "None", "none" | | str | "str", "string" | | Sequence (array-like) | "array", "list", "sequence", "tuple" | | Mapping (dict-like) | "object", "dict", "mapping" | | bool | "bool", "boolean" | | int | "number", "int" | | float | "number", "float" | For example : ``` $.categories[?isinstance(@.length, 'number')] ``` And `is()` is an alias for `isinstance()`: ``` $.categories[?is(@.length, 'number')] ``` ## `keys()` **_New in version 2.0.0_** ``` keys(value: object) -> Tuple[str, ...] | Nothing ``` Return a list of keys from an object/mapping. If `value` does not have a `keys()` method, the special _Nothing_ value is returned. !!! note `keys()` is not registered with the default JSONPath environment. The [keys selector](syntax.md#keys-selector) and [keys filter selector](syntax.md#keys-filter-selector) are usually the better choice when strict compliance with the specification is not needed. You can register `keys()` with your JSONPath environment like this: ```python from jsonpath import JSONPathEnvironment from jsonpath import function_extensions env = JSONPathEnvironment() env.function_extensions["keys"] = function_extensions.Keys() ``` ``` $.some[?'thing' in keys(@)] ``` ## `length()` ```text length(obj: object) -> Optional[int] ``` Return the number of items in the input object. If the object does not respond to Python's `len()` function, `None` is returned. 
``` $.categories[?length(@) > 1] ``` ## `match()` ```text match(obj: object, pattern: str) -> bool ``` Return `True` if _obj_ is a string and is a full match to the regex _pattern_. ```text $..products[?match(@.title, ".+ainers.+")] ``` If _pattern_ is a string literal, it will be compiled at compile time, and raise a `JSONPathTypeError` at compile time if it's invalid. If _pattern_ is a query and the result is not a valid regex, `False` is returned. ## `search()` ```text search(obj: object, pattern: str) -> bool ``` Return `True` if _obj_ is a string and it contains the regexp _pattern_. ```text $..products[?search(@.title, "ainers")] ``` If _pattern_ is a string literal, it will be compiled at compile time, and raise a `JSONPathTypeError` at compile time if it's invalid. If _pattern_ is a query and the result is not a valid regex, `False` is returned. ## `startswith()` **_New in version 2.0.0_** ``` startswith(value: str, prefix: str) -> bool ``` Return `True` if `value` starts with `prefix`. If `value` or `prefix` are not strings, `False` is returned. ``` $[?startswith(@, 'ab')] ``` ## `typeof()` **_New in version 0.6.0_** ```text typeof(obj: object) -> str ``` Return the type of _obj_ as a string. The strings returned from this function use JavaScript/JSON terminology like "string", "array" and "object", much like the result of JavaScript's `typeof` operator. ``` $.categories[?typeof(@.length) == 'number'] ``` `type()` is and alias for `typeof()`. `jsonpath.function_extensions.TypeOf` takes a `single_number_type` argument, which controls the behavior of `typeof()` when given and int or float. By default, `single_number_type` is `True` and `"number"` is returned. Register a new instance of `TypeOf` with a `JSONPathEnvironment` with `single_number_type` set to `False` and `"int"` and `"float"` will be returned when given integers and floats, respectively. | instance | type string | | --------------------- | ------------------------------------------------------ | | UNDEFINED | "undefined" | | None | "null" | | str | "string" | | Sequence (array-like) | "array" | | Mapping (dict-like) | "object" | | bool | "boolean" | | int | "number" or "int" if `single_number_type` is `False` | | float | "number" or "float" if `single_number_type` is `False` | ## `value()` ``` value(nodes: object) -> object | undefined ``` Return the first value from _nodes_ resulting from a JSONPath query, if there is only one node, or `undefined` otherwise. ```text $..products[?value(@.price) == 9] ``` jg-rp-python-jsonpath-830094f/docs/index.md000066400000000000000000000106441512714264000205430ustar00rootroot00000000000000# Python JSONPath JSONPath is a mini language for selecting values from data formatted in JavaScript Object Notation, or equivalent Python objects, like dictionaries and lists. Python JSONPath is a non-evaluating, read-only implementation of JSONPath, suitable for situations where JSONPath query authors are untrusted. We follow [RFC 9535](https://datatracker.ietf.org/doc/html/rfc9535) and test against the [JSONPath Compliance Test Suite](https://github.com/jsonpath-standard/jsonpath-compliance-test-suite). We also include implementations of [JSON Pointer](pointers.md) ([RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901)) and [JSON Patch](api.md#jsonpath.JSONPatch) ([RFC 6902](https://datatracker.ietf.org/doc/html/rfc6902)), plus methods for converting a [JSONPathMatch](api.md#jsonpath.JSONPathMatch) to a `JSONPointer`. 
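For a quick taste of that JSONPath to JSON Pointer interop, here is a minimal sketch converting a `JSONPathMatch` to a `JSONPointer` (the output shown in comments is indicative):

```python
import jsonpath

data = {"users": [{"name": "Sue"}, {"name": "John"}]}

match = jsonpath.match("$.users[1].name", data)
if match:
    pointer = match.pointer()  # build a JSONPointer from this JSONPathMatch
    print(pointer)  # /users/1/name
    print(pointer.resolve(data))  # John
```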
## Install Install Python JSONPath using [pip](https://pip.pypa.io/en/stable/getting-started/): ```console pip install python-jsonpath ``` Or [Pipenv](https://pipenv.pypa.io/en/latest/): ```console pipenv install python-jsonpath ``` Or [pipx](https://pypa.github.io/pipx/) ```console pipx install python-jsonpath ``` Or from [conda-forge](https://anaconda.org/conda-forge/python-jsonpath): ```console conda install -c conda-forge python-jsonpath ``` ### Optional dependencies Python JSONPath works out of the box with **no extra dependencies**, and its syntax is already **very close** to [RFC 9535](https://www.rfc-editor.org/rfc/rfc9535). For strict compliance with the specification, [strict mode](syntax.md) and the `strict` extra were added in **version 2.0.0**. ```console pip install python-jsonpath[strict] ``` This installs [`regex`](https://pypi.org/project/regex/) and [`iregexp-check`](https://pypi.org/project/iregexp-check/), enabling: - [`match()`](functions.md#match) and [`search()`](functions.md#search) to use `regex` instead of Python's built-in `re` module. - Validation of regular expressions against [RFC 9485](https://datatracker.ietf.org/doc/html/rfc9485). See the [syntax guide](syntax.md) for strict mode details and specification extensions. ## Example ```python import jsonpath example_data = { "categories": [ { "name": "footwear", "products": [ { "title": "Trainers", "description": "Fashionable trainers.", "price": 89.99, }, { "title": "Barefoot Trainers", "description": "Running trainers.", "price": 130.00, }, ], }, { "name": "headwear", "products": [ { "title": "Cap", "description": "Baseball cap", "price": 15.00, }, { "title": "Beanie", "description": "Winter running hat.", "price": 9.00, }, ], }, ], "price_cap": 10, } products = jsonpath.findall("$..products.*", example_data) print(products) ``` Which results in a list of all products from all categories: ```json [ { "title": "Trainers", "description": "Fashionable trainers.", "price": 89.99 }, { "title": "Barefoot Trainers", "description": "Running trainers.", "price": 130.0 }, { "title": "Cap", "description": "Baseball cap", "price": 15.0 }, { "title": "Beanie", "description": "Winter running hat.", "price": 9.0 } ] ``` Or, reading data from a JSON formatted file: ```python import jsonpath with open("some.json") as fd: products = jsonpath.findall("$..products.*", fd) print(products) ``` You could use Python JSONPath on data read from a YAML formatted file too, or any data format that can be loaded into dictionaries and lists. If you have [PyYAML](https://pyyaml.org/wiki/PyYAML) installed: ```python import jsonpath import yaml with open("some.yaml") as fd: data = yaml.safe_load(fd) products = jsonpath.findall("$..products.*", data) print(products) ``` ## Next Steps Have a read through the [Quick Start](quickstart.md) and [High Level API Reference](api.md), or the default [JSONPath Syntax](syntax.md) supported by Python JSONPath. If you're interested in customizing JSONPath, take a look at [Advanced Usage](advanced.md) and the [Low Level API Reference](custom_api.md). jg-rp-python-jsonpath-830094f/docs/pointers.md000066400000000000000000000157561512714264000213100ustar00rootroot00000000000000# JSON Pointers **_New in version 0.8.0_** JSON Pointer ([RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901)) is a string syntax for targeting a single value (JSON object, array, or scalar) within a JSON document. Unlike a JSONPath expression, which can yield multiple values, a JSON Pointer resolves to **at most one value**. 
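For example, the sketch below contrasts the two (the example data and output comments are illustrative):

```python
import jsonpath
from jsonpath import JSONPointer

data = {"users": [{"name": "Sue"}, {"name": "John"}]}

# A JSONPath query can yield any number of values.
print(jsonpath.findall("$.users[*].name", data))  # ['Sue', 'John']

# A JSON Pointer identifies at most one value.
print(JSONPointer("/users/1/name").resolve(data))  # John
```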
JSON Pointers are a fundamental component of JSON Patch ([RFC 6902](https://datatracker.ietf.org/doc/html/rfc6902)), where each patch operation must have at least one pointer identifying the target location to modify. ??? note "Extensions to RFC 6901" We have extended RFC 6901 to support: - Interoperability with the JSONPath [keys selector](syntax.md#keys-selector) (`~`) - A special non-standard syntax for targeting **keys or indices themselves**, used in conjunction with [Relative JSON Pointer](#torel) **Keys Selector Compatibility** The JSONPath **keys selector** (`.~` or `[~]`) allows expressions to target the *keys* of an object, rather than their associated values. To maintain compatibility when translating between JSONPath and JSON Pointer, our implementation includes special handling for this selector. While standard JSON Pointers always refer to values, we ensure that paths derived from expressions like `$.categories.~` can be represented in our pointer system. This is especially important when converting from JSONPath to JSON Pointer or when evaluating expressions that mix value and key access. **Key/Index Pointers (`#`)** This non-standard pointer form represents **keys or indices themselves**, not the values they map to. Examples: - `#foo` points to the object key `"foo"` (not the value at `"foo"`) - `#0` points to the index `0` of an array (not the value at that index) This syntax is introduced to support the full capabilities of [Relative JSON Pointer](#torel), which allows references to both values and the *keys or indices* that identify them. To ensure that any `RelativeJSONPointer` can be losslessly converted into a `JSONPointer`, we use the `#` form to represent these special cases. #### Example ```python from jsonpath import RelativeJSONPointer rjp = RelativeJSONPointer("1#") print(repr(rjp.to("/items/0/name"))) # JSONPointer('/items/#0') ``` ## `resolve(data)` Resolve this pointer against _data_. _data_ can be a file-like object or string containing JSON formatted data, or a Python [`Mapping`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping) or [`Sequence`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence), like a dictionary or list. ```python from jsonpath import JSONPointer example_data = {"foo": {"bar": [1, 2, 3]}} pointer = JSONPointer("/foo/bar/0") print(pointer.resolve(example_data)) # 1 ``` ## `resolve_parent(data)` Resolve this pointer against _data_, return the object and its parent as a `(parent, object)` tuple. If _object_ does not exist in _data_ but _parent_ does, `(parent, UNDEFINED)` will be returned. Where `jsonpath.pointer.UNDEFINED` indicates the lack of a value. If this pointer points to the JSON document root, parent will be `None`. ```python from jsonpath import JSONPointer example_data = {"foo": {"bar": [1, 2, 3]}} pointer = JSONPointer("/foo/bar/0") print(pointer.resolve_parent(example_data)) # ([1, 2, 3], 1) # 'thing' does not exist pointer = JSONPointer("/foo/thing") print(pointer.resolve_parent(example_data)) # ({'bar': [1, 2, 3]}, ) pointer = JSONPointer("") print(pointer.resolve_parent(example_data)) # (None, {'foo': {'bar': [1, 2, 3]}}) ``` ## `exists(data)` **_New in version 0.9.0_** Return _True_ if this pointer can be resolved against _data_, or _False_ otherwise. Note that `JSONPointer.resolve()` can return legitimate falsy values that form part of the target JSON document. This method will return `True` if a falsy value is found. 
```python from jsonpath import JSONPointer example_data = {"foo": {"bar": [1, 2, 3]}, "baz": False} pointer = JSONPointer("/foo/bar/0") print(pointer.exists(example_data)) # True pointer = JSONPointer("/foo/bar/9") print(pointer.exists(example_data)) # False pointer = JSONPointer("/baz") print(pointer.exists(example_data)) # True ``` ## `join(*parts)` **_New in version 0.9.0_** Join this pointer with _parts_. Each part is expected to be a JSON Pointer string, possibly without a leading slash. If a part does have a leading slash, the previous pointer is ignored and a new `JSONPointer` is created, and processing of remaining parts continues. `join()` is equivalent to using the slash (`/`) operator for each argument. ```python from jsonpath import JSONPointer pointer = JSONPointer("/foo/bar") print(pointer) # /foo/bar print(pointer.join("baz")) # /foo/bar/baz print(pointer.join("baz", "0")) # /foo/bar/baz/0 ``` ## `parent()` **_New in version 0.9.0_** Return this pointer's parent as a new `JSONPointer`. If this pointer points to the document root, _self_ is returned. ```python from jsonpath import JSONPointer pointer = JSONPointer("/foo/bar") print(pointer) # /foo/bar print(pointer.parent()) # /foo ``` ## `is_relative_to(pointer)` Return _True_ if this pointer points to a child of the argument pointer, which must be a `JSONPointer` instance. ```python from jsonpath import JSONPointer pointer = JSONPointer("/foo/bar") another_pointer = JSONPointer("/foo/bar/0") print(another_pointer.is_relative_to(pointer)) # True another_pointer = JSONPointer("/foo/baz") print(another_pointer.is_relative_to(pointer)) # False ``` ## `to(rel)` **_New in version 0.9.0_** Return a new `JSONPointer` relative to this pointer. _rel_ should be a [`RelativeJSONPointer`](api.md#jsonpath.RelativeJSONPointer) instance or a string following [Relative JSON Pointer](https://www.ietf.org/id/draft-hha-relative-json-pointer-00.html) syntax. ```python from jsonpath import JSONPointer data = {"foo": {"bar": [1, 2, 3], "baz": [4, 5, 6]}} pointer = JSONPointer("/foo/bar/2") print(pointer.resolve(data)) # 3 print(pointer.to("0-1").resolve(data)) # 2 print(pointer.to("2/baz/2").resolve(data)) # 6 ``` A `RelativeJSONPointer` can be instantiated for repeated application to multiple different pointers. ```python from jsonpath import JSONPointer from jsonpath import RelativeJSONPointer data = {"foo": {"bar": [1, 2, 3], "baz": [4, 5, 6], "some": "thing"}} some_pointer = JSONPointer("/foo/bar/0") another_pointer = JSONPointer("/foo/baz/2") rel = RelativeJSONPointer("2/some") print(rel.to(some_pointer).resolve(data)) # thing print(rel.to(another_pointer).resolve(data)) # thing ``` ## Slash Operator **_New in version 0.9.0_** The slash operator allows you to create pointers that are children of an existing pointer. ```python from jsonpath import JSONPointer pointer = JSONPointer("/users") child_pointer = pointer / "score" / "0" another_child_pointer = pointer / "score/1" print(child_pointer) # "/users/score/0" print(another_child_pointer) # "/users/score/1" ``` jg-rp-python-jsonpath-830094f/docs/query.md000066400000000000000000000156671512714264000206130ustar00rootroot00000000000000# Query Iterators **_New in version 1.1.0_** In addition to [`findall()`](api.md#jsonpath.JSONPathEnvironment.findall) and [`finditer()`](api.md#jsonpath.JSONPathEnvironment.finditer), covered in the [quick start guide](./quickstart.md), Python JSONPath offers a fluent _query iterator_ interface. 
[`Query`](api.md#jsonpath.Query) objects provide chainable methods for manipulating a [`JSONPathMatch`](api.md#jsonpath.JSONPathMatch) iterator, like you'd get from `finditer()`. Obtain a `Query` object using the package-level `query()` function, [`JSONPathEnvironment.query()`](api.md#jsonpath.JSONPathEnvironment.query) or using the [`query()`](api.md#jsonpath.JSONPath.query) method of a compiled JSONPath. This example uses the query API to skip the first five matches, limit the total number of matches to ten, then get the value associated with each match. ```python from jsonpath import query # data = ... values = ( query("$.some[?@.thing]", data) .skip(5) .limit(10) .values() ) for value in values: # ... ``` `Query` objects are iterable and can only be iterated once. Pass the query to `list()` (or other sequence) to get a list of results that can be iterated multiple times or otherwise manipulated. ```python from jsonpath import query # data = ... values = list( query("$.some[?@.thing]", data) .skip(5) .limit(10) .values() ) print(values[1]) ``` ## Chainable methods The following `Query` methods all return `self` (the same `Query` instance), so method calls can be chained to further manipulate the underlying iterator. | Method | Aliases | Description | | --------------- | --------------- | -------------------------------------------------- | | `skip(n: int)` | `drop` | Drop up to _n_ matches from the iterator. | | `limit(n: int)` | `head`, `first` | Yield at most _n_ matches from the iterator. | | `tail(n: int)` | `last` | Drop matches from the iterator up to the last _n_. | ## Terminal methods These are terminal methods of the `Query` class. They can not be chained. | Method | Aliases | Description | | ------------- | ------- | ------------------------------------------------------------------------------------------- | | `values()` | | Return an iterable of objects, one for each match in the iterable. | | `locations()` | | Return an iterable of normalized paths, one for each match in the iterable. | | `items()` | | Return an iterable of (object, normalized path) tuples, one for each match in the iterable. | | `pointers()` | | Return an iterable of `JSONPointer` instances, one for each match in the iterable. | | `first_one()` | `one` | Return the first `JSONPathMatch`, or `None` if there were no matches. | | `last_one()` | | Return the last `JSONPathMatch`, or `None` if there were no matches. | ## Take [`Query.take(self, n: int)`](api.md#jsonpath.Query.take) returns a new `Query` instance, iterating over the next _n_ matches. It leaves the existing query in a safe state, ready to resume iteration of remaining matches. ```python from jsonpath import query it = query("$.some.*", {"some": [0, 1, 2, 3]}) for match in it.take(2): print(match.value) # 0, 1 for value in it.values(): print(value) # 2, 3 ``` ## Tee [`tee()`](api.md#jsonpath.Query.tee) creates multiple independent queries from one query iterator. It is not safe to use the initial `Query` instance after calling `tee()`. ```python from jsonpath import query it1, it2 = query("$.some[?@.thing]", data).tee() head = it1.head(10) # first 10 matches tail = it2.tail(10) # last 10 matches ``` ## Select [`select(*expressions, projection=Projection.RELATIVE)`](api.md/#jsonpath.Query.select) performs JSONPath match projection, selecting a subset of values according to one or more JSONPath query expressions relative to the match location. 
For example: ```python from jsonpath import query data = { "categories": [ { "name": "footwear", "products": [ { "title": "Trainers", "description": "Fashionable trainers.", "price": 89.99, }, { "title": "Barefoot Trainers", "description": "Running trainers.", "price": 130.00, "social": {"likes": 12, "shares": 7}, }, ], }, { "name": "headwear", "products": [ { "title": "Cap", "description": "Baseball cap", "price": 15.00, }, { "title": "Beanie", "description": "Winter running hat.", "price": 9.00, }, ], }, ], "price_cap": 10, } for product in query("$..products.*", data).select("title", "price"): print(product) ``` Which selects just the `title` and `price` fields for each product. ```text {'title': 'Trainers', 'price': 89.99} {'title': 'Barefoot Trainers', 'price': 130.0} {'title': 'Cap', 'price': 15.0} {'title': 'Beanie', 'price': 9.0} ``` Without the call to `select()`, we'd get all fields in each product object. ```python # ... for product in query("$..products.*", data).values(): print(product) ``` ```text {'title': 'Trainers', 'description': 'Fashionable trainers.', 'price': 89.99} {'title': 'Barefoot Trainers', 'description': 'Running trainers.', 'price': 130.0, 'social': {'likes': 12, 'shares': 7}} {'title': 'Cap', 'description': 'Baseball cap', 'price': 15.0} {'title': 'Beanie', 'description': 'Winter running hat.', 'price': 9.0} ``` We can select nested values too, and arguments to `select()` can be pre-compiled paths. ```python import jsonpath # ... projection = (jsonpath.compile("title"), jsonpath.compile("social.shares")) for product in jsonpath.query("$..products.*", data).select(*projection): print(product) ``` ```text {'title': 'Trainers'} {'title': 'Barefoot Trainers', 'social': {'shares': 7}} {'title': 'Cap'} {'title': 'Beanie'} ``` And flatten the selection into a sequence of values. ```python from jsonpath import Projection # ... for product in query("$..products.*", data).select( "title", "social.shares", projection=Projection.FLAT ): print(product) ``` ```text ['Trainers'] ['Barefoot Trainers', 7] ['Cap'] ['Beanie'] ``` Or project the selection from the JSON value root. ```python # .. for product in query("$..products[?@.social]", data).select( "title", "social.shares", projection=Projection.ROOT, ): print(product) ``` ```text {'categories': [{'products': [{'title': 'Barefoot Trainers', 'social': {'shares': 7}}]}]} ``` jg-rp-python-jsonpath-830094f/docs/quickstart.md000066400000000000000000000232461512714264000216300ustar00rootroot00000000000000# Quick Start This page gets you started using JSONPath, JSON Pointer and JSON Patch wih Python. See [JSONPath Syntax](syntax.md) for information on JSONPath selector syntax. ## `findall(path, data)` Find all values matching a JSONPath query using [`jsonpath.findall()`](convenience.md#jsonpath.findall). This function takes two arguments: - `path`: a JSONPath query as a string (e.g. `"$.users[*].name"`) - `data`: the JSON document to query It **always** returns a list of matched values, even if the path resolves to a single result or nothing at all. The `data` argument can be: - A Python [`Mapping`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Mapping) (e.g. `dict`) or [`Sequence`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence) (e.g. 
`list`) - A JSON-formatted string - A file-like object containing JSON For example, the following query extracts all user names from a dictionary containing a list of user objects: ```python import jsonpath data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } user_names = jsonpath.findall("$.users.*.name", data) ``` Where `user_names` is now equal to: ```json ["Sue", "John", "Sally", "Jane"] ``` If the same data were in a file called `users.json`, we might use `findall()` like this: ```python import jsonpath with open("users.json") as fd: user_names = jsonpath.findall("$.users.*.name", fd) ``` ## `finditer(path, data)` Use [`jsonpath.finditer()`](convenience.md#jsonpath.finditer) to iterate over instances of [`jsonpath.JSONPathMatch`](api.md#jsonpath.JSONPathMatch) for every object in _data_ that matches _path_. It accepts the same arguments as [`findall()`](#findallpath-data), a query string and data from which to select matches. ```python import jsonpath data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } matches = jsonpath.finditer("$.users.*.name", data) for match in matches: print(matches) ``` The string representation of a [`JSONPathMatch`](api.md#jsonpath.JSONPathMatch) shows the matched object and the canonical path to that object. ```text 'Sue' @ $['users'][0]['name'] 'John' @ $['users'][1]['name'] 'Sally' @ $['users'][2]['name'] 'Jane' @ $['users'][3]['name'] ``` The selected object is available from a [`JSONPathMatch`](api.md#jsonpath.JSONPathMatch) as `obj` and its path, as a string, as `path`. Other useful properties of `JSONPathMatch` include a reference to the parent match, a list of child matches, and a `parts` tuple of keys and indices that make up the path. ## `compile(path)` When you have a JSONPath query that needs to be matched against different data repeatedly, you can compile the path ahead of time using [`jsonpath.compile()`](convenience.md#jsonpath.compile). It takes a query as a string and returns an instance of [`JSONPath`](api.md#jsonpath.JSONPath). `JSONPath` has `findall()` and `finditer()` methods that behave similarly to package-level `findall()` and `finditer()`, just without the `path` argument. ```python import jsonpath some_data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, ] } other_data = { "users": [ { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } path = jsonpath.compile("$.users.*.name") some_users = path.findall(some_data) other_users = path.findall(other_data) ``` ## `match(path, data)` **_New in version 0.8.0_** Get a [`jsonpath.JSONPathMatch`](api.md#jsonpath.JSONPathMatch) instance for the first match found in _data_. If there are no matches, `None` is returned. `match()` accepts the same arguments as [`findall()`](#findallpath-data). 
```python import jsonpath data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } match = jsonpath.match("$.users[?@.score > 85].name", data) if match: print(match) # 'Sue' @ $['users'][0]['name'] print(match.obj) # Sue ``` ## `pointer.resolve(pointer, data)` **_New in version 0.8.0_** Resolves a JSON Pointer ([RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901)) against a JSON document, returning the value located at the specified path. The `pointer` argument can be either: - A string representation of a JSON Pointer (e.g., `"/foo/bar/0"`) - A list of unescaped pointer segments (e.g., `["foo", "bar", "0"]`) The `data` argument can be: - A Python data structure (`dict`, `list`, etc.) - A JSON-formatted string - A file-like object containing JSON ```python from jsonpath import pointer data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, { "name": "Sally", "score": 84, }, { "name": "Jane", "score": 55, }, ] } sue_score = pointer.resolve("/users/0/score", data) print(sue_score) # 100 jane_score = pointer.resolve(["users", 3, "score"], data) print(jane_score) # 55 ``` If the pointer cannot be resolved against the target JSON data — due to a missing key, an out-of-range index, or an unexpected data type — an exception will be raised: - `JSONPointerKeyError` – when a referenced key is missing from an object - `JSONPointerIndexError` – when an array index is out of bounds - `JSONPointerTypeError` – when a path segment expects the wrong type (e.g., indexing into a non-array) All of these exceptions are subclasses of `JSONPointerResolutionError`. You can optionally provide a `default` value to `resolve()`, which will be returned instead of raising an error if the pointer cannot be resolved. ```python from jsonpath import pointer data = { "users": [ { "name": "Sue", "score": 100, }, { "name": "John", "score": 86, }, ] } sue_score = pointer.resolve("/users/99/score", data, default=0) print(sue_score) # 0 ``` See also [`JSONPathMatch.pointer()`](api.md#jsonpath.JSONPathMatch.pointer), which builds a [`JSONPointer`](api.md#jsonpath.JSONPointer) from a `JSONPathMatch`. ## `patch.apply(patch, data)` **_New in version 0.8.0_** Apply a JSON Patch ([RFC 6902](https://datatracker.ietf.org/doc/html/rfc6902)) to some data. A JSON Patch defines update operation to perform on a JSON document. _patch_ can be a string or file-like object containing a valid JSON Patch document, or an iterable of dictionaries. _data_ is the target JSON document to modify. If _data_ is a string or file-like object, it will be loaded with _json.loads_. Otherwise _data_ should be a JSON-like data structure and will be **modified in place**. ```python from jsonpath import patch patch_operations = [ {"op": "add", "path": "/some/foo", "value": {"foo": {}}}, {"op": "add", "path": "/some/foo", "value": {"bar": []}}, {"op": "copy", "from": "/some/other", "path": "/some/foo/else"}, {"op": "add", "path": "/some/foo/bar/-", "value": 1}, ] data = {"some": {"other": "thing"}} patch.apply(patch_operations, data) print(data) # {'some': {'other': 'thing', 'foo': {'bar': [1], 'else': 'thing'}}} ``` Use the [JSONPatch](api.md#jsonpath.JSONPatch) class to create a patch for repeated application. 
```python from jsonpath import JSONPatch patch = JSONPatch( [ {"op": "add", "path": "/some/foo", "value": {"foo": {}}}, {"op": "add", "path": "/some/foo", "value": {"bar": []}}, {"op": "copy", "from": "/some/other", "path": "/some/foo/else"}, {"op": "add", "path": "/some/foo/bar/-", "value": 1}, ] ) data = {"some": {"other": "thing"}} patch.apply(data) print(data) # {'some': {'other': 'thing', 'foo': {'bar': [1], 'else': 'thing'}}} ``` [JSONPatch](api.md#jsonpath.JSONPatch) also offers a builder API for constructing JSON patch documents. We use strings as JSON Pointers in this example, but existing [JSONPointer](api.md#jsonpath.JSONPointer) instances are OK too. ```python from jsonpath import JSONPatch patch = ( JSONPatch() .add("/some/foo", {"foo": []}) .add("/some/foo", {"bar": []}) .copy("/some/other", "/some/foo/else") .add("/some/foo/bar/-", "/some/foo/else") ) data = {"some": {"other": "thing"}} patch.apply(data) print(data) # {'some': {'other': 'thing', 'foo': {'bar': [1], 'else': 'thing'}}} ``` ## What's Next? Read about the [Query Iterators](query.md) API or [user-defined filter functions](advanced.md#function-extensions). Also see how to make extra data available to filters with [Extra Filter Context](advanced.md#filter-variables). `findall()`, `finditer()` and `compile()` are shortcuts that use the default[`JSONPathEnvironment`](api.md#jsonpath.JSONPathEnvironment). `jsonpath.findall(path, data)` is equivalent to: ```python jsonpath.JSONPathEnvironment().compile(path).findall(data) ``` If you would like to customize Python JSONPath, see [Advanced Usage](advanced.md#custom-environments). jg-rp-python-jsonpath-830094f/docs/syntax.md000066400000000000000000000603331512714264000207620ustar00rootroot00000000000000# JSONPath Syntax Python JSONPath extends the [RFC 9535](https://datatracker.ietf.org/doc/html/rfc9535) specification with extra selectors and relaxed rules for convenience. If you need strict compliance with RFC 9535, pass `strict=True` when calling [`findall()`](convenience.md#jsonpath.findall), [`finditer()`](convenience.md#jsonpath.finditer), and similar functions. In strict mode, the syntax and behavior conform to the specification, and no non-standard extensions are registered by default. You can still add them manually if needed. This guide first introduces the standard JSONPath syntax (see the RFC for the formal definition), then explains the non-standard extensions and their semantics. ??? info "Preconfigured JSONPath Environments" Python JSONPath provides two ready-to-use environments: - **Default environment** – includes relaxed syntax, non-standard selectors, and additional function extensions. - **Strict environment** – starts with only the RFC 9535 selectors and functions registered. Non-standard extensions can still be enabled explicitly. For custom setups, subclass [`JSONPathEnvironment`](./api.md#jsonpath.JSONPathEnvironment) and override `setup_function_extensions()`: ```python from jsonpath import JSONPathEnvironment from jsonpath.function_extensions import StartsWith class MyJSONPathEnvironment(JSONPathEnvironment): def __init__(self) -> None: super().__init__(strict=True) def setup_function_extensions(self) -> None: super().setup_function_extensions() self.function_extensions["startswith"] = StartsWith() jsonpath = MyJSONPathEnvironment() query = jsonpath.compile("...") ``` ## JSONPath Terminology Think of a JSON document as a tree, objects (mappings) and arrays can contain other objects, arrays, or scalar values. 
Each of these (object, array, or scalar) is a _node_ in the tree. The outermost object or array is called the _root_ node. In this guide, a JSON "document" may refer to: - A file containing valid JSON text - A Python string containing valid JSON text - A Python object composed of dictionaries (or any [Mapping](https://docs.python.org/3/library/collections.abc.html#collections-abstract-base-classes)), lists (or any [Sequence](https://docs.python.org/3/library/collections.abc.html#collections-abstract-base-classes)), strings, numbers, booleans, or `None` A JSONPath expression (aka "query") is made up of a sequence of **segments**. Each segment contains one or more **selectors**: - A _segment_ corresponds to a step in the path from one set of nodes to the next. - A _selector_ describes how to choose nodes within that step (for example, by name, by index, or by wildcard). What follows is a description of these selectors, starting with the standard ones defined in [RFC 9535](https://www.rfc-editor.org/rfc/rfc9535). ## Standard selectors and identifiers ### Root identifier The root identifier, `$`, refers to the outermost node in the target document. This can be an object, an array, or a scalar value. A query containing only the root identifier simply returns the entire input document. **Example query** ``` $ ``` ```json title="data" { "categories": [ { "id": 1, "name": "fiction" }, { "id": 2, "name": "non-fiction" } ] } ``` ```json title="results" [ { "categories": [ { "id": 1, "name": "fiction" }, { "id": 2, "name": "non-fiction" } ] } ] ``` ### Name selector A _name selector_ matches the value of an object member by its key. You can write it in either **shorthand notation** (`.thing`) or **bracket notation** (`['thing']` or `["thing"]`). Dot notation can be used when the property name is a valid identifier. Bracket notation is required when the property name contains spaces, special characters, or starts with a number. **Example query** ```text $.book.title ``` ```json title="data" { "book": { "title": "Moby Dick", "author": "Herman Melville" } } ``` ```json title="results" ["Moby Dick"] ``` ### Index selector The index selector selects an element from an array by its index. Indices are zero-based and enclosed in brackets, `[0]`. If the index is negative, items are selected from the end of the array. **Example query** ```text $.categories[0].name ``` ```json title="data" { "categories": [ { "id": 1, "name": "fiction" }, { "id": 2, "name": "non-fiction" } ] } ``` ```json title="results" ["fiction"] ``` ### Wildcard selector The _wildcard selector_ matches all member values of an object or all elements in an array. It can be written as `.*` (shorthand notation) or `[*]` (bracket notation). **Example query** ```text $.categories[*].name ``` ```json title="data" { "categories": [ { "id": 1, "name": "fiction" }, { "id": 2, "name": "non-fiction" } ] } ``` ```json title="results" ["fiction", "non-fiction"] ``` ### Slice selector The slice selector allows you to select a range of elements from an array. A start index, ending index and step size are all optional and separated by colons, `[start:end:step]`. Negative indices count from the end of the array, just like standard Python slicing. **Example query** ```text $.items[1:4:2] ``` ```json title="data" { "items": ["a", "b", "c", "d", "e", "f"] } ``` ```json title="results" ["b", "d"] ``` ### Filter selector Filters allow you to remove nodes from a selection based on a Boolean expression, `[?expression]`. 
A filter expression evaluates each node in the context of either the root (`$`) or current (`@`) node. When filtering a mapping-like object, `@` identifies the current member value. When filtering a sequence-like object, `@` identifies the current element. Comparison operators include `==`, `!=`, `<`, `>`, `<=`, and `>=`. Logical operators `&&` (and) and `||` (or) can combine terms, and parentheses can be used to group expressions. A filter expression on its own - without a comparison - is treated as an existence test. **Example query** ```text $..products[?(@.price < $.price_cap)] ``` ```json title="data" { "price_cap": 10, "products": [ { "name": "apple", "price": 5 }, { "name": "orange", "price": 12 }, { "name": "banana", "price": 8 } ] } ``` ```json title="results" [ { "name": "apple", "price": 5 }, { "name": "banana", "price": 8 } ] ``` Filter expressions can also call predefined [function extensions](functions.md). ## More on segments So far we've seen shorthand notation (`.selector`) and segments with just one selector (`[selector]`). Here we cover the descendant segment and segments with multiple selectors. ### Segments with multiple selectors A segment can include multiple selectors separated by commas and enclosed in square brackets (`[selector, selector, ...]`). Any valid selector (names, indices, slices, filters, or wildcards) can appear in the list. **Example query** ```text $.store.book[0,2] ``` ```json title="data" { "store": { "book": [ { "title": "Book A", "price": 10 }, { "title": "Book B", "price": 12 }, { "title": "Book C", "price": 8 } ] } } ``` ```json title="results" [ { "title": "Book A", "price": 10 }, { "title": "Book C", "price": 8 } ] ``` ### Descendant segment The descendant segment (`..`) visits all object member values and array elements under the current object or array, applying the selector or selectors that follow to each visited node. It must be followed by a shorthand selector (names, wildcards, etc.) or a bracketed list of one or more selectors. **Example query** ```text $..price ``` ```json title="data" { "store": { "book": [ { "title": "Book A", "price": 10 }, { "title": "Book B", "price": 12 } ], "bicycle": { "color": "red", "price": 19.95 } } } ``` ```json title="results" [10, 12, 19.95] ``` ## Non-standard selectors and identifiers The selectors and identifiers described in this section are an extension to the RFC 9535 specification. They are enabled by default. Set `strict=True` when constructing a [`JSONPathEnvironment`](api.md#jsonpath.JSONPathEnvironment), calling [`findall()`](convenience.md#jsonpath.findall), [`finditer()`](convenience.md#jsonpath.finditer), etc. to disable all non-standard features. Also note that when `strict=False`: - The root identifier (`$`) is optional and paths starting with a dot (`.`) are OK. `.thing` is the same as `$.thing`, as is `thing` and `$["thing"]`. - Leading and trailing whitespace is OK. - Explicit comparisons to `undefined` (aka `missing`) are supported as well as implicit existence tests. ### Key selector **_New in version 2.0.0_** The key selector, `.~name` or `[~'name']`, selects at most one name from an object member. It is syntactically similar to the standard [name selector](https://datatracker.ietf.org/doc/html/rfc9535#name-name-selector), with the addition of a tilde (`~`) prefix. When applied to a JSON object, the key selector selects the _name_ from an object member, if that name exists, or nothing if it does not exist. 
This complements the standard name selector, which selects the _value_ from a name/value pair.

When applied to an array or primitive value, the key selector selects nothing.

Key selector strings must follow the same processing semantics as name selector strings, as described in [section 2.3.1.2](https://datatracker.ietf.org/doc/html/rfc9535#section-2.3.1.2) of RFC 9535.

!!! info

    The key selector is introduced to facilitate valid normalized paths for nodes produced by the [keys selector](#keys-selector) and the [keys filter selector](#keys-filter-selector). I don't expect it will be of much use elsewhere.

#### Syntax

```
selector = name-selector /
           wildcard-selector /
           slice-selector /
           index-selector /
           filter-selector /
           key-selector /
           keys-selector /
           keys-filter-selector

key-selector = "~" name-selector

child-segment = bracketed-selection /
                ("." (wildcard-selector / member-name-shorthand / member-key-shorthand))

descendant-segment = ".." (bracketed-selection /
                           wildcard-selector /
                           member-name-shorthand /
                           member-key-shorthand)

member-key-shorthand = "~" name-first *name-char
```

#### Examples

```json title="Example JSON document"
{ "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }
```

| Query       | Result         | Result Paths                           | Comment                       |
| ----------- | -------------- | -------------------------------------- | ----------------------------- |
| `$.a[0].~c` | `"c"`          | `$['a'][0][~'c']`                      | Key of nested object          |
| `$.a[1].~c` |                |                                        | Key does not exist            |
| `$..[~'b']` | `"b"`<br>`"b"` | `$['a'][0][~'b']`<br>`$['a'][1][~'b']` | Descendant, single quoted key |
| `$..[~"b"]` | `"b"`<br>`"b"` | `$['a'][0][~'b']`<br>`$['a'][1][~'b']` | Descendant, double quoted key |

### Keys selector

**_New in version 0.6.0_**

The keys selector, `~` or `[~]`, selects all names from an object’s name/value members. This complements the standard [wildcard selector](https://datatracker.ietf.org/doc/html/rfc9535#name-wildcard-selector), which selects all values from an object’s name/value pairs.

As with the wildcard selector, the order of nodes resulting from a keys selector is not stipulated.

When applied to an array or primitive value, the keys selector selects nothing.

The normalized path of a node selected using the keys selector uses [key selector](#key-selector) syntax.

#### Syntax

```
keys-selector = "~"
```

#### Examples

```json title="Example JSON document"
{ "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }
```

| Query          | Result                           | Result Paths                                                                     | Comment                    |
| -------------- | -------------------------------- | -------------------------------------------------------------------------------- | -------------------------- |
| `$.a[0].~`     | `"b"`<br>`"c"`                   | `$['a'][0][~'b']`<br>`$['a'][0][~'c']`                                            | Object keys                |
| `$.a.~`        |                                  |                                                                                   | Array keys                 |
| `$.a[0][~, ~]` | `"b"`<br>`"c"`<br>`"c"`<br>`"b"` | `$['a'][0][~'b']`<br>`$['a'][0][~'c']`<br>`$['a'][0][~'c']`<br>`$['a'][0][~'b']`  | Non-deterministic ordering |
| `$..[~]`       | `"a"`<br>`"b"`<br>`"c"`<br>`"b"` | `$[~'a']`<br>`$['a'][0][~'b']`<br>`$['a'][0][~'c']`<br>`$['a'][1][~'b']`          | Descendant keys            |

### Keys filter selector

**_New in version 2.0.0_**

The keys filter selector selects names from an object’s name/value members. It is syntactically similar to the standard [filter selector](https://datatracker.ietf.org/doc/html/rfc9535#name-filter-selector), with the addition of a tilde (`~`) prefix.

```
~?
```

Whereas the standard filter selector will produce a node for each _value_ from an object’s name/value members - when its expression evaluates to logical true - the keys filter selector produces a node for each _name_ in an object’s name/value members.

Logical expression syntax and semantics otherwise match that of the standard filter selector. `@` still refers to the current member value. See also the [current key identifier](#current-key-identifier).

When applied to an array or primitive value, the keys filter selector selects nothing.

The normalized path of a node selected using the keys filter selector uses [key selector](#key-selector) syntax.

#### Syntax

```
filter-selector = "~?" S logical-expr
```

#### Examples

```json title="Example JSON document"
[{ "a": [1, 2, 3], "b": [4, 5] }, { "c": { "x": [1, 2] } }, { "d": [1, 2, 3] }]
```

| Query                  | Result         | Result Paths                 | Comment                          |
| ---------------------- | -------------- | ---------------------------- | -------------------------------- |
| `$.*[~?length(@) > 2]` | `"a"`<br>`"d"` | `$[0][~'a']`<br>`$[2][~'d']` | Conditionally select object keys |
| `$.*[~?@.x]`           | `"c"`          | `$[1][~'c']`                 | Existence test                   |
| `$[~?(true == true)]`  |                |                              | Keys from an array               |

### Singular query selector

**_New in version 2.0.0_**

The singular query selector consists of an embedded absolute singular query, the result of which is used as an object member name or array element index. If the embedded query resolves to a string or int value, at most one object member value or array element value is selected. Otherwise the singular query selector selects nothing.

#### Syntax

```
selector = name-selector /
           wildcard-selector /
           slice-selector /
           index-selector /
           filter-selector /
           singular-query-selector

singular-query-selector = abs-singular-query
```

#### Examples

```json
{
  "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } },
  "b": ["j", "p", "q"],
  "c d": { "x": { "y": 1 } }
}
```

| Query                 | Result             | Result Path      | Comment                                                            |
| --------------------- | ------------------ | ---------------- | ------------------------------------------------------------------ |
| `$.a[$.b[1]]`         | `{"q": [4, 5, 6]}` | `$['a']['p']`    | Object name from embedded singular query                           |
| `$.a.j[$['c d'].x.y]` | `2`                | `$['a']['j'][1]` | Array index from embedded singular query                           |
| `$.a[$.b]`            |                    |                  | Embedded singular query does not resolve to a string or int value  |

### Current key identifier

`#` is the _current key_ identifier. `#` will be the name of the current object member, or the index of the current array element. This complements the current node identifier (`@`), which refers to a member value or array element, respectively.

It is a syntax error to follow the current key identifier with segments, as if it were a filter query.

When used as an argument to a function, the current key is of `ValueType`, and outside a function call it must be compared.

#### Syntax

```
comparable = literal /
             singular-query /        ; singular query value
             function-expr /         ; ValueType
             current-key-identifier

function-argument = literal /
                    filter-query /   ; (includes singular-query)
                    logical-expr /
                    function-expr /
                    current-key-identifier

current-key-identifier = "#"
```

#### Examples

```json title="Example JSON document"
{ "abc": [1, 2, 3], "def": [4, 5], "abx": [6], "aby": [] }
```

| Query                                     | Result             | Result Path                    | Comment                     |
| ----------------------------------------- | ------------------ | ------------------------------ | --------------------------- |
| `$[?match(#, '^ab.*') && length(@) > 0 ]` | `[1,2,3]`<br>`[6]` | `$['abc']`<br>`$['abx']`       | Match on object names       |
| `$.abc[?(# >= 1)]`                        | `2`<br>`3`         | `$['abc'][1]`<br>`$['abc'][2]` | Compare current array index |

### Pseudo root identifier

**_New in version 0.11.0_**

The pseudo root identifier (`^`) behaves like the standard root identifier (`$`), but conceptually wraps the target JSON document in a single-element array. This allows the root document itself to be conditionally selected by filters.

#### Syntax

```
jsonpath-query = (root-identifier / pseudo-root-identifier) segments
root-identifier = "$"
pseudo-root-identifier = "^"
```

#### Examples

```json title="Example JSON data"
{ "a": { "b": 42 }, "n": 7 }
```

| Query                      | Result                         | Result Path | Comment                             |
| -------------------------- | ------------------------------ | ----------- | ----------------------------------- |
| `^[?@.a.b > 7]`            | `{ "a": { "b": 42 }, "n": 7 }` | `^[0]`      | Conditionally select the root value |
| `^[?@.a.b > value(^.*.n)]` | `{ "a": { "b": 42 }, "n": 7 }` | `^[0]`      | Embedded pseudo root query          |

### Filter context identifier

The filter context identifier (`_`) starts an embedded query, similar to the root identifier (`$`) and current node identifier (`@`), but targets JSON-like data passed as the `filter_context` argument to [`findall()`](api.md#jsonpath.JSONPath.findall) and [`finditer()`](api.md#jsonpath.JSONPath.finditer).

#### Syntax

```
current-node-identifier = "@"
extra-context-identifier = "_"

filter-query = rel-query / extra-context-query / jsonpath-query
rel-query = current-node-identifier segments
extra-context-query = extra-context-identifier segments

singular-query = rel-singular-query / abs-singular-query / extra-context-singular-query
rel-singular-query = current-node-identifier singular-query-segments
abs-singular-query = root-identifier singular-query-segments
extra-context-singular-query = extra-context-identifier singular-query-segments
```

#### Examples

```json title="Example JSON data"
{ "a": [{ "b": 42 }, { "b": 3 }] }
```

```json title="Extra JSON data"
{ "c": 42 }
```

| Query              | Result        | Result Path | Comment                                      |
| ------------------ | ------------- | ----------- | -------------------------------------------- |
| `$.a[?@.b == _.c]` | `{ "b": 42 }` | `$['a'][0]` | Comparison with extra context singular query |

## Non-standard operators

In addition to the operators described below, the standard _logical and_ operator (`&&`) is aliased as `and`, the standard _logical or_ operator (`||`) is aliased as `or`, and `null` is aliased as `nil` and `none`. Also, `true`, `false`, `null` and their aliases can start with an upper case letter.

### Membership operators

The membership operators test whether one value occurs within another. An infix expression using `contains` evaluates to true if the right-hand side is a member of the left-hand side, and false otherwise.

- If the left-hand side is an object and the right-hand side is a string, the result is true if the object has a member with that name.
- If the left-hand side is an array, the result is true if any element of the array is equal to the right-hand side.
- For scalars (strings, numbers, booleans, null), `contains` always evaluates to false.

The `in` operator is equivalent to `contains` with operands reversed. This makes `contains` and `in` symmetric, so either form may be used depending on which reads more naturally in context.

A list literal is a comma-separated list of JSONPath expression literals. List literals may appear on the left-hand side of `contains` or on the right-hand side of `in`.
#### Syntax ``` basic-expr = paren-expr / comparison-expr / membership-expr / test-expr membership-expr = comparable S membership-op S comparable membership-operator = "contains" / "in" membership-operand = literal / singular-query / ; singular query value function-expr / ; ValueType list-literal list-literal = "[" S literal *(S "," S literal) S "]" ``` #### Examples ```json title="Example JSON data" { "x": [{ "a": ["foo", "bar"] }, { "a": ["bar"] }], "y": [{ "a": { "foo": "bar" } }, { "a": { "bar": "baz" } }], "z": [{ "a": "foo" }, { "a": "bar" }] } ``` | Query | Result | Result Path | Comment | | ------------------------------------- | ----------------------- | ----------- | ------------------------------------ | | `$.x[?@.a contains 'foo']` | `{"a": ["foo", "bar"]}` | `$['x'][0]` | Array contains string literal | | `$.y[?@.a contains 'foo']` | `{"a": ["foo", "bar"]}` | `$['y'][0]` | Object contains string literal | | `$.x[?'foo' in @.a]` | `{"a": ["foo", "bar"]}` | `$['x'][0]` | String literal in array | | `$.y[?'foo' in @.a]` | `{"a": ["foo", "bar"]}` | `$['y'][0]` | String literal in object | | `$.z[?(['bar', 'baz'] contains @.a)]` | `{"a": "bar"}` | `$['z'][1]` | List literal contains embedded query | ### Regex operator `=~` is an infix operator that matches the left-hand side with a regular expression literal on the right-hand side. Regular expression literals use a syntax similar to that found in JavaScript, where the pattern to match is surrounded by slashes, `/pattern/`, optionally followed by flags, `/pattern/flags`. ``` $..products[?(@.description =~ /.*trainers/i)] ``` You can escape a solidus (`/`) with a reverse solidus (`\`). ``` $.some[?(@.thing =~ /fo\/[a-z]/)] ``` As a Python string literal, you'd need to double escape the reverse solidus or use a raw string literal. ```python query = r"$.some[?(@.thing =~ /fo\/[a-z]/)]" query = "$.some[?(@.thing =~ /fo\\/[a-z]/)]" ``` ### Union and intersection operators The union or concatenation operator, `|`, combines matches from two or more paths. The intersection operator, `&`, produces matches that are common to both left and right paths. Note that compound queries are not allowed inside filter expressions. 
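As a rough sketch of how a compound query might be used from Python (the example data and the output comment are assumptions for illustration):

```python
import jsonpath

data = {
    "categories": [
        {"name": "footwear", "products": [{"price": 89.99}]},
        {"name": "headwear", "products": [{"price": 15.0}]},
    ],
    "price_cap": 10,
}

# A union query compiles to a CompoundJSONPath, used like any other compiled path.
path = jsonpath.compile("$..products.*.price | $.price_cap")
print(path.findall(data))  # [89.99, 15.0, 10]
```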
#### Syntax ``` jsonpath-query = root-identifier segments compound-jsonpath-query = jsonpath-query compound-op jsonpath-query compound-op = "|" / "&" ``` #### Examples ```text $..products.*.price | $.price_cap ``` ```text $.categories[?(@.name == 'footwear')].products.* & $.categories[?(@.name == 'headwear')].products.* ``` jg-rp-python-jsonpath-830094f/jsonpath/000077500000000000000000000000001512714264000200035ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/jsonpath/__about__.py000066400000000000000000000002041512714264000222570ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023-present James Prior # # SPDX-License-Identifier: MIT __version__ = "2.0.2" jg-rp-python-jsonpath-830094f/jsonpath/__init__.py000066400000000000000000000255351512714264000221260ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023-present James Prior # # SPDX-License-Identifier: MIT from __future__ import annotations from typing import TYPE_CHECKING from typing import AsyncIterable from typing import Iterable from typing import List from typing import Optional from typing import Union from ._types import JSON from ._types import JSONData from ._types import JSONScalar from .env import JSONPathEnvironment from .exceptions import JSONPatchError from .exceptions import JSONPatchTestFailure from .exceptions import JSONPathError from .exceptions import JSONPathIndexError from .exceptions import JSONPathNameError from .exceptions import JSONPathSyntaxError from .exceptions import JSONPathTypeError from .exceptions import JSONPointerError from .exceptions import JSONPointerIndexError from .exceptions import JSONPointerKeyError from .exceptions import JSONPointerResolutionError from .exceptions import JSONPointerTypeError from .exceptions import RelativeJSONPointerError from .exceptions import RelativeJSONPointerIndexError from .exceptions import RelativeJSONPointerSyntaxError from .filter import UNDEFINED from .fluent_api import Projection from .fluent_api import Query from .lex import Lexer from .match import JSONPathMatch from .match import NodeList from .parse import Parser from .patch import JSONPatch from .path import CompoundJSONPath from .path import JSONPath from .pointer import JSONPointer from .pointer import RelativeJSONPointer from .pointer import resolve if TYPE_CHECKING: from .match import FilterContextVars __all__ = ( "compile", "CompoundJSONPath", "findall_async", "findall", "finditer_async", "finditer", "JSONPatch", "JSONPatchError", "JSONPatchTestFailure", "JSONPath", "JSONPathEnvironment", "JSONPathError", "JSONPathIndexError", "JSONPathMatch", "JSONPathNameError", "JSONPathSyntaxError", "JSONPathTypeError", "JSONPointer", "JSONPointerError", "JSONPointerIndexError", "JSONPointerKeyError", "JSONPointerResolutionError", "JSONPointerTypeError", "Lexer", "NodeList", "match", "Parser", "Projection", "query", "Query", "RelativeJSONPointer", "RelativeJSONPointerError", "RelativeJSONPointerIndexError", "RelativeJSONPointerSyntaxError", "resolve", "JSON", "JSONData", "JSONScalar", "UNDEFINED", ) # For convenience and to delegate to strict or non-strict environments. DEFAULT_ENV = JSONPathEnvironment() _STRICT_ENV = JSONPathEnvironment(strict=True) def compile(path: str, *, strict: bool = False) -> Union[JSONPath, CompoundJSONPath]: # noqa: A001 """Prepare a path string ready for repeated matching against different data. Arguments: path: A JSONPath as a string. strict: When `True`, compile the path for strict compliance with RFC 9535. 
Returns: A `JSONPath` or `CompoundJSONPath`, ready to match against some data. Expect a `CompoundJSONPath` if the path string uses the _union_ or _intersection_ operators. Raises: JSONPathSyntaxError: If _path_ is invalid. JSONPathTypeError: If filter functions are given arguments of an unacceptable type. """ return _STRICT_ENV.compile(path) if strict else DEFAULT_ENV.compile(path) def findall( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> List[object]: """Find all objects in _data_ matching the JSONPath _path_. If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return ( _STRICT_ENV.findall(path, data, filter_context=filter_context) if strict else DEFAULT_ENV.findall(path, data, filter_context=filter_context) ) async def findall_async( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> List[object]: """Find all objects in _data_ matching the JSONPath _path_. If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return ( await _STRICT_ENV.findall_async(path, data, filter_context=filter_context) if strict else await DEFAULT_ENV.findall_async(path, data, filter_context=filter_context) ) def finditer( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> Iterable[JSONPathMatch]: """Generate `JSONPathMatch` objects for each match of _path_ in _data_. If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: An iterator yielding `JSONPathMatch` objects for each match. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. 
""" return ( _STRICT_ENV.finditer(path, data, filter_context=filter_context) if strict else DEFAULT_ENV.finditer(path, data, filter_context=filter_context) ) async def finditer_async( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> AsyncIterable[JSONPathMatch]: """Find all objects in _data_ matching the JSONPath _path_. If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return ( await _STRICT_ENV.finditer_async(path, data, filter_context=filter_context) if strict else await DEFAULT_ENV.finditer_async(path, data, filter_context=filter_context) ) def match( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> Union[JSONPathMatch, None]: """Return a `JSONPathMatch` instance for the first object found in _data_. `None` is returned if there are no matches. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: A `JSONPathMatch` object for the first match, or `None` if there were no matches. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return ( _STRICT_ENV.match(path, data, filter_context=filter_context) if strict else DEFAULT_ENV.match(path, data, filter_context=filter_context) ) def query( path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, strict: bool = False, ) -> Query: """Return a `Query` iterator over matches found by applying _path_ to _data_. `Query` objects are iterable. ``` for match in jsonpath.query("$.foo..bar", data): ... ``` You can skip and limit results with `Query.skip()` and `Query.limit()`. ``` matches = ( jsonpath.query("$.foo..bar", data) .skip(5) .limit(10) ) for match in matches ... ``` `Query.tail()` will get the last _n_ results. ``` for match in jsonpath.query("$.foo..bar", data).tail(5): ... ``` Get values for each match using `Query.values()`. ``` for obj in jsonpath.query("$.foo..bar", data).limit(5).values(): ... ``` Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. strict: When `True`, compile and evaluate with strict compliance with RFC 9535. Returns: A query iterator. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. 
""" return ( _STRICT_ENV.query(path, data, filter_context=filter_context) if strict else DEFAULT_ENV.query(path, data, filter_context=filter_context) ) jg-rp-python-jsonpath-830094f/jsonpath/__main__.py000066400000000000000000000000751512714264000220770ustar00rootroot00000000000000"""CLI entry point.""" from jsonpath.cli import main main() jg-rp-python-jsonpath-830094f/jsonpath/_data.py000066400000000000000000000011011512714264000214160ustar00rootroot00000000000000import json import re from io import IOBase from typing import Any _RE_PROBABLY_MALFORMED = re.compile(r"[\{\}\[\]]") def load_data(data: object) -> Any: if isinstance(data, str): try: return json.loads(data) except json.JSONDecodeError: # Overly simple way to detect a malformed JSON document vs a # top-level string only document if _RE_PROBABLY_MALFORMED.search(data): raise return data if isinstance(data, IOBase): return json.loads(data.read()) return data jg-rp-python-jsonpath-830094f/jsonpath/_types.py000066400000000000000000000014621512714264000216630ustar00rootroot00000000000000from __future__ import annotations from io import IOBase from typing import Any from typing import Mapping from typing import Sequence from typing import Union JSONScalar = Union[str, int, float, bool, None] """A scalar JSON-like value. This includes primitive types that can appear in JSON: string, number, boolean, or null. """ JSON = Union[JSONScalar, Sequence[Any], Mapping[str, Any]] """A JSON-like data structure. This covers scalars, sequences (e.g. lists, tuples), and mappings (e.g. dictionaries with string keys). Values inside may be untyped (`Any`) rather than recursively constrained to `JSON` for flexibility. """ JSONData = Union[str, IOBase, JSON] """Input representing JSON content. Accepts: - a JSON-like object (`JSON`), - a raw JSON string, - or a file-like object containing JSON data. """ jg-rp-python-jsonpath-830094f/jsonpath/cli.py000066400000000000000000000237511512714264000211340ustar00rootroot00000000000000"""JSONPath, JSON Pointer and JSON Patch command line interface.""" import argparse import json import sys import jsonpath from jsonpath.__about__ import __version__ from jsonpath.exceptions import JSONPatchError from jsonpath.exceptions import JSONPathIndexError from jsonpath.exceptions import JSONPathSyntaxError from jsonpath.exceptions import JSONPathTypeError from jsonpath.exceptions import JSONPointerError INDENT = 2 def path_sub_command(parser: argparse.ArgumentParser) -> None: # noqa: D103 parser.set_defaults(func=handle_path_command) group = parser.add_mutually_exclusive_group(required=True) group.add_argument( "-q", "--query", help="JSONPath query string.", ) group.add_argument( "-r", "--path-file", type=argparse.FileType(mode="r"), help="Text file containing a JSONPath query.", ) parser.add_argument( "-f", "--file", type=argparse.FileType(mode="rb"), default=sys.stdin, help=( "File to read the target JSON document from. " "Defaults to reading from the standard input stream." ), ) parser.add_argument( "-o", "--output", type=argparse.FileType(mode="w"), default=sys.stdout, help=( "File to write resulting objects to, as a JSON array. " "Defaults to the standard output stream." ), ) parser.add_argument( "--no-type-checks", action="store_true", help="Disables filter expression well-typedness checks.", ) parser.add_argument( "--strict", action="store_true", help=( "Compile and evaluate JSONPath expressions with strict " "compliance with RFC 9535." 
), ) def pointer_sub_command(parser: argparse.ArgumentParser) -> None: # noqa: D103 parser.set_defaults(func=handle_pointer_command) group = parser.add_mutually_exclusive_group(required=True) group.add_argument( "-p", "--pointer", help="RFC 6901 formatted JSON Pointer string.", ) group.add_argument( "-r", "--pointer-file", type=argparse.FileType(mode="r"), help="Text file containing an RFC 6901 formatted JSON Pointer string.", ) parser.add_argument( "-f", "--file", type=argparse.FileType(mode="rb"), default=sys.stdin, help=( "File to read the target JSON document from. " "Defaults to reading from the standard input stream." ), ) parser.add_argument( "-o", "--output", type=argparse.FileType(mode="w"), default=sys.stdout, help=( "File to write the resulting object to, in JSON format. " "Defaults to the standard output stream." ), ) parser.add_argument( "-u", "--uri-decode", action="store_true", help="Unescape URI escape sequences found in JSON Pointers", ) def patch_sub_command(parser: argparse.ArgumentParser) -> None: # noqa: D103 parser.set_defaults(func=handle_patch_command) parser.add_argument( "patch", type=argparse.FileType(mode="rb"), metavar="PATCH", help="File containing an RFC 6902 formatted JSON Patch.", ) parser.add_argument( "-f", "--file", type=argparse.FileType(mode="rb"), default=sys.stdin, help=( "File to read the target JSON document from. " "Defaults to reading from the standard input stream." ), ) parser.add_argument( "-o", "--output", type=argparse.FileType(mode="w"), default=sys.stdout, help=( "File to write the resulting JSON document to. " "Defaults to the standard output stream." ), ) parser.add_argument( "-u", "--uri-decode", action="store_true", help="Unescape URI escape sequences found in JSON Pointers", ) _EPILOG = """\ Use [json COMMAND --help] for command specific help. Usage Examples: Find objects in source.json matching a JSONPath, write them to result.json. $ json path -q "$.foo['bar'][?@.baz > 1]" -f source.json -o result.json Resolve a JSON Pointer against source.json, pretty print the result to stdout. $ json --pretty pointer -p "/foo/bar/0" -f source.json Apply JSON Patch patch.json to JSON from stdin, output to result.json. 
$ cat source.json | json patch /path/to/patch.json -o result.json """ class DescriptionHelpFormatter( argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter, ): """Raw epilog formatter with defaults.""" def setup_parser() -> argparse.ArgumentParser: # noqa: D103 parser = argparse.ArgumentParser( prog="json", formatter_class=DescriptionHelpFormatter, description="JSONPath, JSON Pointer and JSON Patch utilities.", epilog=_EPILOG, ) parser.add_argument( "--debug", action="store_true", help="Show stack traces.", ) parser.add_argument( "--pretty", action="store_true", help="Add indents and newlines to output JSON.", ) parser.add_argument( "-v", "--version", action="version", version=f"python-jsonpath, version {__version__}", help="Show the version and exit.", ) parser.add_argument( "--no-unicode-escape", action="store_true", help="Disable decoding of UTF-16 escape sequence within paths and pointers.", ) subparsers = parser.add_subparsers( dest="command", required=True, metavar="COMMAND", ) path_sub_command( subparsers.add_parser( name="path", help="Find objects in a JSON document given a JSONPath.", description="Find objects in a JSON document given a JSONPath.", ) ) pointer_sub_command( subparsers.add_parser( name="pointer", help="Resolve a JSON Pointer against a JSON document.", description="Resolve a JSON Pointer against a JSON document.", ) ) patch_sub_command( subparsers.add_parser( name="patch", help="Apply a JSON Patch to a JSON document.", description="Apply a JSON Patch to a JSON document.", ) ) return parser def handle_path_command(args: argparse.Namespace) -> None: # noqa: PLR0912 """Handle the `path` sub command.""" # Empty string is OK. if args.query is not None: query = args.query else: query = args.query_file.read().strip() try: path = jsonpath.JSONPathEnvironment( unicode_escape=not args.no_unicode_escape, well_typed=not args.no_type_checks, strict=args.strict, ).compile(query) except JSONPathSyntaxError as err: if args.debug: raise sys.stderr.write(f"json path syntax error: {err}\n") sys.exit(1) except JSONPathTypeError as err: if args.debug: raise sys.stderr.write(f"json path type error: {err}\n") sys.exit(1) except JSONPathIndexError as err: if args.debug: raise sys.stderr.write(f"json path index error: {err}\n") sys.exit(1) try: matches = path.findall(args.file) except json.JSONDecodeError as err: if args.debug: raise sys.stderr.write(f"target document json decode error: {err}\n") sys.exit(1) except JSONPathTypeError as err: # Type errors are currently only occurring are compile-time. if args.debug: raise sys.stderr.write(f"json path type error: {err}\n") sys.exit(1) indent = INDENT if args.pretty else None json.dump(matches, args.output, indent=indent) def handle_pointer_command(args: argparse.Namespace) -> None: """Handle the `pointer` sub command.""" # Empty string is OK. 
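    # Use the inline --pointer value when given, otherwise fall back to reading
    # the pointer from the --pointer-file argument.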
if args.pointer is not None: pointer = args.pointer else: pointer = args.pointer_file.read().strip() try: match = jsonpath.pointer.resolve( pointer, args.file, unicode_escape=not args.no_unicode_escape, uri_decode=args.uri_decode, ) except json.JSONDecodeError as err: if args.debug: raise sys.stderr.write(f"target document json decode error: {err}\n") sys.exit(1) except JSONPointerError as err: if args.debug: raise sys.stderr.write(str(err) + "\n") sys.exit(1) indent = INDENT if args.pretty else None json.dump(match, args.output, indent=indent) def handle_patch_command(args: argparse.Namespace) -> None: """Handle the `patch` sub command.""" try: patch = json.load(args.patch) except json.JSONDecodeError as err: if args.debug: raise sys.stderr.write(f"patch document json decode error: {err}\n") sys.exit(1) if not isinstance(patch, list): sys.stderr.write( "error: patch file does not look like an array of patch operations" ) sys.exit(1) try: patched = jsonpath.patch.apply( patch, args.file, unicode_escape=not args.no_unicode_escape, uri_decode=args.uri_decode, ) except json.JSONDecodeError as err: if args.debug: raise sys.stderr.write(f"target document json decode error: {err}\n") sys.exit(1) except JSONPatchError as err: if args.debug: raise sys.stderr.write(str(err) + "\n") sys.exit(1) indent = INDENT if args.pretty else None json.dump(patched, args.output, indent=indent) def main() -> None: """CLI argument parser entry point.""" parser = setup_parser() args = parser.parse_args() args.func(args) if __name__ == "__main__": main() jg-rp-python-jsonpath-830094f/jsonpath/env.py000066400000000000000000000577341512714264000211650ustar00rootroot00000000000000"""Core JSONPath configuration object.""" from __future__ import annotations try: import regex # noqa: F401 REGEX_AVAILABLE = True except ImportError: REGEX_AVAILABLE = False try: import iregexp_check # noqa: F401 IREGEXP_AVAILABLE = True except ImportError: IREGEXP_AVAILABLE = False from decimal import Decimal from operator import getitem from typing import TYPE_CHECKING from typing import Any from typing import AsyncIterable from typing import Callable from typing import Dict from typing import Iterable from typing import List from typing import Mapping from typing import Optional from typing import Sequence from typing import Type from typing import Union from . import function_extensions from .exceptions import JSONPathNameError from .exceptions import JSONPathSyntaxError from .exceptions import JSONPathTypeError from .filter import UNDEFINED from .filter import VALUE_TYPE_EXPRESSIONS from .filter import BaseExpression from .filter import FilterQuery from .filter import FunctionExtension from .filter import InfixExpression from .fluent_api import Query from .function_extensions import ExpressionType from .function_extensions import FilterFunction from .function_extensions import validate from .lex import Lexer from .match import JSONPathMatch from .match import NodeList from .parse import Parser from .path import CompoundJSONPath from .path import JSONPath from .stream import TokenStream from .token import TOKEN_EOF from .token import TOKEN_INTERSECTION from .token import TOKEN_PSEUDO_ROOT from .token import TOKEN_UNION from .token import Token if TYPE_CHECKING: from ._types import JSONData from .match import FilterContextVars class JSONPathEnvironment: """JSONPath configuration. This class contains settings for path tokenization, parsing and resolution behavior, plus convenience methods for matching an unparsed path to some data. 
Most applications will want to create a single `JSONPathEnvironment`, or use `jsonpath.compile()`, `jsonpath.findall()`, etc. from the package-level default environment. ## Environment customization Environment customization is achieved by subclassing `JSONPathEnvironment` and overriding class attributes and/or methods. Some of these customizations include: - Changing the root (`$`), self (`@`) or filter context (`_`) token with class attributes `root_token`, `self_token` and `filter_context_token`. - Registering a custom lexer or parser with the class attributes `lexer_class` or `parser_class`. `lexer_class` must be a subclass of [`Lexer`]() and `parser_class` must be a subclass of [`Parser`](). - Setup built-in function extensions by overriding `setup_function_extensions()` - Hook in to mapping and sequence item getting by overriding `getitem()`. - Change filter comparison operator behavior by overriding `compare()`. Arguments: filter_caching (bool): If `True`, filter expressions will be cached where possible. unicode_escape: If `True`, decode UTF-16 escape sequences found in JSONPath string literals. well_typed: Control well-typedness checks on filter function expressions. If `True` (the default), JSONPath expressions are checked for well-typedness as compile time. **New in version 0.10.0** strict: When `True`, follow RFC 9535 strictly. **New in version 2.0.0** ## Class attributes Attributes: pseudo_root_token (str): The pattern used to select a "fake" root node, one level above the real root node. filter_context_token (str): The pattern used to select extra filter context data. Defaults to `"_"`. intersection_token (str): The pattern used as the intersection operator. Defaults to `"&"`. key_token (str): The pattern used to identify the current key or index when filtering a mapping or sequence. Defaults to `"#"`. keys_selector_token (str): The pattern used as the "keys" selector. Defaults to `"~"`. keys_filter_token (str): The pattern used as the "keys filter" selector. Defaults to `"~?"`. lexer_class: The lexer to use when tokenizing path strings. max_int_index (int): The maximum integer allowed when selecting array items by index. Defaults to `(2**53) - 1`. min_int_index (int): The minimum integer allowed when selecting array items by index. Defaults to `-(2**53) + 1`. max_recursion_depth (int): The maximum number of dict/objects and/or arrays/ lists the recursive descent selector can visit before a `JSONPathRecursionError` is thrown. parser_class: The parser to use when parsing tokens from the lexer. root_token (str): The pattern used to select the root node in a JSON document. Defaults to `"$"`. self_token (str): The pattern used to select the current node in a JSON document. Defaults to `"@"` union_token (str): The pattern used as the union operator. Defaults to `"|"`. """ # These should be unescaped strings. `re.escape` will be called on them # automatically when compiling lexer rules. pseudo_root_token = "^" filter_context_token = "_" intersection_token = "&" key_token = "#" keys_selector_token = "~" keys_filter_token = "~?" root_token = "$" self_token = "@" union_token = "|" max_int_index = (2**53) - 1 min_int_index = -(2**53) + 1 max_recursion_depth = 100 # Override these to customize path tokenization and parsing. 
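    # For example, a subclass could swap in custom lexer/parser classes
    # (an illustrative sketch; `MyLexer` and `MyParser` are hypothetical
    # subclasses of `Lexer` and `Parser`):
    #
    #     class MyEnvironment(JSONPathEnvironment):
    #         lexer_class = MyLexer
    #         parser_class = MyParser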
lexer_class: Type[Lexer] = Lexer parser_class: Type[Parser] = Parser match_class: Type[JSONPathMatch] = JSONPathMatch def __init__( self, *, filter_caching: bool = True, unicode_escape: bool = True, well_typed: bool = True, strict: bool = False, ) -> None: self.filter_caching: bool = filter_caching """Enable or disable filter expression caching.""" self.unicode_escape: bool = unicode_escape """Enable or disable decoding of UTF-16 escape sequences found in JSONPath string literals.""" self.well_typed: bool = well_typed """Control well-typedness checks on filter function expressions.""" self.strict: bool = strict """When `True`, follow RFC 9535 strictly. This includes things like enforcing a leading root identifier and ensuring there's no leading or trailing whitespace when parsing a JSONPath query. """ self.regex_available: bool = REGEX_AVAILABLE """When `True`, the third party `regex` package is available.""" self.iregexp_available: bool = IREGEXP_AVAILABLE """When `True`, the iregexp_check package is available. iregexp_check will be used to validate regular expressions against RFC 9485, if available. """ self.lexer: Lexer = self.lexer_class(env=self) """The lexer bound to this environment.""" self.parser: Parser = self.parser_class(env=self) """The parser bound to this environment.""" self.function_extensions: Dict[str, Callable[..., Any]] = {} """A list of function extensions available to filters.""" self.setup_function_extensions() def compile(self, path: str) -> Union[JSONPath, CompoundJSONPath]: # noqa: A003 """Prepare a path string ready for repeated matching against different data. Arguments: path: A JSONPath as a string. Returns: A `JSONPath` or `CompoundJSONPath`, ready to match against some data. Expect a `CompoundJSONPath` if the path string uses the _union_ or _intersection_ operators. Raises: JSONPathSyntaxError: If _path_ is invalid. JSONPathTypeError: If filter functions are given arguments of an unacceptable type. """ tokens = self.lexer.tokenize(path) stream = TokenStream(tokens) pseudo_root = stream.current().kind == TOKEN_PSEUDO_ROOT _path: Union[JSONPath, CompoundJSONPath] = JSONPath( env=self, segments=self.parser.parse(stream), pseudo_root=pseudo_root ) if stream.skip_whitespace() and self.strict: raise JSONPathSyntaxError( "unexpected whitespace", token=stream.tokens[stream.pos - 1] ) if stream.current().kind != TOKEN_EOF: _path = CompoundJSONPath(env=self, path=_path) while stream.current().kind != TOKEN_EOF: if stream.peek().kind == TOKEN_EOF: # trailing union or intersection raise JSONPathSyntaxError( f"expected a path after {stream.current().value!r}", token=stream.current(), ) if stream.current().kind == TOKEN_UNION: stream.next() stream.skip_whitespace() pseudo_root = stream.current().kind == TOKEN_PSEUDO_ROOT _path = _path.union( JSONPath( env=self, segments=self.parser.parse(stream), pseudo_root=pseudo_root, ) ) elif stream.current().kind == TOKEN_INTERSECTION: stream.next() stream.skip_whitespace() pseudo_root = stream.current().kind == TOKEN_PSEUDO_ROOT _path = _path.intersection( JSONPath( env=self, segments=self.parser.parse(stream), pseudo_root=pseudo_root, ) ) else: # pragma: no cover # Parser.parse catches this too raise JSONPathSyntaxError( # noqa: TRY003 f"unexpected token {stream.current().value!r}", token=stream.current(), ) return _path def findall( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> List[object]: """Find all objects in _data_ matching the JSONPath _path_. 
If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return self.compile(path).findall(data, filter_context=filter_context) def finditer( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> Iterable[JSONPathMatch]: """Generate `JSONPathMatch` objects for each match of _path_ in _data_. If _data_ is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: An iterator yielding `JSONPathMatch` objects for each match. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return self.compile(path).finditer(data, filter_context=filter_context) def match( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> Union[JSONPathMatch, None]: """Return a `JSONPathMatch` instance for the first object found in _data_. `None` is returned if there are no matches. Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A `JSONPathMatch` object for the first match, or `None` if there were no matches. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return self.compile(path).match(data, filter_context=filter_context) def query( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> Query: """Return a `Query` iterator over matches found by applying _path_ to _data_. `Query` objects are iterable. ``` for match in jsonpath.query("$.foo..bar", data): ... ``` You can skip and limit results with `Query.skip()` and `Query.limit()`. ``` matches = ( jsonpath.query("$.foo..bar", data) .skip(5) .limit(10) ) for match in matches ... ``` `Query.tail()` will get the last _n_ results. ``` for match in jsonpath.query("$.foo..bar", data).tail(5): ... ``` Get values for each match using `Query.values()`. ``` for obj in jsonpath.query("$.foo..bar", data).limit(5).values(): ... ``` Arguments: path: The JSONPath as a string. data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A query iterator. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. 
""" return Query(self.finditer(path, data, filter_context=filter_context), self) async def findall_async( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> List[object]: """An async version of `findall()`.""" return await self.compile(path).findall_async( data, filter_context=filter_context ) async def finditer_async( self, path: str, data: JSONData, *, filter_context: Optional[FilterContextVars] = None, ) -> AsyncIterable[JSONPathMatch]: """An async version of `finditer()`.""" return await self.compile(path).finditer_async( data, filter_context=filter_context ) def setup_function_extensions(self) -> None: """Initialize function extensions.""" self.function_extensions["length"] = function_extensions.Length() self.function_extensions["count"] = function_extensions.Count() self.function_extensions["match"] = function_extensions.Match() self.function_extensions["search"] = function_extensions.Search() self.function_extensions["value"] = function_extensions.Value() if not self.strict: self.function_extensions["isinstance"] = function_extensions.IsInstance() self.function_extensions["is"] = self.function_extensions["isinstance"] self.function_extensions["typeof"] = function_extensions.TypeOf() self.function_extensions["type"] = self.function_extensions["typeof"] self.function_extensions["startswith"] = function_extensions.StartsWith() def validate_function_extension_signature( self, token: Token, args: List[Any] ) -> List[Any]: """Compile-time validation of function extension arguments. RFC 9535 requires us to reject paths that use filter functions with too many or too few arguments. """ try: func = self.function_extensions[token.value] except KeyError as err: raise JSONPathNameError( f"function {token.value!r} is not defined", token=token ) from err # Type-aware function extensions use the spec's type system. if self.well_typed and isinstance(func, FilterFunction): self.check_well_typedness(token, func, args) return args # A callable with a `validate` method? if hasattr(func, "validate"): args = func.validate(self, args, token) assert isinstance(args, list) return args # Generic validation using introspection. return validate(self, func, args, token) def check_well_typedness( self, token: Token, func: FilterFunction, args: List[BaseExpression], ) -> None: """Check the well-typedness of a function's arguments at compile-time.""" # Correct number of arguments? 
if len(args) != len(func.arg_types): plural = "" if len(func.arg_types) == 1 else "s" raise JSONPathTypeError( f"{token.value}() requires {len(func.arg_types)} argument{plural}", token=token, ) # Argument types for idx, typ in enumerate(func.arg_types): arg = args[idx] if typ == ExpressionType.VALUE: if not ( isinstance(arg, VALUE_TYPE_EXPRESSIONS) or (isinstance(arg, FilterQuery) and arg.path.singular_query()) or (self._function_return_type(arg) == ExpressionType.VALUE) ): raise JSONPathTypeError( f"{token.value}() argument {idx} must be of ValueType", token=token, ) elif typ == ExpressionType.LOGICAL: if not isinstance(arg, (FilterQuery, InfixExpression)): raise JSONPathTypeError( f"{token.value}() argument {idx} must be of LogicalType", token=token, ) elif typ == ExpressionType.NODES and not ( isinstance(arg, FilterQuery) or self._function_return_type(arg) == ExpressionType.NODES ): raise JSONPathTypeError( f"{token.value}() argument {idx} must be of NodesType", token=token, ) def _function_return_type(self, expr: BaseExpression) -> Optional[ExpressionType]: """Return the type returned from a filter function. If _expr_ is not a `FunctionExtension` or the registered function definition is not type-aware, return `None`. """ if not isinstance(expr, FunctionExtension): return None func = self.function_extensions.get(expr.name) if isinstance(func, FilterFunction): return func.return_type return None def getitem(self, obj: Any, key: Any) -> Any: """Sequence and mapping item getter used throughout JSONPath resolution. The default implementation of `getitem` simply calls `operators.getitem()` from Python's standard library. Same as `obj[key]`. Arguments: obj: A mapping or sequence that might contain _key_. key: A mapping key, sequence index or sequence slice. """ return getitem(obj, key) async def getitem_async(self, obj: Any, key: object) -> Any: """An async sequence and mapping item getter.""" if hasattr(obj, "__getitem_async__"): return await obj.__getitem_async__(key) return getitem(obj, key) def is_truthy(self, obj: object) -> bool: """Test for truthiness when evaluating JSONPath filter expressions. In some cases, RFC 9535 requires us to test for existence rather than truthiness. So the default implementation returns `True` for empty collections and `None`. The special `UNDEFINED` object means that _obj_ was missing, as opposed to an explicit `None`. Arguments: obj: Any object. Returns: `True` if the object exists and is not `False` or `0`. """ if isinstance(obj, NodeList) and len(obj) == 0: return False if obj is UNDEFINED: return False if obj is None: return True return bool(obj) def compare( # noqa: PLR0911 self, left: object, operator: str, right: object ) -> bool: """Object comparison within JSONPath filters. Override this to customize filter expression comparison operator behavior. Args: left: The left hand side of the comparison expression. operator: The comparison expression's operator. right: The right hand side of the comparison expression. Returns: `True` if the comparison between _left_ and _right_, with the given _operator_, is truthy. `False` otherwise. 
""" if operator == "&&": return self.is_truthy(left) and self.is_truthy(right) if operator == "||": return self.is_truthy(left) or self.is_truthy(right) if operator == "==": return self._eq(left, right) if operator == "!=": return not self._eq(left, right) if operator == "<": return self._lt(left, right) if operator == ">": return self._lt(right, left) if operator == ">=": return self._lt(right, left) or self._eq(left, right) if operator == "<=": return self._lt(left, right) or self._eq(left, right) if operator == "in" and isinstance(right, (Mapping, Sequence)): return left in right if operator == "contains" and isinstance(left, (Mapping, Sequence)): return right in left if operator == "=~" and hasattr(right, "fullmatch") and isinstance(left, str): # Right should be a regex.Pattern or an re.Pattern. return bool(right.fullmatch(left)) return False def _eq(self, left: object, right: object) -> bool: # noqa: PLR0911 if isinstance(right, NodeList): left, right = right, left if isinstance(left, NodeList): if isinstance(right, NodeList): return left == right if left.empty(): return right is UNDEFINED if len(left) == 1: return left[0] == right return False if left is UNDEFINED and right is UNDEFINED: return True # Remember 1 == True and 0 == False in Python if isinstance(right, bool): left, right = right, left if isinstance(left, bool): return isinstance(right, bool) and left == right return left == right def _lt(self, left: object, right: object) -> bool: if isinstance(left, str) and isinstance(right, str): return left < right if isinstance(left, (int, float, Decimal)) and isinstance( right, (int, float, Decimal) ): return left < right return False jg-rp-python-jsonpath-830094f/jsonpath/exceptions.py000066400000000000000000000145421512714264000225440ustar00rootroot00000000000000"""JSONPath exceptions.""" from __future__ import annotations from typing import TYPE_CHECKING from typing import Optional from .token import TOKEN_EOF if TYPE_CHECKING: from .token import Token class JSONPathError(Exception): """Base exception for all errors. Arguments: args: Arguments passed to `Exception`. token: The token that caused the error. 
""" def __init__(self, *args: object, token: Optional[Token] = None) -> None: super().__init__(*args) self.token: Optional[Token] = token def __str__(self) -> str: return self.detailed_message() def detailed_message(self) -> str: """Return an error message formatted with extra context info.""" if not self.token: return super().__str__() lineno, col, _prev, current, _next = self._error_context( self.token.path, self.token.index ) if self.token.kind == TOKEN_EOF: col = len(current) pad = " " * len(str(lineno)) length = len(self.token.value) pointer = (" " * col) + ("^" * max(length, 1)) return ( f"{self.message}\n" f"{pad} -> {self.token.path!r} {lineno}:{col}\n" f"{pad} |\n" f"{lineno} | {current}\n" f"{pad} | {pointer} {self.message}\n" ) @property def message(self) -> object: """The exception's error message if one was given.""" if self.args: return self.args[0] return None def _error_context(self, text: str, index: int) -> tuple[int, int, str, str, str]: lines = text.splitlines(keepends=True) cumulative_length = 0 target_line_index = -1 for i, line in enumerate(lines): cumulative_length += len(line) if index < cumulative_length: target_line_index = i break if target_line_index == -1: raise ValueError("index is out of bounds for the given string") # Line number (1-based) line_number = target_line_index + 1 # Column number within the line column_number = index - (cumulative_length - len(lines[target_line_index])) previous_line = ( lines[target_line_index - 1].rstrip() if target_line_index > 0 else "" ) current_line = lines[target_line_index].rstrip() next_line = ( lines[target_line_index + 1].rstrip() if target_line_index < len(lines) - 1 else "" ) return line_number, column_number, previous_line, current_line, next_line class JSONPathSyntaxError(JSONPathError): """An exception raised when parsing a JSONPath string. Arguments: args: Arguments passed to `Exception`. token: The token that caused the error. """ def __init__(self, *args: object, token: Token) -> None: super().__init__(*args) self.token = token class JSONPathTypeError(JSONPathError): """An exception raised due to a type error. This should only occur at when evaluating filter expressions. """ class JSONPathIndexError(JSONPathError): """An exception raised when an array index is out of range. Arguments: args: Arguments passed to `Exception`. token: The token that caused the error. """ def __init__(self, *args: object, token: Token) -> None: super().__init__(*args) self.token = token class JSONPathNameError(JSONPathError): """An exception raised when an unknown function extension is called. Arguments: args: Arguments passed to `Exception`. token: The token that caused the error. """ def __init__(self, *args: object, token: Token) -> None: super().__init__(*args) self.token = token class JSONPathRecursionError(JSONPathError): """An exception raised when the maximum recursion depth is reached. Arguments: args: Arguments passed to `Exception`. token: The token that caused the error. 
""" def __init__(self, *args: object, token: Token) -> None: super().__init__(*args) self.token = token class JSONPointerError(Exception): """Base class for all JSON Pointer errors.""" class JSONPointerResolutionError(JSONPointerError): """Base exception for those that can be raised during pointer resolution.""" class JSONPointerIndexError(JSONPointerResolutionError, IndexError): """An exception raised when an array index is out of range.""" def __str__(self) -> str: return f"pointer index error: {super().__str__()}" class JSONPointerKeyError(JSONPointerResolutionError, KeyError): """An exception raised when a pointer references a mapping with a missing key.""" def __str__(self) -> str: return f"pointer key error: {super().__str__()}" class JSONPointerTypeError(JSONPointerResolutionError, TypeError): """An exception raised when a pointer resolves a string against a sequence.""" def __str__(self) -> str: return f"pointer type error: {super().__str__()}" class RelativeJSONPointerError(Exception): """Base class for all Relative JSON Pointer errors.""" class RelativeJSONPointerIndexError(RelativeJSONPointerError): """An exception raised when modifying a pointer index out of range.""" class RelativeJSONPointerSyntaxError(RelativeJSONPointerError): """An exception raised when we fail to parse a relative JSON Pointer.""" def __init__(self, msg: str, rel: str) -> None: super().__init__(msg) self.rel = rel def __str__(self) -> str: if not self.rel: return super().__str__() msg = self.rel[:7] if len(msg) == 6: # noqa: PLR2004 msg += ".." return f"{super().__str__()} {msg!r}" class JSONPatchError(Exception): """Base class for all JSON Patch errors.""" class JSONPatchTestFailure(JSONPatchError): # noqa: N818 """An exception raised when a JSON Patch _test_ op fails.""" def _truncate_message(value: str, num: int, end: str = "...") -> str: if len(value) < num: return value return f"{value[: num - len(end)]}{end}" def _truncate_words(val: str, num: int, end: str = "...") -> str: # Replaces consecutive whitespace with a single newline. words = val.split() if len(words) < num: return " ".join(words) return " ".join(words[:num]) + end jg-rp-python-jsonpath-830094f/jsonpath/filter.py000066400000000000000000000541311512714264000216460ustar00rootroot00000000000000"""Filter expression nodes.""" from __future__ import annotations import copy import re from abc import ABC from abc import abstractmethod from typing import TYPE_CHECKING from typing import Any from typing import Callable from typing import Generic from typing import Iterable from typing import List from typing import Mapping from typing import Pattern from typing import Sequence from typing import TypeVar from jsonpath.function_extensions.filter_function import ExpressionType from .exceptions import JSONPathTypeError from .function_extensions import FilterFunction from .match import NodeList from .selectors import Filter as FilterSelector from .serialize import canonical_string if TYPE_CHECKING: from .path import JSONPath from .selectors import FilterContext class BaseExpression(ABC): """Base class for all filter expression nodes.""" __slots__ = ("volatile",) FORCE_CACHE = False def __init__(self) -> None: self.volatile: bool = any(child.volatile for child in self.children()) @abstractmethod def evaluate(self, context: FilterContext) -> object: """Resolve the filter expression in the given _context_. Arguments: context: Contextual information the expression might choose use during evaluation. Returns: The result of evaluating the expression. 
""" @abstractmethod async def evaluate_async(self, context: FilterContext) -> object: """An async version of `evaluate`.""" @abstractmethod def children(self) -> List[BaseExpression]: """Return a list of direct child expressions.""" @abstractmethod def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 """Update this expression's child expressions. _children_ is assumed to have the same number of items as is returned by _self.children_, and in the same order. """ class Nil(BaseExpression): """The constant `nil`. Also aliased as `null` and `None`, sometimes. """ __slots__ = () def __eq__(self, other: object) -> bool: return other is None or isinstance(other, Nil) def __repr__(self) -> str: # pragma: no cover return "NIL()" def __str__(self) -> str: # pragma: no cover return "nil" def evaluate(self, _: FilterContext) -> None: return None async def evaluate_async(self, _: FilterContext) -> None: return None def children(self) -> List[BaseExpression]: return [] def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 return NIL = Nil() class _Undefined: __slots__ = () def __eq__(self, other: object) -> bool: return ( other is UNDEFINED_LITERAL or other is UNDEFINED or (isinstance(other, NodeList) and other.empty()) ) def __str__(self) -> str: return "" def __repr__(self) -> str: return "" # This is equivalent to the spec's special `Nothing` value. UNDEFINED = _Undefined() class Undefined(BaseExpression): """The constant `undefined`.""" __slots__ = () def __eq__(self, other: object) -> bool: return ( isinstance(other, Undefined) or other is UNDEFINED or (isinstance(other, NodeList) and len(other) == 0) ) def __str__(self) -> str: return "undefined" def evaluate(self, _: FilterContext) -> object: return UNDEFINED async def evaluate_async(self, _: FilterContext) -> object: return UNDEFINED def children(self) -> List[BaseExpression]: return [] def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 return UNDEFINED_LITERAL = Undefined() LITERAL_EXPRESSION_T = TypeVar("LITERAL_EXPRESSION_T") class FilterExpressionLiteral(BaseExpression, Generic[LITERAL_EXPRESSION_T]): """Base class for filter expression literals.""" __slots__ = ("value",) def __init__(self, *, value: LITERAL_EXPRESSION_T) -> None: self.value = value super().__init__() def __str__(self) -> str: return repr(self.value).lower() def __eq__(self, other: object) -> bool: return self.value == other def __hash__(self) -> int: return hash(self.value) def evaluate(self, _: FilterContext) -> LITERAL_EXPRESSION_T: return self.value async def evaluate_async(self, _: FilterContext) -> LITERAL_EXPRESSION_T: return self.value def children(self) -> List[BaseExpression]: return [] def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 return class BooleanLiteral(FilterExpressionLiteral[bool]): """A Boolean `True` or `False`.""" __slots__ = () TRUE = BooleanLiteral(value=True) FALSE = BooleanLiteral(value=False) class StringLiteral(FilterExpressionLiteral[str]): """A string literal.""" __slots__ = () def __str__(self) -> str: return canonical_string(self.value) class IntegerLiteral(FilterExpressionLiteral[int]): """An integer literal.""" __slots__ = () class FloatLiteral(FilterExpressionLiteral[float]): """A float literal.""" __slots__ = () class RegexLiteral(FilterExpressionLiteral[Pattern[str]]): """A regex literal.""" __slots__ = () RE_FLAG_MAP = { re.A: "a", re.I: "i", re.M: "m", re.S: "s", } RE_UNESCAPE = re.compile(r"\\(.)") def __str__(self) -> str: 
flags: List[str] = [] for flag, ch in self.RE_FLAG_MAP.items(): if self.value.flags & flag: flags.append(ch) return f"/{self.value.pattern}/{''.join(flags)}" class ListLiteral(BaseExpression): """A list literal.""" __slots__ = ("items",) def __init__(self, items: List[BaseExpression]) -> None: self.items = items super().__init__() def __str__(self) -> str: items = ", ".join(str(item) for item in self.items) return f"[{items}]" def __eq__(self, other: object) -> bool: return isinstance(other, ListLiteral) and self.items == other.items def evaluate(self, context: FilterContext) -> object: return [item.evaluate(context) for item in self.items] async def evaluate_async(self, context: FilterContext) -> object: return [await item.evaluate_async(context) for item in self.items] def children(self) -> List[BaseExpression]: return self.items def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 self.items = children class PrefixExpression(BaseExpression): """An expression composed of a prefix operator and another expression.""" __slots__ = ("operator", "right") def __init__(self, operator: str, right: BaseExpression): self.operator = operator self.right = right super().__init__() def __str__(self) -> str: return f"{self.operator}{self.right}" def __eq__(self, other: object) -> bool: return ( isinstance(other, PrefixExpression) and self.operator == other.operator and self.right == other.right ) def _evaluate(self, context: FilterContext, right: object) -> object: if self.operator == "!": return not context.env.is_truthy(right) raise JSONPathTypeError(f"unknown operator {self.operator} {self.right}") def evaluate(self, context: FilterContext) -> object: return self._evaluate(context, self.right.evaluate(context)) async def evaluate_async(self, context: FilterContext) -> object: return self._evaluate(context, await self.right.evaluate_async(context)) def children(self) -> List[BaseExpression]: return [self.right] def set_children(self, children: List[BaseExpression]) -> None: assert len(children) == 1 self.right = children[0] class InfixExpression(BaseExpression): """A pair of expressions and a comparison or logical operator.""" __slots__ = ("left", "operator", "right", "logical") def __init__( self, left: BaseExpression, operator: str, right: BaseExpression, ): self.left = left self.operator = operator self.right = right self.logical = operator in ("&&", "||") super().__init__() def __str__(self) -> str: if self.logical: return f"({self.left} {self.operator} {self.right})" return f"{self.left} {self.operator} {self.right}" def __eq__(self, other: object) -> bool: return ( isinstance(other, InfixExpression) and self.left == other.left and self.operator == other.operator and self.right == other.right ) def evaluate(self, context: FilterContext) -> bool: left = self.left.evaluate(context) if not self.logical and isinstance(left, NodeList) and len(left) == 1: left = left[0].obj right = self.right.evaluate(context) if not self.logical and isinstance(right, NodeList) and len(right) == 1: right = right[0].obj return context.env.compare(left, self.operator, right) async def evaluate_async(self, context: FilterContext) -> bool: left = await self.left.evaluate_async(context) if not self.logical and isinstance(left, NodeList) and len(left) == 1: left = left[0].obj right = await self.right.evaluate_async(context) if not self.logical and isinstance(right, NodeList) and len(right) == 1: right = right[0].obj return context.env.compare(left, self.operator, right) def children(self) -> 
List[BaseExpression]: return [self.left, self.right] def set_children(self, children: List[BaseExpression]) -> None: assert len(children) == 2 # noqa: PLR2004 self.left = children[0] self.right = children[1] PRECEDENCE_LOWEST = 1 PRECEDENCE_LOGICAL_OR = 3 PRECEDENCE_LOGICAL_AND = 4 PRECEDENCE_PREFIX = 7 class FilterExpression(BaseExpression): """An expression that evaluates to `True` or `False`.""" __slots__ = ("expression",) def __init__(self, expression: BaseExpression): self.expression = expression super().__init__() def cache_tree(self) -> FilterExpression: """Return a copy of _self.expression_ augmented with caching nodes.""" def _cache_tree(expr: BaseExpression) -> BaseExpression: children = expr.children() if expr.volatile: _expr = copy.copy(expr) elif not expr.FORCE_CACHE and len(children) == 0: _expr = expr else: _expr = CachingFilterExpression(copy.copy(expr)) _expr.set_children([_cache_tree(child) for child in children]) return _expr return FilterExpression(_cache_tree(copy.copy(self.expression))) def cacheable_nodes(self) -> bool: """Return `True` if there are any cacheable nodes in this expression tree.""" return any( isinstance(node, CachingFilterExpression) for node in walk(self.cache_tree()) ) def __str__(self) -> str: return self._canonical_string(self.expression, PRECEDENCE_LOWEST) def __eq__(self, other: object) -> bool: return ( isinstance(other, FilterExpression) and self.expression == other.expression ) def _canonical_string( self, expression: BaseExpression, parent_precedence: int ) -> str: if isinstance(expression, InfixExpression): if expression.operator == "&&": left = self._canonical_string(expression.left, PRECEDENCE_LOGICAL_AND) right = self._canonical_string(expression.right, PRECEDENCE_LOGICAL_AND) expr = f"{left} && {right}" return ( f"({expr})" if parent_precedence >= PRECEDENCE_LOGICAL_AND else expr ) if expression.operator == "||": left = self._canonical_string(expression.left, PRECEDENCE_LOGICAL_OR) right = self._canonical_string(expression.right, PRECEDENCE_LOGICAL_OR) expr = f"{left} || {right}" return ( f"({expr})" if parent_precedence >= PRECEDENCE_LOGICAL_OR else expr ) if isinstance(expression, PrefixExpression): operand = self._canonical_string(expression.right, PRECEDENCE_PREFIX) expr = f"!{operand}" return f"({expr})" if parent_precedence > PRECEDENCE_PREFIX else expr return str(expression) def evaluate(self, context: FilterContext) -> bool: return context.env.is_truthy(self.expression.evaluate(context)) async def evaluate_async(self, context: FilterContext) -> bool: return context.env.is_truthy(await self.expression.evaluate_async(context)) def children(self) -> List[BaseExpression]: return [self.expression] def set_children(self, children: List[BaseExpression]) -> None: assert len(children) == 1 self.expression = children[0] class CachingFilterExpression(BaseExpression): """A FilterExpression wrapper that caches the result.""" __slots__ = ( "_cached", "_expr", ) _UNSET = object() def __init__(self, expression: BaseExpression): self.volatile = False self._expr = expression self._cached: object = self._UNSET def evaluate(self, context: FilterContext) -> object: if self._cached is self._UNSET: self._cached = self._expr.evaluate(context) return self._cached async def evaluate_async(self, context: FilterContext) -> object: if self._cached is self._UNSET: self._cached = await self._expr.evaluate_async(context) return self._cached def children(self) -> List[BaseExpression]: return self._expr.children() def set_children(self, children: 
List[BaseExpression]) -> None: self._expr.set_children(children) class FilterQuery(BaseExpression, ABC): """Base expression for all _sub paths_ found in filter expressions.""" __slots__ = ("path",) def __init__(self, path: JSONPath) -> None: self.path = path super().__init__() def __eq__(self, other: object) -> bool: return isinstance(other, FilterQuery) and str(self) == str(other) def children(self) -> List[BaseExpression]: _children: List[BaseExpression] = [] for segment in self.path.segments: for selector in segment.selectors: if isinstance(selector, FilterSelector): _children.append(selector.expression) return _children def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 # self.path has its own cache return class RelativeFilterQuery(FilterQuery): """A JSONPath starting at the current node.""" __slots__ = () def __init__(self, path: JSONPath) -> None: super().__init__(path) self.volatile = True def __str__(self) -> str: return "@" + str(self.path)[1:] def evaluate(self, context: FilterContext) -> object: if isinstance(context.current, str) or not isinstance( context.current, (Sequence, Mapping) ): if self.path.empty(): return context.current return NodeList() return NodeList( self.path.finditer( context.current, filter_context=context.extra_context, ) ) async def evaluate_async(self, context: FilterContext) -> object: if isinstance(context.current, str) or not isinstance( context.current, (Sequence, Mapping) ): if self.path.empty(): return context.current return NodeList() return NodeList( [ match async for match in await self.path.finditer_async( context.current, filter_context=context.extra_context, ) ] ) class RootFilterQuery(FilterQuery): """A JSONPath starting at the root node.""" __slots__ = () FORCE_CACHE = True def __init__(self, path: JSONPath) -> None: super().__init__(path) self.volatile = False def __str__(self) -> str: return str(self.path) def evaluate(self, context: FilterContext) -> object: return NodeList( self.path.finditer( context.root, filter_context=context.extra_context, ) ) async def evaluate_async(self, context: FilterContext) -> object: return NodeList( [ match async for match in await self.path.finditer_async( context.root, filter_context=context.extra_context, ) ] ) class FilterContextPath(FilterQuery): """A JSONPath starting at the root of any extra context data.""" __slots__ = () FORCE_CACHE = True def __init__(self, path: JSONPath) -> None: super().__init__(path) self.volatile = False def __str__(self) -> str: path_repr = str(self.path) return "_" + path_repr[1:] def evaluate(self, context: FilterContext) -> object: return NodeList( self.path.finditer( context.extra_context, filter_context=context.extra_context, ) ) async def evaluate_async(self, context: FilterContext) -> object: return NodeList( [ match async for match in await self.path.finditer_async( context.extra_context, filter_context=context.extra_context, ) ] ) class FunctionExtension(BaseExpression): """A filter function.""" __slots__ = ("name", "args") def __init__(self, name: str, args: Sequence[BaseExpression]) -> None: self.name = name self.args = args super().__init__() def __str__(self) -> str: args = [str(arg) for arg in self.args] return f"{self.name}({', '.join(args)})" def __eq__(self, other: object) -> bool: return ( isinstance(other, FunctionExtension) and other.name == self.name and other.args == self.args ) def evaluate(self, context: FilterContext) -> object: try: func = context.env.function_extensions[self.name] except KeyError: # This can only happen 
if the environment's function register has been # changed since the query was parsed. return UNDEFINED args = [arg.evaluate(context) for arg in self.args] return func(*self._unpack_node_lists(func, args)) async def evaluate_async(self, context: FilterContext) -> object: try: func = context.env.function_extensions[self.name] except KeyError: # This can only happen if the environment's function register has been # changed since the query was parsed. return UNDEFINED args = [await arg.evaluate_async(context) for arg in self.args] return func(*self._unpack_node_lists(func, args)) def _unpack_node_lists( self, func: Callable[..., Any], args: List[object] ) -> List[object]: if isinstance(func, FilterFunction): _args: List[object] = [] for idx, arg in enumerate(args): if func.arg_types[idx] != ExpressionType.NODES and isinstance( arg, NodeList ): if len(arg) == 0: # If the query results in an empty nodelist, the # argument is the special result Nothing. _args.append(UNDEFINED) elif len(arg) == 1: # If the query results in a nodelist consisting of a # single node, the argument is the value of the node _args.append(arg[0].obj) else: # This should not be possible as a non-singular query # would have been rejected when checking function # well-typedness. _args.append(arg) else: _args.append(arg) return _args # Legacy way to indicate that a filter function wants node lists as arguments. if getattr(func, "with_node_lists", False): return args return [ obj.values_or_singular() if isinstance(obj, NodeList) else obj for obj in args ] def children(self) -> List[BaseExpression]: return list(self.args) def set_children(self, children: List[BaseExpression]) -> None: assert len(children) == len(self.args) self.args = children class CurrentKey(BaseExpression): """The key/property or index associated with the current object.""" __slots__ = () def __init__(self) -> None: super().__init__() self.volatile = True def __str__(self) -> str: return "#" def __eq__(self, other: object) -> bool: return isinstance(other, CurrentKey) def evaluate(self, context: FilterContext) -> object: if context.current_key is None: return UNDEFINED return context.current_key async def evaluate_async(self, context: FilterContext) -> object: return self.evaluate(context) def children(self) -> List[BaseExpression]: return [] def set_children(self, children: List[BaseExpression]) -> None: # noqa: ARG002 return CURRENT_KEY = CurrentKey() def walk(expr: BaseExpression) -> Iterable[BaseExpression]: """Walk the filter expression tree starting at _expr_.""" yield expr for child in expr.children(): yield from walk(child) VALUE_TYPE_EXPRESSIONS = ( Nil, Undefined, FilterExpressionLiteral, ListLiteral, CurrentKey, ) jg-rp-python-jsonpath-830094f/jsonpath/fluent_api.py000066400000000000000000000216351512714264000225120ustar00rootroot00000000000000"""A fluent API for working with `JSONPathMatch` iterators.""" from __future__ import annotations import collections import itertools from enum import Enum from enum import auto from typing import TYPE_CHECKING from typing import Any from typing import Dict from typing import Iterable from typing import Iterator from typing import List from typing import Mapping from typing import Optional from typing import Sequence from typing import Tuple from typing import Union if TYPE_CHECKING: from jsonpath import CompoundJSONPath from jsonpath import JSONPath from jsonpath import JSONPathEnvironment from jsonpath import JSONPathMatch from jsonpath import JSONPointer class Projection(Enum): """Projection style used by 
`Query.select()`.""" RELATIVE = auto() """The default projection. Selections include parent arrays and objects relative to the JSONPathMatch.""" ROOT = auto() """Selections include parent arrays and objects relative to the root JSON value.""" FLAT = auto() """All selections are appended to a new array/list, without arrays and objects on the path to the selected value.""" class Query: """A fluent API for managing `JSONPathMatch` iterators. Usually you'll want to use `jsonpath.query()` or `JSONPathEnvironment.query()` to create instances of `Query` rather than instantiating `Query` directly. Arguments: it: A `JSONPathMatch` iterable, as you'd get from `jsonpath.finditer()` or `JSONPathEnvironment.finditer()`. **New in version 1.1.0** """ def __init__(self, it: Iterable[JSONPathMatch], env: JSONPathEnvironment) -> None: self._it = iter(it) self._env = env def __iter__(self) -> Iterator[JSONPathMatch]: return self._it def limit(self, n: int) -> Query: """Limit the query iterator to at most _n_ matches. Raises: ValueError: If _n_ < 0. """ if n < 0: raise ValueError("can't limit by a negative number of matches") self._it = itertools.islice(self._it, n) return self def head(self, n: int) -> Query: """Limit the query iterator to at most the first _n_ matches. `head()` is an alias for `limit()`. Raises: ValueError: If _n_ < 0. """ return self.limit(n) def first(self, n: int) -> Query: """Limit the query iterator to at most the first _n_ matches. `first()` is an alias for `limit()`. Raises: ValueError: If _n_ < 0. """ return self.limit(n) def drop(self, n: int) -> Query: """Skip up to _n_ matches from the query iterator. Raises: ValueError: If _n_ < 0. """ if n < 0: raise ValueError("can't drop a negative number of matches") if n > 0: next(itertools.islice(self._it, n, n), None) return self def skip(self, n: int) -> Query: """Skip up to _n_ matches from the query iterator. Raises: ValueError: If _n_ < 0. """ return self.drop(n) def tail(self, n: int) -> Query: """Drop matches up to the last _n_ matches from the iterator. Raises: ValueError: If _n_ < 0. """ if n < 0: raise ValueError("can't select a negative number of matches") self._it = iter(collections.deque(self._it, maxlen=n)) return self def last(self, n: int) -> Query: """Drop up to the last _n_ matches from the iterator. `last()` is an alias for `tail()`. Raises: ValueError: If _n_ < 0. """ return self.tail(n) def values(self) -> Iterable[object]: """Return an iterable of objects associated with each match.""" return (m.obj for m in self._it) def locations(self) -> Iterable[str]: """Return an iterable of normalized paths, one for each match.""" return (m.path for m in self._it) def items(self) -> Iterable[Tuple[str, object]]: """Return an iterable of (path, object) tuples, one for each match.""" return ((m.path, m.obj) for m in self._it) def pointers(self) -> Iterable[JSONPointer]: """Return an iterable of JSONPointers, one for each match.""" return (m.pointer() for m in self._it) def first_one(self) -> Optional[JSONPathMatch]: """Return the first `JSONPathMatch` or `None` if there were no matches.""" try: return next(self._it) except StopIteration: return None def one(self) -> Optional[JSONPathMatch]: """Return the first `JSONPathMatch` or `None` if there were no matches. `one()` is an alias for `first_one()`. 
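        Example (an illustrative sketch of this fluent API; the sample data and
        results are assumptions for demonstration, and `select()` is defined
        later in this class):

            >>> import jsonpath
            >>> data = {"users": [{"name": "Sue", "score": 100}, {"name": "John", "score": 86}]}
            >>> list(jsonpath.query("$.users[*].name", data).values())
            ['Sue', 'John']
            >>> jsonpath.query("$.users[*].name", data).one().obj
            'Sue'
            >>> list(jsonpath.query("$.users[*]", data).select("name"))
            [{'name': 'Sue'}, {'name': 'John'}]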
""" return self.first_one() def last_one(self) -> Optional[JSONPathMatch]: """Return the last `JSONPathMatch` or `None` if there were no matches.""" try: return next(iter(self.tail(1))) except StopIteration: return None def tee(self, n: int = 2) -> Tuple[Query, ...]: """Return _n_ independent queries by teeing this query's iterator. It is not safe to use a `Query` instance after calling `tee()`. """ return tuple(Query(it, self._env) for it in itertools.tee(self._it, n)) def take(self, n: int) -> Query: """Return a new query iterating over the next _n_ matches. It is safe to continue using this query after calling take. """ return Query(list(itertools.islice(self._it, n)), self._env) def select( self, *expressions: Union[str, JSONPath, CompoundJSONPath], projection: Projection = Projection.RELATIVE, ) -> Iterable[object]: """Query projection using relative JSONPaths. Arguments: expressions: One or more JSONPath query expressions to select relative to each match in this query iterator. projection: The style of projection used when selecting values. Can be one of `Projection.RELATIVE`, `Projection.ROOT` or `Projection.FLAT`. Defaults to `Projection.RELATIVE`. Returns: An iterable of objects built from selecting _expressions_ relative to each match from the current query. **New in version 1.2.0** """ return filter( bool, (self._select(m, expressions, projection) for m in self._it), ) def _select( self, match: JSONPathMatch, expressions: Tuple[Union[str, JSONPath, CompoundJSONPath], ...], projection: Projection, ) -> object: if not isinstance(match.obj, (Mapping, Sequence)) or isinstance(match.obj, str): return None if projection == Projection.RELATIVE: obj: Dict[Union[int, str], Any] = {} for expr in expressions: path = self._env.compile(expr) if isinstance(expr, str) else expr for rel_match in path.finditer(match.obj): # type: ignore _patch_obj(rel_match.parts, obj, rel_match.obj) return _fix_sparse_arrays(obj) if projection == Projection.FLAT: arr: List[object] = [] for expr in expressions: path = self._env.compile(expr) if isinstance(expr, str) else expr for rel_match in path.finditer(match.obj): # type: ignore arr.append(rel_match.obj) return arr # Project from the root document obj = {} for expr in expressions: path = self._env.compile(expr) if isinstance(expr, str) else expr for rel_match in path.finditer(match.obj): # type: ignore _patch_obj(match.parts + rel_match.parts, obj, rel_match.obj) return _fix_sparse_arrays(obj) def _patch_obj( parts: Tuple[Union[int, str], ...], obj: Mapping[Union[str, int], Any], value: object, ) -> None: _obj = obj # For lack of a better idea, we're patching arrays to dictionaries with # integer keys. This is to handle sparse array selections without having # to keep track of indexes and how they map from the root JSON value to # the selected JSON value. # # We'll fix these "sparse arrays" after the patch has been applied. 
for part in parts[:-1]: if part not in _obj: _obj[part] = {} # type: ignore _obj = _obj[part] _obj[parts[-1]] = value # type: ignore def _fix_sparse_arrays(obj: Any) -> object: """Fix sparse arrays (dictionaries with integer keys).""" if isinstance(obj, str) or not obj: return obj if isinstance(obj, Sequence): return [_fix_sparse_arrays(e) for e in obj] if isinstance(obj, Mapping): if isinstance(next(iter(obj)), int): return [_fix_sparse_arrays(v) for v in obj.values()] return {k: _fix_sparse_arrays(v) for k, v in obj.items()} return obj jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/000077500000000000000000000000001512714264000241075ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/__init__.py000066400000000000000000000011321512714264000262150ustar00rootroot00000000000000# noqa: D104 from .arguments import validate # noqa: I001 from .filter_function import ExpressionType from .filter_function import FilterFunction from .count import Count from .is_instance import IsInstance from .keys import Keys from .length import Length from .match import Match from .search import Search from .starts_with import StartsWith from .typeof import TypeOf from .value import Value __all__ = ( "Count", "ExpressionType", "FilterFunction", "IsInstance", "Keys", "Length", "Match", "Search", "StartsWith", "TypeOf", "validate", "Value", ) jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/_pattern.py000066400000000000000000000063031512714264000262770ustar00rootroot00000000000000from typing import List from typing import Optional try: import regex as re REGEX_AVAILABLE = True except ImportError: import re # type: ignore REGEX_AVAILABLE = False try: from iregexp_check import check IREGEXP_AVAILABLE = True except ImportError: IREGEXP_AVAILABLE = False from jsonpath.exceptions import JSONPathError from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction from jsonpath.lru_cache import LRUCache from jsonpath.lru_cache import ThreadSafeLRUCache class AbstractRegexFilterFunction(FilterFunction): """Base class for filter function that accept regular expression arguments. Arguments: cache_capacity: The size of the regular expression cache. debug: When `True`, raise an exception when regex pattern compilation fails. The default - as required by RFC 9535 - is `False`, which silently ignores bad patterns. thread_safe: When `True`, use a `ThreadSafeLRUCache` instead of an instance of `LRUCache`. 
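    Example (a hedged sketch using the standard `match` and `search` filter
    functions built on this class; per RFC 9535, `match` must match the whole
    string while `search` matches a substring):

        >>> import jsonpath
        >>> data = {"a": [{"x": "ab"}, {"x": "abc"}]}
        >>> jsonpath.findall("$.a[?match(@.x, 'ab')]", data)
        [{'x': 'ab'}]
        >>> jsonpath.findall("$.a[?search(@.x, 'ab')]", data)
        [{'x': 'ab'}, {'x': 'abc'}]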
""" arg_types = [ExpressionType.VALUE, ExpressionType.VALUE] return_type = ExpressionType.LOGICAL def __init__( self, *, cache_capacity: int = 300, debug: bool = False, thread_safe: bool = False, ): self.cache: LRUCache[str, Optional[re.Pattern]] = ( # type: ignore ThreadSafeLRUCache(capacity=cache_capacity) if thread_safe else LRUCache(capacity=cache_capacity) ) self.debug = debug def check_cache(self, pattern: str) -> Optional[re.Pattern]: # type: ignore """Return a compiled re pattern if `pattern` is valid, or `None` otherwise.""" try: _pattern = self.cache[pattern] except KeyError: if IREGEXP_AVAILABLE and not check(pattern): if self.debug: raise JSONPathError( "search pattern is not a valid I-Regexp", token=None ) from None _pattern = None else: if REGEX_AVAILABLE: pattern = map_re(pattern) try: _pattern = re.compile(pattern) except re.error: if self.debug: raise _pattern = None self.cache[pattern] = _pattern return _pattern def map_re(pattern: str) -> str: """Convert an I-Regexp pattern into a Python re pattern.""" escaped = False char_class = False parts: List[str] = [] for ch in pattern: if escaped: parts.append(ch) escaped = False continue if ch == ".": if not char_class: parts.append(r"(?:(?![\r\n])\P{Cs}|\p{Cs}\p{Cs})") else: parts.append(ch) elif ch == "\\": escaped = True parts.append(ch) elif ch == "[": char_class = True parts.append(ch) elif ch == "]": char_class = False parts.append(ch) else: parts.append(ch) return "".join(parts) jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/arguments.py000066400000000000000000000032401512714264000264650ustar00rootroot00000000000000"""Class-based function extension base.""" import inspect from typing import TYPE_CHECKING from typing import Any from typing import Callable from typing import List if TYPE_CHECKING: from jsonpath.env import JSONPathEnvironment from jsonpath.token import Token from jsonpath.exceptions import JSONPathTypeError def validate( _: "JSONPathEnvironment", func: Callable[..., Any], args: List[Any], token: "Token", ) -> List[Any]: """Generic validation of function extension arguments using introspection. RFC 9535 requires us to reject paths that use filter functions with too many or too few arguments. """ params = list(inspect.signature(func).parameters.values()) # Keyword only params are not supported if [p for p in params if p.kind in (p.KEYWORD_ONLY, p.VAR_KEYWORD)]: raise JSONPathTypeError( f"function {token.value!r} requires keyword arguments", token=token, ) # Too few args? positional_args = [ p for p in params if p.kind in (p.POSITIONAL_ONLY, p.POSITIONAL_OR_KEYWORD) ] if len(args) < len(positional_args): raise JSONPathTypeError( f"{token.value!r}() requires {len(positional_args)} arguments", token=token, ) # Does the signature have var args? has_var_args = bool([p for p in params if p.kind == p.VAR_POSITIONAL]) # Too many args? 
if not has_var_args and len(args) > len(positional_args): raise JSONPathTypeError( f"{token.value!r}() requires at most " f"{len(positional_args) + len(positional_args)} arguments", token=token, ) return args jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/count.py000066400000000000000000000011051512714264000256060ustar00rootroot00000000000000"""The standard `count` function extension.""" from __future__ import annotations from typing import TYPE_CHECKING from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction if TYPE_CHECKING: from jsonpath.match import NodeList class Count(FilterFunction): """The built-in `count` function.""" arg_types = [ExpressionType.NODES] return_type = ExpressionType.VALUE def __call__(self, node_list: NodeList) -> int: """Return the number of nodes in the node list.""" return len(node_list) jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/filter_function.py000066400000000000000000000015021512714264000276510ustar00rootroot00000000000000"""Classes modeling the JSONPath spec type system for function extensions.""" from abc import ABC from abc import abstractmethod from enum import Enum from typing import Any from typing import List class ExpressionType(Enum): """The type of a filter function argument or return value.""" VALUE = 1 LOGICAL = 2 NODES = 3 class FilterFunction(ABC): """Base class for typed function extensions.""" @property @abstractmethod def arg_types(self) -> List[ExpressionType]: """Argument types expected by the filter function.""" @property @abstractmethod def return_type(self) -> ExpressionType: """The type of the value returned by the filter function.""" @abstractmethod def __call__(self, *args: Any, **kwds: Any) -> Any: """Called the filter function.""" jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/is_instance.py000066400000000000000000000033401512714264000267600ustar00rootroot00000000000000"""A non-standard "isinstance" filter function.""" from typing import Mapping from typing import Sequence from jsonpath.filter import UNDEFINED from jsonpath.filter import UNDEFINED_LITERAL from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction from jsonpath.match import NodeList class IsInstance(FilterFunction): """A non-standard "isinstance" filter function.""" arg_types = [ExpressionType.NODES, ExpressionType.VALUE] return_type = ExpressionType.LOGICAL def __call__(self, nodes: NodeList, t: str) -> bool: # noqa: PLR0911 """Return `True` if the type of _obj_ matches _t_. This function allows _t_ to be one of several aliases for the real Python "type". Some of these aliases follow JavaScript/JSON semantics. 
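        Example (a hedged sketch; this is a non-standard function, so it is
        registered explicitly here under the illustrative name "isinstance"
        via the environment's `function_extensions` mapping):

            >>> import jsonpath
            >>> from jsonpath.function_extensions import IsInstance
            >>> env = jsonpath.JSONPathEnvironment()
            >>> env.function_extensions["isinstance"] = IsInstance()
            >>> data = {"a": [{"b": [1, 2]}, {"b": "x"}]}
            >>> env.findall("$.a[?isinstance(@.b, 'array')]", data)
            [{'b': [1, 2]}]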
""" if not nodes: return t in ("undefined", "missing") obj = nodes.values_or_singular() if ( obj is UNDEFINED or obj is UNDEFINED_LITERAL or (isinstance(obj, NodeList) and len(obj) == 0) ): return t in ("undefined", "missing") if obj is None: return t in ("null", "nil", "None", "none") if isinstance(obj, str): return t in ("str", "string") if isinstance(obj, Sequence): return t in ("array", "list", "sequence", "tuple") if isinstance(obj, Mapping): return t in ("object", "dict", "mapping") if isinstance(obj, bool): return t in ("bool", "boolean") if isinstance(obj, int): return t in ("number", "int") if isinstance(obj, float): return t in ("number", "float") return t == "object" jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/keys.py000066400000000000000000000014661512714264000254430ustar00rootroot00000000000000"""The `keys` JSONPath filter function.""" from typing import Mapping from typing import Tuple from typing import Union from jsonpath.filter import UNDEFINED from jsonpath.filter import _Undefined from .filter_function import ExpressionType from .filter_function import FilterFunction class Keys(FilterFunction): """The `keys` JSONPath filter function.""" arg_types = [ExpressionType.VALUE] return_type = ExpressionType.VALUE def __call__( self, value: Mapping[str, object] ) -> Union[Tuple[str, ...], _Undefined]: """Return a tuple of keys in `value`. If `value` does not have a `keys()` method, the special _Nothing_ value is returned. """ try: return tuple(value.keys()) except AttributeError: return UNDEFINED jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/length.py000066400000000000000000000014221512714264000257410ustar00rootroot00000000000000"""The standard `length` function extension.""" from collections.abc import Sized from typing import Union from jsonpath.filter import UNDEFINED from jsonpath.filter import _Undefined from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction class Length(FilterFunction): """A type-aware implementation of the standard `length` function.""" arg_types = [ExpressionType.VALUE] return_type = ExpressionType.VALUE def __call__(self, obj: Sized) -> Union[int, _Undefined]: """Return an object's length. If the object does not have a length, the special _Nothing_ value is returned. 
""" try: return len(obj) except TypeError: return UNDEFINED jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/match.py000066400000000000000000000010751512714264000255600ustar00rootroot00000000000000"""The standard `match` function extension.""" from ._pattern import AbstractRegexFilterFunction class Match(AbstractRegexFilterFunction): """The standard `match` function.""" def __call__(self, value: object, pattern: object) -> bool: """Return `True` if _value_ matches _pattern_, or `False` otherwise.""" if not isinstance(value, str) or not isinstance(pattern, str): return False _pattern = self.check_cache(pattern) if _pattern is None: return False return bool(_pattern.fullmatch(value)) jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/search.py000066400000000000000000000010751512714264000257310ustar00rootroot00000000000000"""The standard `search` function extension.""" from ._pattern import AbstractRegexFilterFunction class Search(AbstractRegexFilterFunction): """The standard `search` function.""" def __call__(self, value: object, pattern: object) -> bool: """Return `True` if _value_ matches _pattern_, or `False` otherwise.""" if not isinstance(value, str) or not isinstance(pattern, str): return False _pattern = self.check_cache(pattern) if _pattern is None: return False return bool(_pattern.search(value)) jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/starts_with.py000066400000000000000000000012441512714264000270350ustar00rootroot00000000000000"""The `startswith` function extension.""" from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction class StartsWith(FilterFunction): """The `startswith` function extension.""" arg_types = [ExpressionType.VALUE, ExpressionType.VALUE] return_type = ExpressionType.LOGICAL def __call__(self, value: object, prefix: object) -> bool: """Return `True` if `value` starts with `prefix`.""" if not isinstance(value, str) or not isinstance(prefix, str): return False try: return value.startswith(prefix) except AttributeError: return False jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/typeof.py000066400000000000000000000033641512714264000257750ustar00rootroot00000000000000"""A non-standard "typeof" filter function.""" from typing import Mapping from typing import Sequence from jsonpath.filter import UNDEFINED from jsonpath.filter import UNDEFINED_LITERAL from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction from jsonpath.match import NodeList class TypeOf(FilterFunction): """A non-standard "typeof" filter function. Arguments: single_number_type: If True, will return "number" for ints and floats, otherwise we'll use "int" and "float" respectively. Defaults to `True`. """ arg_types = [ExpressionType.NODES] return_type = ExpressionType.VALUE def __init__(self, *, single_number_type: bool = True) -> None: self.single_number_type = single_number_type def __call__(self, nodes: NodeList) -> str: # noqa: PLR0911 """Return the type of _obj_ as a string. The strings returned from this function use JSON terminology, much like the result of JavaScript's `typeof` operator. 
""" if not nodes: return "undefined" obj = nodes.values_or_singular() if obj is UNDEFINED or obj is UNDEFINED_LITERAL: return "undefined" if obj is None: return "null" if isinstance(obj, str): return "string" if isinstance(obj, Sequence): return "array" if isinstance(obj, Mapping): return "object" if isinstance(obj, bool): return "boolean" if isinstance(obj, int): return "number" if self.single_number_type else "int" if isinstance(obj, float): return "number" if self.single_number_type else "float" return "object" jg-rp-python-jsonpath-830094f/jsonpath/function_extensions/value.py000066400000000000000000000013211512714264000255720ustar00rootroot00000000000000"""The standard `value` function extension.""" from __future__ import annotations from typing import TYPE_CHECKING from jsonpath.filter import UNDEFINED from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction if TYPE_CHECKING: from jsonpath.match import NodeList class Value(FilterFunction): """A type-aware implementation of the standard `value` function.""" arg_types = [ExpressionType.NODES] return_type = ExpressionType.VALUE def __call__(self, nodes: NodeList) -> object: """Return the first node in a node list if it has only one item.""" if len(nodes) == 1: return nodes[0].obj return UNDEFINED jg-rp-python-jsonpath-830094f/jsonpath/lex.py000066400000000000000000000274021512714264000211520ustar00rootroot00000000000000"""JSONPath tokenization.""" from __future__ import annotations import re from functools import partial from typing import TYPE_CHECKING from typing import Iterator from typing import Pattern from .exceptions import JSONPathSyntaxError from .token import TOKEN_AND from .token import TOKEN_COLON from .token import TOKEN_COMMA from .token import TOKEN_CONTAINS from .token import TOKEN_DDOT from .token import TOKEN_DOT from .token import TOKEN_DOT_KEY_PROPERTY from .token import TOKEN_DOT_PROPERTY from .token import TOKEN_DOUBLE_QUOTE_STRING from .token import TOKEN_EQ from .token import TOKEN_ERROR from .token import TOKEN_FALSE from .token import TOKEN_FILTER from .token import TOKEN_FILTER_CONTEXT from .token import TOKEN_FLOAT from .token import TOKEN_FUNCTION from .token import TOKEN_GE from .token import TOKEN_GT from .token import TOKEN_IN from .token import TOKEN_INT from .token import TOKEN_INTERSECTION from .token import TOKEN_KEY from .token import TOKEN_KEY_NAME from .token import TOKEN_KEYS from .token import TOKEN_KEYS_FILTER from .token import TOKEN_LBRACKET from .token import TOKEN_LE from .token import TOKEN_LG from .token import TOKEN_LPAREN from .token import TOKEN_LT from .token import TOKEN_MISSING from .token import TOKEN_NAME from .token import TOKEN_NE from .token import TOKEN_NIL from .token import TOKEN_NONE from .token import TOKEN_NOT from .token import TOKEN_NULL from .token import TOKEN_OR from .token import TOKEN_PSEUDO_ROOT from .token import TOKEN_RBRACKET from .token import TOKEN_RE from .token import TOKEN_RE_FLAGS from .token import TOKEN_RE_PATTERN from .token import TOKEN_ROOT from .token import TOKEN_RPAREN from .token import TOKEN_SELF from .token import TOKEN_SINGLE_QUOTE_STRING from .token import TOKEN_TRUE from .token import TOKEN_UNDEFINED from .token import TOKEN_UNION from .token import TOKEN_WHITESPACE from .token import TOKEN_WILD from .token import Token if TYPE_CHECKING: from . import JSONPathEnvironment class Lexer: """Tokenize a JSONPath string. 
Some customization can be achieved by subclassing _Lexer_ and setting class attributes. Then setting `lexer_class` on a `JSONPathEnvironment`. Attributes: key_pattern: The regular expression pattern used to match mapping keys/properties. logical_not_pattern: The regular expression pattern used to match logical negation tokens. By default, `not` and `!` are equivalent. logical_and_pattern: The regular expression pattern used to match logical _and_ tokens. By default, `and` and `&&` are equivalent. logical_or_pattern: The regular expression pattern used to match logical _or_ tokens. By default, `or` and `||` are equivalent. """ key_pattern = r"[\u0080-\uFFFFa-zA-Z_][\u0080-\uFFFFa-zA-Z0-9_-]*" # ! or `not` logical_not_pattern = r"(?:not\b)|!" # && or `and` logical_and_pattern = r"&&|(?:and\b)" # || or `or` logical_or_pattern = r"\|\||(?:or\b)" def __init__(self, *, env: JSONPathEnvironment) -> None: self.env = env self.double_quote_pattern = r'"(?P(?:(?!(?(?:(?!(?\.)(?P{self.key_pattern})" # .~thing self.dot_key_pattern = ( r"(?P\.)" rf"(?P{re.escape(env.keys_selector_token)})" rf"(?P{self.key_pattern})" ) # /pattern/ or /pattern/flags self.re_pattern = r"/(?P(?:(?!(?[aims]*)" # func( self.function_pattern = r"(?P[a-z][a-z_0-9]+)(?P\()" self.rules = self.compile_strict_rules() if env.strict else self.compile_rules() def compile_rules(self) -> Pattern[str]: """Prepare regular expression rules.""" env_tokens = [ (TOKEN_ROOT, self.env.root_token), (TOKEN_PSEUDO_ROOT, self.env.pseudo_root_token), (TOKEN_SELF, self.env.self_token), (TOKEN_KEY, self.env.key_token), (TOKEN_UNION, self.env.union_token), (TOKEN_INTERSECTION, self.env.intersection_token), (TOKEN_FILTER_CONTEXT, self.env.filter_context_token), (TOKEN_KEYS, self.env.keys_selector_token), (TOKEN_KEYS_FILTER, self.env.keys_filter_token), ] rules = [ (TOKEN_DOUBLE_QUOTE_STRING, self.double_quote_pattern), (TOKEN_SINGLE_QUOTE_STRING, self.single_quote_pattern), (TOKEN_RE_PATTERN, self.re_pattern), (TOKEN_DOT_KEY_PROPERTY, self.dot_key_pattern), (TOKEN_DOT_PROPERTY, self.dot_property_pattern), ( TOKEN_FLOAT, r"(:?-?[0-9]+\.[0-9]+(?:[eE][+-]?[0-9]+)?)|(-?[0-9]+[eE]-[0-9]+)", ), (TOKEN_INT, r"-?[0-9]+(?:[eE]\+?[0-9]+)?"), (TOKEN_DDOT, r"\.\."), (TOKEN_DOT, r"\."), (TOKEN_AND, self.logical_and_pattern), (TOKEN_OR, self.logical_or_pattern), *[ (token, re.escape(pattern)) for token, pattern in sorted( env_tokens, key=lambda x: len(x[1]), reverse=True ) if pattern ], (TOKEN_WILD, r"\*"), (TOKEN_FILTER, r"\?"), (TOKEN_IN, r"in\b"), (TOKEN_TRUE, r"[Tt]rue\b"), (TOKEN_FALSE, r"[Ff]alse\b"), (TOKEN_NIL, r"[Nn]il\b"), (TOKEN_NULL, r"[Nn]ull\b"), (TOKEN_NONE, r"[Nn]one\b"), (TOKEN_CONTAINS, r"contains\b"), (TOKEN_UNDEFINED, r"undefined\b"), (TOKEN_MISSING, r"missing\b"), (TOKEN_LBRACKET, r"\["), (TOKEN_RBRACKET, r"]"), (TOKEN_COMMA, r","), (TOKEN_COLON, r":"), (TOKEN_EQ, r"=="), (TOKEN_NE, r"!="), (TOKEN_LG, r"<>"), (TOKEN_LE, r"<="), (TOKEN_GE, r">="), (TOKEN_RE, r"=~"), (TOKEN_LT, r"<"), (TOKEN_GT, r">"), (TOKEN_NOT, self.logical_not_pattern), # Must go after "!=" (TOKEN_FUNCTION, self.function_pattern), (TOKEN_NAME, self.key_pattern), # Must go after reserved words (TOKEN_LPAREN, r"\("), (TOKEN_RPAREN, r"\)"), (TOKEN_WHITESPACE, r"[ \n\t\r]+"), (TOKEN_ERROR, r"."), ] return re.compile( "|".join(f"(?P<{token}>{pattern})" for token, pattern in rules), re.DOTALL, ) def compile_strict_rules(self) -> Pattern[str]: """Prepare regular expression rules in strict mode.""" env_tokens = [ (TOKEN_ROOT, self.env.root_token), (TOKEN_SELF, self.env.self_token), ] 
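# Example (a hedged sketch of the customization described in this class's
# docstring; `MyLexer` and `MyEnv` are illustrative names, not part of the
# package):
#
#   >>> import jsonpath
#   >>> class MyLexer(jsonpath.lex.Lexer):
#   ...     logical_and_pattern = r"&&"  # accept '&&' only, not 'and'
#   ...
#   >>> class MyEnv(jsonpath.JSONPathEnvironment):
#   ...     lexer_class = MyLexer
#   ...
#   >>> path = MyEnv().compile("$.things[?@.a && @.b]")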
rules = [ (TOKEN_DOUBLE_QUOTE_STRING, self.double_quote_pattern), (TOKEN_SINGLE_QUOTE_STRING, self.single_quote_pattern), (TOKEN_DOT_PROPERTY, self.dot_property_pattern), ( TOKEN_FLOAT, r"(:?-?[0-9]+\.[0-9]+(?:[eE][+-]?[0-9]+)?)|(-?[0-9]+[eE]-[0-9]+)", ), (TOKEN_INT, r"-?[0-9]+(?:[eE]\+?[0-9]+)?"), (TOKEN_DDOT, r"\.\."), (TOKEN_DOT, r"\."), (TOKEN_AND, r"&&"), (TOKEN_OR, r"\|\|"), *[ (token, re.escape(pattern)) for token, pattern in sorted( env_tokens, key=lambda x: len(x[1]), reverse=True ) if pattern ], (TOKEN_WILD, r"\*"), (TOKEN_FILTER, r"\?"), (TOKEN_TRUE, r"true\b"), (TOKEN_FALSE, r"false\b"), (TOKEN_NULL, r"null\b"), (TOKEN_LBRACKET, r"\["), (TOKEN_RBRACKET, r"]"), (TOKEN_COMMA, r","), (TOKEN_COLON, r":"), (TOKEN_EQ, r"=="), (TOKEN_NE, r"!="), (TOKEN_LG, r"<>"), (TOKEN_LE, r"<="), (TOKEN_GE, r">="), (TOKEN_LT, r"<"), (TOKEN_GT, r">"), (TOKEN_NOT, r"!"), # Must go after "!=" (TOKEN_FUNCTION, self.function_pattern), (TOKEN_NAME, self.key_pattern), # Must go after reserved words (TOKEN_LPAREN, r"\("), (TOKEN_RPAREN, r"\)"), (TOKEN_WHITESPACE, r"[ \n\t\r]+"), (TOKEN_ERROR, r"."), ] return re.compile( "|".join(f"(?P<{token}>{pattern})" for token, pattern in rules), re.DOTALL, ) def tokenize(self, path: str) -> Iterator[Token]: # noqa PLR0912 """Generate a sequence of tokens from a JSONPath string.""" _token = partial(Token, path=path) for match in self.rules.finditer(path): kind = match.lastgroup assert kind is not None if kind == TOKEN_DOT_PROPERTY: yield _token( kind=TOKEN_DOT, value=match.group("G_DOT"), index=match.start("G_DOT"), ) yield _token( kind=TOKEN_NAME, value=match.group("G_PROP"), index=match.start("G_PROP"), ) elif kind == TOKEN_DOT_KEY_PROPERTY: yield _token( kind=TOKEN_DOT, value=match.group("G_DOT_KEY"), index=match.start("G_DOT_KEY"), ) yield _token( kind=TOKEN_KEY_NAME, value=match.group("G_PROP_KEY"), index=match.start("G_PROP_KEY"), ) elif kind == TOKEN_DOUBLE_QUOTE_STRING: yield _token( kind=TOKEN_DOUBLE_QUOTE_STRING, value=match.group("G_DQUOTE"), index=match.start("G_DQUOTE"), ) elif kind == TOKEN_SINGLE_QUOTE_STRING: yield _token( kind=TOKEN_SINGLE_QUOTE_STRING, value=match.group("G_SQUOTE"), index=match.start("G_SQUOTE"), ) elif kind == TOKEN_RE_PATTERN: yield _token( kind=TOKEN_RE_PATTERN, value=match.group("G_RE"), index=match.start("G_RE"), ) yield _token( TOKEN_RE_FLAGS, value=match.group("G_RE_FLAGS"), index=match.start("G_RE_FLAGS"), ) elif kind in (TOKEN_NONE, TOKEN_NULL): yield _token( kind=TOKEN_NIL, value=match.group(), index=match.start(), ) elif kind == TOKEN_FUNCTION: yield _token( kind=TOKEN_FUNCTION, value=match.group("G_FUNC"), index=match.start("G_FUNC"), ) yield _token( kind=TOKEN_LPAREN, value=match.group("G_FUNC_PAREN"), index=match.start("G_FUNC_PAREN"), ) elif kind == TOKEN_ERROR: raise JSONPathSyntaxError( f"unexpected token {match.group()!r}", token=_token( TOKEN_ERROR, value=match.group(), index=match.start(), ), ) else: yield _token( kind=kind, value=match.group(), index=match.start(), ) jg-rp-python-jsonpath-830094f/jsonpath/lru_cache.py000066400000000000000000000077171512714264000223160ustar00rootroot00000000000000"""An LRU cache with a mapping interface implemented using an ordered dict.""" from collections import OrderedDict from threading import Lock from typing import Generic from typing import Iterator from typing import Optional from typing import Tuple from typing import TypeVar from typing import Union from typing import overload _KT = TypeVar("_KT") _VT = TypeVar("_VT") _T = TypeVar("_T") class LRUCache(Generic[_KT, _VT]): 
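# Example (a hedged sketch of the LRU eviction behaviour implemented by this
# class; the cache is an internal utility and is shown here purely for
# illustration):
#
#   >>> from jsonpath.lru_cache import LRUCache
#   >>> cache = LRUCache(capacity=2)
#   >>> cache["a"] = 1
#   >>> cache["b"] = 2
#   >>> _ = cache["a"]   # "a" becomes the most recently used key
#   >>> cache["c"] = 3   # evicts "b", the least recently used key
#   >>> "b" in cache
#   False
#   >>> cache.get("b", -1)
#   -1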
"""An LRU cache with a mapping interface.""" def __init__(self, capacity: int): if capacity < 1: raise ValueError("cache capacity must be greater than zero") self.capacity = capacity self._cache: OrderedDict[_KT, _VT] = OrderedDict() def __getitem__(self, key: _KT) -> _VT: value = self._cache[key] # This will raise a KeyError if key is not cached self._cache.move_to_end(key) return value def __setitem__(self, key: _KT, value: _VT) -> None: try: self._cache.move_to_end(key) except KeyError: if len(self._cache) >= self.capacity: self._cache.popitem(last=False) self._cache[key] = value def __delitem__(self, key: _KT) -> None: del self._cache[key] def __len__(self) -> int: return len(self._cache) def __iter__(self) -> Iterator[_KT]: return reversed(self._cache) def __contains__(self, key: _KT) -> bool: return key in self._cache @overload def get(self, key: _KT) -> Optional[_VT]: ... @overload def get(self, key: _KT, default: _VT) -> _VT: ... @overload def get(self, key: _KT, default: _T) -> Union[_VT, _T]: ... def get(self, key: _KT, default: object = None) -> object: """Return the cached value for _key_ if _key_ is in the cache, else default.""" try: return self[key] except KeyError: return default def keys(self) -> Iterator[_KT]: """Return an iterator over this cache's keys.""" return reversed(self._cache.keys()) def values(self) -> Iterator[_VT]: """Return an iterator over this cache's values.""" return reversed(self._cache.values()) def items(self) -> Iterator[Tuple[_KT, _VT]]: """Return an iterator over this cache's key/value pairs.""" return reversed(self._cache.items()) class ThreadSafeLRUCache(LRUCache[_KT, _VT]): """A thread safe LRU cache.""" def __init__(self, capacity: int): super().__init__(capacity) self._lock = Lock() def __getitem__(self, key: _KT) -> _VT: with self._lock: return super().__getitem__(key) def __setitem__(self, key: _KT, value: _VT) -> None: with self._lock: return super().__setitem__(key, value) def __delitem__(self, key: _KT) -> None: with self._lock: return super().__delitem__(key) def __contains__(self, key: _KT) -> bool: with self._lock: return super().__contains__(key) @overload def get(self, key: _KT) -> Optional[_VT]: ... @overload def get(self, key: _KT, default: _VT) -> _VT: ... @overload def get(self, key: _KT, default: _T) -> Union[_VT, _T]: ... def get(self, key: _KT, default: object = None) -> object: """Return the cached value for _key_ if _key_ is in the cache, else default.""" # NOTE: self.__getitem__ is already acquiring the lock. 
try: return self[key] except KeyError: return default def keys(self) -> Iterator[_KT]: """Return an iterator over this cache's keys.""" with self._lock: return super().keys() def values(self) -> Iterator[_VT]: """Return an iterator over this cache's values.""" with self._lock: return super().values() def items(self) -> Iterator[Tuple[_KT, _VT]]: """Return an iterator over this cache's key/value pairs.""" with self._lock: return super().items() jg-rp-python-jsonpath-830094f/jsonpath/match.py000066400000000000000000000101071512714264000214500ustar00rootroot00000000000000"""The JSONPath match object, as returned from `JSONPath.finditer()`.""" from __future__ import annotations from typing import Any from typing import List from typing import Mapping from typing import Optional from typing import Sequence from typing import Tuple from typing import Union from .pointer import JSONPointer from .serialize import canonical_string FilterContextVars = Mapping[str, Any] PathPart = Union[int, str] class JSONPathMatch: """A matched object with a concrete path. Attributes: children: Matched child nodes. This will only be populated after all children have been visited, usually by using `findall()` or `list(finditer())`. obj: The matched object. parent: The immediate parent to this match in the JSON document. If this is the root node, _parent_ will be `None`. path: The canonical string representation of the path to this match. parts: The keys, indices and/or slices that make up the path to this match. root: A reference to the root node in the JSON document. """ __slots__ = ( "_filter_context", "children", "obj", "parent", "parts", "path", "root", ) pointer_class = JSONPointer def __init__( self, *, filter_context: FilterContextVars, obj: object, parent: Optional[JSONPathMatch], path: str, parts: Tuple[PathPart, ...], root: Union[Sequence[Any], Mapping[str, Any]], ) -> None: self._filter_context = filter_context self.children: List[JSONPathMatch] = [] self.obj: object = obj self.parent: Optional[JSONPathMatch] = parent self.parts: Tuple[PathPart, ...] = parts self.path: str = path self.root: Union[Sequence[Any], Mapping[str, Any]] = root def __str__(self) -> str: return f"{_truncate(str(self.obj), 5)!r} @ {_truncate(self.path, 5)}" def add_child(self, *children: JSONPathMatch) -> None: """Append one or more children to this match.""" self.children.extend(children) def new_child(self, obj: object, key: Union[int, str]) -> JSONPathMatch: """Return a new JSONPathMatch instance with this instance as its parent.""" return self.__class__( filter_context=self.filter_context(), obj=obj, parent=self, parts=self.parts + (key,), path=self.path + f"[{canonical_string(key) if isinstance(key, str) else key}]", root=self.root, ) def filter_context(self) -> FilterContextVars: """Return filter context data for this match.""" return self._filter_context def pointer(self) -> JSONPointer: """Return a `JSONPointer` pointing to this match's path.""" return JSONPointer.from_match(self) @property def value(self) -> object: """Return the value associated with this match/node.""" return self.obj def _truncate(val: str, num: int, end: str = "...") -> str: # Replaces consecutive whitespace with a single newline. # Treats quoted whitespace the same as unquoted whitespace. 
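# Example (a hedged sketch of working with `JSONPathMatch` instances; the
# sample data and the exact normalized path shown are assumptions for
# demonstration):
#
#   >>> import jsonpath
#   >>> data = {"users": [{"name": "Sue"}]}
#   >>> match = next(jsonpath.finditer("$.users[0].name", data))
#   >>> match.obj
#   'Sue'
#   >>> match.path
#   "$['users'][0]['name']"
#   >>> str(match.pointer())
#   '/users/0/name'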
words = val.split() if len(words) < num: return " ".join(words) return " ".join(words[:num]) + end class NodeList(List[JSONPathMatch]): """List of JSONPathMatch objects, analogous to the spec's nodelist.""" def values(self) -> List[object]: """Return the values from this node list.""" return [match.obj for match in self] def values_or_singular(self) -> object: """Return the values from this node list.""" if len(self) == 1: return self[0].obj return [match.obj for match in self] def paths(self) -> List[str]: """Return a normalized path for each node in this node list.""" return [match.path for match in self] def empty(self) -> bool: """Return `True` if this node list is empty.""" return not bool(self) def __str__(self) -> str: return f"NodeList{super().__str__()}" jg-rp-python-jsonpath-830094f/jsonpath/parse.py000066400000000000000000000724251512714264000215010ustar00rootroot00000000000000"""The default JSONPath parser.""" from __future__ import annotations import json import re from typing import TYPE_CHECKING from typing import Callable from typing import Dict from typing import Iterable from typing import Iterator from typing import List from typing import Optional from typing import Union from jsonpath.function_extensions.filter_function import ExpressionType from jsonpath.function_extensions.filter_function import FilterFunction from .exceptions import JSONPathSyntaxError from .exceptions import JSONPathTypeError from .filter import CURRENT_KEY from .filter import FALSE from .filter import NIL from .filter import TRUE from .filter import UNDEFINED_LITERAL from .filter import BaseExpression from .filter import FilterContextPath from .filter import FilterExpression from .filter import FilterExpressionLiteral from .filter import FilterQuery from .filter import FloatLiteral from .filter import FunctionExtension from .filter import InfixExpression from .filter import IntegerLiteral from .filter import ListLiteral from .filter import Nil from .filter import PrefixExpression from .filter import RegexLiteral from .filter import RelativeFilterQuery from .filter import RootFilterQuery from .filter import StringLiteral from .path import JSONPath from .segments import JSONPathChildSegment from .segments import JSONPathRecursiveDescentSegment from .segments import JSONPathSegment from .selectors import Filter from .selectors import IndexSelector from .selectors import JSONPathSelector from .selectors import KeySelector from .selectors import KeysFilter from .selectors import KeysSelector from .selectors import NameSelector from .selectors import SingularQuerySelector from .selectors import SliceSelector from .selectors import WildcardSelector from .token import TOKEN_AND from .token import TOKEN_COLON from .token import TOKEN_COMMA from .token import TOKEN_CONTAINS from .token import TOKEN_DDOT from .token import TOKEN_DOT from .token import TOKEN_DOUBLE_QUOTE_STRING from .token import TOKEN_EOF from .token import TOKEN_EQ from .token import TOKEN_FALSE from .token import TOKEN_FILTER from .token import TOKEN_FILTER_CONTEXT from .token import TOKEN_FLOAT from .token import TOKEN_FUNCTION from .token import TOKEN_GE from .token import TOKEN_GT from .token import TOKEN_IN from .token import TOKEN_INT from .token import TOKEN_INTERSECTION from .token import TOKEN_KEY from .token import TOKEN_KEY_NAME from .token import TOKEN_KEYS from .token import TOKEN_KEYS_FILTER from .token import TOKEN_LBRACKET from .token import TOKEN_LE from .token import TOKEN_LG from .token import TOKEN_LPAREN from .token 
import TOKEN_LT from .token import TOKEN_MISSING from .token import TOKEN_NAME from .token import TOKEN_NE from .token import TOKEN_NIL from .token import TOKEN_NONE from .token import TOKEN_NOT from .token import TOKEN_NULL from .token import TOKEN_OR from .token import TOKEN_PSEUDO_ROOT from .token import TOKEN_RBRACKET from .token import TOKEN_RE from .token import TOKEN_RE_FLAGS from .token import TOKEN_RE_PATTERN from .token import TOKEN_ROOT from .token import TOKEN_RPAREN from .token import TOKEN_SELF from .token import TOKEN_SINGLE_QUOTE_STRING from .token import TOKEN_TRUE from .token import TOKEN_UNDEFINED from .token import TOKEN_UNION from .token import TOKEN_WHITESPACE from .token import TOKEN_WILD from .token import Token from .unescape import unescape_string if TYPE_CHECKING: from .env import JSONPathEnvironment from .stream import TokenStream # ruff: noqa: D102 INVALID_NAME_SELECTOR_CHARS = [ "\x00", "\x01", "\x02", "\x03", "\x04", "\x05", "\x06", "\x07", "\x08", "\t", "\n", "\x0b", "\x0c", "\r", "\x0e", "\x0f", "\x10", "\x11", "\x12", "\x13", "\x14", "\x15", "\x16", "\x17", "\x18", "\x19", "\x1a", "\x1b", "\x1c", "\x1d", "\x1e", "\x1f", ] class Parser: """A JSONPath parser bound to a JSONPathEnvironment.""" PRECEDENCE_LOWEST = 1 PRECEDENCE_LOGICAL_OR = 3 PRECEDENCE_LOGICAL_AND = 4 PRECEDENCE_RELATIONAL = 5 PRECEDENCE_MEMBERSHIP = 6 PRECEDENCE_PREFIX = 7 PRECEDENCES = { TOKEN_AND: PRECEDENCE_LOGICAL_AND, TOKEN_CONTAINS: PRECEDENCE_MEMBERSHIP, TOKEN_EQ: PRECEDENCE_RELATIONAL, TOKEN_GE: PRECEDENCE_RELATIONAL, TOKEN_GT: PRECEDENCE_RELATIONAL, TOKEN_IN: PRECEDENCE_MEMBERSHIP, TOKEN_LE: PRECEDENCE_RELATIONAL, TOKEN_LG: PRECEDENCE_RELATIONAL, TOKEN_LT: PRECEDENCE_RELATIONAL, TOKEN_NE: PRECEDENCE_RELATIONAL, TOKEN_NOT: PRECEDENCE_PREFIX, TOKEN_OR: PRECEDENCE_LOGICAL_OR, TOKEN_RE: PRECEDENCE_RELATIONAL, TOKEN_RPAREN: PRECEDENCE_LOWEST, } # Mapping of operator token to canonical string. BINARY_OPERATORS = { TOKEN_AND: "&&", TOKEN_CONTAINS: "contains", TOKEN_EQ: "==", TOKEN_GE: ">=", TOKEN_GT: ">", TOKEN_IN: "in", TOKEN_LE: "<=", TOKEN_LG: "<>", TOKEN_LT: "<", TOKEN_NE: "!=", TOKEN_OR: "||", TOKEN_RE: "=~", } COMPARISON_OPERATORS = frozenset( [ "==", ">=", ">", "<=", "<", "!=", "=~", ] ) # Infix operators that accept filter expression literals. INFIX_LITERAL_OPERATORS = frozenset( [ "==", ">=", ">", "<=", "<", "!=", "<>", "=~", "in", "contains", ] ) PREFIX_OPERATORS = frozenset( [ TOKEN_NOT, ] ) RE_FLAG_MAP = { "a": re.A, "i": re.I, "m": re.M, "s": re.S, } _INVALID_NAME_SELECTOR_CHARS = f"[{''.join(INVALID_NAME_SELECTOR_CHARS)}]" RE_INVALID_NAME_SELECTOR = re.compile( rf'(?:(?!(? 
None: self.env = env self.token_map: Dict[str, Callable[[TokenStream], BaseExpression]] = { TOKEN_DOUBLE_QUOTE_STRING: self.parse_string_literal, TOKEN_PSEUDO_ROOT: self.parse_absolute_query, TOKEN_FALSE: self.parse_boolean, TOKEN_FILTER_CONTEXT: self.parse_filter_context_path, TOKEN_FLOAT: self.parse_float_literal, TOKEN_FUNCTION: self.parse_function_extension, TOKEN_INT: self.parse_integer_literal, TOKEN_KEY: self.parse_current_key, TOKEN_LBRACKET: self.parse_list_literal, TOKEN_LPAREN: self.parse_grouped_expression, TOKEN_MISSING: self.parse_undefined, TOKEN_NIL: self.parse_nil, TOKEN_NONE: self.parse_nil, TOKEN_NOT: self.parse_prefix_expression, TOKEN_NULL: self.parse_nil, TOKEN_RE_PATTERN: self.parse_regex, TOKEN_ROOT: self.parse_absolute_query, TOKEN_SELF: self.parse_relative_query, TOKEN_SINGLE_QUOTE_STRING: self.parse_string_literal, TOKEN_TRUE: self.parse_boolean, TOKEN_UNDEFINED: self.parse_undefined, } self.list_item_map: Dict[str, Callable[[TokenStream], BaseExpression]] = { TOKEN_FALSE: self.parse_boolean, TOKEN_FLOAT: self.parse_float_literal, TOKEN_INT: self.parse_integer_literal, TOKEN_NIL: self.parse_nil, TOKEN_NONE: self.parse_nil, TOKEN_NULL: self.parse_nil, TOKEN_DOUBLE_QUOTE_STRING: self.parse_string_literal, TOKEN_SINGLE_QUOTE_STRING: self.parse_string_literal, TOKEN_TRUE: self.parse_boolean, } self.function_argument_map: Dict[ str, Callable[[TokenStream], BaseExpression] ] = { TOKEN_DOUBLE_QUOTE_STRING: self.parse_string_literal, TOKEN_PSEUDO_ROOT: self.parse_absolute_query, TOKEN_FALSE: self.parse_boolean, TOKEN_FILTER_CONTEXT: self.parse_filter_context_path, TOKEN_FLOAT: self.parse_float_literal, TOKEN_FUNCTION: self.parse_function_extension, TOKEN_INT: self.parse_integer_literal, TOKEN_KEY: self.parse_current_key, TOKEN_NIL: self.parse_nil, TOKEN_NONE: self.parse_nil, TOKEN_NULL: self.parse_nil, TOKEN_ROOT: self.parse_absolute_query, TOKEN_SELF: self.parse_relative_query, TOKEN_SINGLE_QUOTE_STRING: self.parse_string_literal, TOKEN_TRUE: self.parse_boolean, } def parse(self, stream: TokenStream) -> Iterator[JSONPathSegment]: """Parse a JSONPath query from a stream of tokens.""" # Leading whitespace is not allowed in strict mode. if stream.skip_whitespace() and self.env.strict: raise JSONPathSyntaxError( "unexpected leading whitespace", token=stream.current() ) # Trailing whitespace is not allowed in strict mode. if ( self.env.strict and stream.tokens and stream.tokens[-1].kind == TOKEN_WHITESPACE ): raise JSONPathSyntaxError( "unexpected trailing whitespace", token=stream.tokens[-1] ) token = stream.current() if token.kind == TOKEN_ROOT or ( token.kind == TOKEN_PSEUDO_ROOT and not self.env.strict ): stream.next() elif self.env.strict: # Raises a syntax error because the current token is not TOKEN_ROOT. stream.expect(TOKEN_ROOT) yield from self.parse_query(stream) if stream.current().kind not in (TOKEN_EOF, TOKEN_INTERSECTION, TOKEN_UNION): raise JSONPathSyntaxError( f"unexpected token {stream.current().value!r}", token=stream.current(), ) def parse_query(self, stream: TokenStream) -> Iterable[JSONPathSegment]: """Parse a JSONPath query string. This method assumes the root, current or pseudo root identifier has already been consumed. """ if not self.env.strict and stream.current().kind in { TOKEN_NAME, TOKEN_WILD, TOKEN_KEYS, TOKEN_KEY_NAME, }: # A non-standard "bare" path. One that starts with a shorthand selector # without a leading identifier (`$`, `@`, `^` or `_`). # # When no identifier is given, a root query (`$`) is assumed. 
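# Example (a hedged sketch of the non-standard "bare" paths handled here; they
# are accepted by the default environment but not in strict mode):
#
#   >>> import jsonpath
#   >>> data = {"users": [{"name": "Sue"}, {"name": "John"}]}
#   >>> jsonpath.findall("users[*].name", data)  # same as "$.users[*].name"
#   ['Sue', 'John']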
token = stream.current() selector = self.parse_shorthand_selector(stream) yield JSONPathChildSegment(env=self.env, token=token, selectors=(selector,)) while True: stream.skip_whitespace() token = stream.next() if token.kind == TOKEN_DOT: selector = self.parse_shorthand_selector(stream) yield JSONPathChildSegment( env=self.env, token=token, selectors=(selector,) ) elif token.kind == TOKEN_DDOT: if stream.current().kind == TOKEN_LBRACKET: selectors = tuple(self.parse_bracketed_selection(stream)) else: selectors = (self.parse_shorthand_selector(stream),) yield JSONPathRecursiveDescentSegment( env=self.env, token=token, selectors=selectors ) elif token.kind == TOKEN_LBRACKET: stream.pos -= 1 yield JSONPathChildSegment( env=self.env, token=token, selectors=tuple(self.parse_bracketed_selection(stream)), ) elif token.kind == TOKEN_EOF: break else: # An embedded query. Put the token back on the stream. stream.pos -= 1 break def parse_shorthand_selector(self, stream: TokenStream) -> JSONPathSelector: token = stream.next() if token.kind == TOKEN_NAME: return NameSelector( env=self.env, token=token, name=token.value, ) if token.kind == TOKEN_KEY_NAME: return KeySelector( env=self.env, token=token, key=token.value, ) if token.kind == TOKEN_WILD: return WildcardSelector( env=self.env, token=token, ) if token.kind == TOKEN_KEYS: if stream.current().kind == TOKEN_NAME: return KeySelector( env=self.env, token=token, key=self._decode_string_literal(stream.next()), ) return KeysSelector( env=self.env, token=token, ) raise JSONPathSyntaxError("expected a shorthand selector", token=token) def parse_bracketed_selection(self, stream: TokenStream) -> List[JSONPathSelector]: # noqa: PLR0912, PLR0915 segment_token = stream.eat(TOKEN_LBRACKET) selectors: List[JSONPathSelector] = [] while True: stream.skip_whitespace() token = stream.current() if token.kind == TOKEN_RBRACKET: break if token.kind == TOKEN_INT: if ( stream.peek().kind == TOKEN_COLON or stream.peek(2).kind == TOKEN_COLON ): selectors.append(self.parse_slice(stream)) else: self._raise_for_leading_zero(token) selectors.append( IndexSelector( env=self.env, token=token, index=int(token.value), ) ) stream.next() elif token.kind in ( TOKEN_DOUBLE_QUOTE_STRING, TOKEN_SINGLE_QUOTE_STRING, ): selectors.append( NameSelector( env=self.env, token=token, name=self._decode_string_literal(token), ), ) stream.next() elif token.kind == TOKEN_COLON: selectors.append(self.parse_slice(stream)) elif token.kind == TOKEN_WILD: selectors.append(WildcardSelector(env=self.env, token=token)) stream.next() elif token.kind == TOKEN_KEYS: stream.eat(TOKEN_KEYS) if stream.current().kind in ( TOKEN_DOUBLE_QUOTE_STRING, TOKEN_SINGLE_QUOTE_STRING, ): selectors.append( KeySelector( env=self.env, token=token, key=self._decode_string_literal(stream.next()), ) ) else: selectors.append(KeysSelector(env=self.env, token=token)) elif token.kind == TOKEN_FILTER: selectors.append(self.parse_filter_selector(stream)) elif token.kind == TOKEN_KEYS_FILTER: selectors.append(self.parse_filter_selector(stream, keys=True)) elif token.kind in (TOKEN_ROOT, TOKEN_NAME): selectors.append(self.parse_singular_query_selector(stream)) elif token.kind == TOKEN_EOF: raise JSONPathSyntaxError("unexpected end of query", token=token) else: raise JSONPathSyntaxError( f"unexpected token in bracketed selection {token.kind!r}", token=token, ) stream.skip_whitespace() if stream.current().kind == TOKEN_EOF: raise JSONPathSyntaxError( "unexpected end of segment", token=stream.current(), ) if stream.current().kind != 
TOKEN_RBRACKET: stream.eat(TOKEN_COMMA) stream.skip_whitespace() if stream.current().kind == TOKEN_RBRACKET: raise JSONPathSyntaxError( "unexpected trailing comma", token=stream.current() ) stream.eat(TOKEN_RBRACKET) if not selectors: raise JSONPathSyntaxError("empty bracketed segment", token=segment_token) return selectors def parse_slice(self, stream: TokenStream) -> SliceSelector: """Parse a slice JSONPath expression from a stream of tokens.""" token = stream.current() start: Optional[int] = None stop: Optional[int] = None step: Optional[int] = None def _maybe_index(token: Token) -> bool: if token.kind == TOKEN_INT: if len(token.value) > 1 and token.value.startswith(("0", "-0")): raise JSONPathSyntaxError( f"invalid index {token.value!r}", token=token ) return True return False # 1: or : if _maybe_index(stream.current()): start = int(stream.current().value) stream.next() stream.skip_whitespace() stream.expect(TOKEN_COLON) stream.next() stream.skip_whitespace() # 1 or 1: or : or ? if _maybe_index(stream.current()): stop = int(stream.current().value) stream.next() stream.skip_whitespace() if stream.current().kind == TOKEN_COLON: stream.next() elif stream.current().kind == TOKEN_COLON: stream.expect(TOKEN_COLON) stream.next() # 1 or ? stream.skip_whitespace() if _maybe_index(stream.current()): step = int(stream.current().value) stream.next() return SliceSelector( env=self.env, token=token, start=start, stop=stop, step=step, ) def parse_filter_selector( self, stream: TokenStream, *, keys: bool = False ) -> Union[Filter, KeysFilter]: token = stream.next() expr = self.parse_filter_expression(stream) if self.env.well_typed and isinstance(expr, FunctionExtension): func = self.env.function_extensions.get(expr.name) if ( func and isinstance(func, FilterFunction) and func.return_type == ExpressionType.VALUE ): raise JSONPathTypeError( f"result of {expr.name}() must be compared", token=token ) if isinstance(expr, (FilterExpressionLiteral, Nil)): raise JSONPathSyntaxError( "filter expression literals outside of " "function expressions must be compared", token=token, ) if keys: return KeysFilter( env=self.env, token=token, expression=FilterExpression(expr) ) return Filter(env=self.env, token=token, expression=FilterExpression(expr)) def parse_boolean(self, stream: TokenStream) -> BaseExpression: if stream.next().kind == TOKEN_TRUE: return TRUE return FALSE def parse_nil(self, stream: TokenStream) -> BaseExpression: stream.next() return NIL def parse_undefined(self, stream: TokenStream) -> BaseExpression: stream.next() return UNDEFINED_LITERAL def parse_string_literal(self, stream: TokenStream) -> BaseExpression: return StringLiteral(value=self._decode_string_literal(stream.next())) def parse_integer_literal(self, stream: TokenStream) -> BaseExpression: token = stream.next() value = token.value if self.env.strict and value.startswith("0") and len(value) > 1: raise JSONPathSyntaxError("invalid integer literal", token=token) # Convert to float first to handle scientific notation. 
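# Example (a hedged sketch): in the default environment an integer literal in
# a filter expression may use scientific notation, which is normalized to an
# int here. The sample data is illustrative.
#
#   >>> import jsonpath
#   >>> jsonpath.findall("$.a[?@.n == 1e2]", {"a": [{"n": 100}, {"n": 5}]})
#   [{'n': 100}]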
return IntegerLiteral(value=int(float(value))) def parse_float_literal(self, stream: TokenStream) -> BaseExpression: token = stream.next() value = token.value if value.startswith("0") and len(value.split(".")[0]) > 1: raise JSONPathSyntaxError("invalid float literal", token=token) return FloatLiteral(value=float(value)) def parse_prefix_expression(self, stream: TokenStream) -> BaseExpression: token = stream.next() assert token.kind == TOKEN_NOT return PrefixExpression( operator="!", right=self.parse_filter_expression( stream, precedence=self.PRECEDENCE_PREFIX ), ) def parse_infix_expression( self, stream: TokenStream, left: BaseExpression ) -> BaseExpression: token = stream.next() precedence = self.PRECEDENCES.get(token.kind, self.PRECEDENCE_LOWEST) right = self.parse_filter_expression(stream, precedence) operator = self.BINARY_OPERATORS[token.kind] if self.env.well_typed and operator in self.COMPARISON_OPERATORS: self._raise_for_non_comparable_function(left, token) self._raise_for_non_comparable_function(right, token) if operator not in self.INFIX_LITERAL_OPERATORS: if isinstance(left, (FilterExpressionLiteral, Nil)): raise JSONPathSyntaxError( "filter expression literals outside of " "function expressions must be compared", token=token, ) if isinstance(right, (FilterExpressionLiteral, Nil)): raise JSONPathSyntaxError( "filter expression literals outside of " "function expressions must be compared", token=token, ) return InfixExpression(left, operator, right) def parse_grouped_expression(self, stream: TokenStream) -> BaseExpression: _token = stream.eat(TOKEN_LPAREN) expr = self.parse_filter_expression(stream) while stream.current().kind != TOKEN_RPAREN: token = stream.current() if token.kind in (TOKEN_EOF, TOKEN_RBRACKET): raise JSONPathSyntaxError("unbalanced parentheses", token=_token) expr = self.parse_infix_expression(stream, expr) stream.eat(TOKEN_RPAREN) return expr def parse_absolute_query(self, stream: TokenStream) -> BaseExpression: root = stream.next() # Could be TOKEN_ROOT or TOKEN_PSEUDO_ROOT return RootFilterQuery( JSONPath( env=self.env, segments=self.parse_query(stream), pseudo_root=root.kind == TOKEN_PSEUDO_ROOT, ) ) def parse_relative_query(self, stream: TokenStream) -> BaseExpression: stream.eat(TOKEN_SELF) return RelativeFilterQuery( JSONPath(env=self.env, segments=self.parse_query(stream)) ) def parse_singular_query_selector( self, stream: TokenStream ) -> SingularQuerySelector: token = ( stream.next() if stream.current().kind == TOKEN_ROOT else stream.current() ) query = JSONPath(env=self.env, segments=self.parse_query(stream)) if not query.singular_query(): raise JSONPathSyntaxError( "embedded query selectors must be singular queries", token=token ) return SingularQuerySelector( env=self.env, token=token, query=query, ) def parse_current_key(self, stream: TokenStream) -> BaseExpression: stream.next() return CURRENT_KEY def parse_filter_context_path(self, stream: TokenStream) -> BaseExpression: stream.next() return FilterContextPath( JSONPath(env=self.env, segments=self.parse_query(stream)) ) def parse_regex(self, stream: TokenStream) -> BaseExpression: pattern = stream.current().value flags = 0 if stream.peek().kind == TOKEN_RE_FLAGS: stream.next() for flag in set(stream.next().value): flags |= self.RE_FLAG_MAP[flag] return RegexLiteral(value=re.compile(pattern, flags)) def parse_list_literal(self, stream: TokenStream) -> BaseExpression: stream.eat(TOKEN_LBRACKET) list_items: List[BaseExpression] = [] while True: stream.skip_whitespace() if stream.current().kind == 
TOKEN_RBRACKET: break try: list_items.append(self.list_item_map[stream.current().kind](stream)) except KeyError as err: raise JSONPathSyntaxError( f"unexpected {stream.current().value!r}", token=stream.current(), ) from err stream.skip_whitespace() if stream.current().kind != TOKEN_RBRACKET: stream.eat(TOKEN_COMMA) stream.skip_whitespace() stream.eat(TOKEN_RBRACKET) return ListLiteral(list_items) def parse_function_extension(self, stream: TokenStream) -> BaseExpression: function_arguments: List[BaseExpression] = [] function_token = stream.next() stream.eat(TOKEN_LPAREN) while True: stream.skip_whitespace() token = stream.current() if token.kind == TOKEN_RPAREN: break try: func = self.function_argument_map[token.kind] except KeyError as err: raise JSONPathSyntaxError( f"unexpected {token.value!r}", token=token ) from err expr = func(stream) stream.skip_whitespace() while stream.current().kind in self.BINARY_OPERATORS: expr = self.parse_infix_expression(stream, expr) function_arguments.append(expr) stream.skip_whitespace() if stream.current().kind != TOKEN_RPAREN: stream.eat(TOKEN_COMMA) stream.eat(TOKEN_RPAREN) return FunctionExtension( function_token.value, self.env.validate_function_extension_signature( function_token, function_arguments ), ) def parse_filter_expression( self, stream: TokenStream, precedence: int = PRECEDENCE_LOWEST ) -> BaseExpression: stream.skip_whitespace() token = stream.current() try: left = self.token_map[token.kind](stream) except KeyError as err: if token.kind in (TOKEN_EOF, TOKEN_RBRACKET): msg = "end of expression" else: msg = repr(token.value) raise JSONPathSyntaxError(f"unexpected {msg}", token=token) from err while True: stream.skip_whitespace() kind = stream.current().kind if ( kind not in self.BINARY_OPERATORS or self.PRECEDENCES.get(kind, self.PRECEDENCE_LOWEST) < precedence ): break left = self.parse_infix_expression(stream, left) return left def _decode_string_literal(self, token: Token) -> str: if self.env.strict: # For strict compliance with RC 9535, we must unescape string literals # ourself. RFC 9535 is more strict than json.loads when it comes to # parsing \uXXXX escape sequences. 
return unescape_string( token.value, token, "'" if token.kind == TOKEN_SINGLE_QUOTE_STRING else '"', ) if self.env.unicode_escape: if token.kind == TOKEN_SINGLE_QUOTE_STRING: value = token.value.replace('"', '\\"').replace("\\'", "'") else: value = token.value try: rv = json.loads(f'"{value}"') assert isinstance(rv, str) return rv except json.JSONDecodeError as err: message = f"decode error: {str(err).split(':')[1]}" raise JSONPathSyntaxError(message, token=token) from None return token.value def _raise_for_non_comparable_function( self, expr: BaseExpression, token: Token ) -> None: if isinstance(expr, FilterQuery) and not expr.path.singular_query(): raise JSONPathTypeError("non-singular query is not comparable", token=token) if isinstance(expr, FunctionExtension): func = self.env.function_extensions.get(expr.name) if ( isinstance(func, FilterFunction) and func.return_type != ExpressionType.VALUE ): raise JSONPathTypeError( f"result of {expr.name}() is not comparable", token ) def _raise_for_leading_zero(self, token: Token) -> None: if ( len(token.value) > 1 and token.value.startswith("0") ) or token.value.startswith("-0"): raise JSONPathSyntaxError("leading zero in index selector", token=token) jg-rp-python-jsonpath-830094f/jsonpath/patch.py000066400000000000000000000617711512714264000214700ustar00rootroot00000000000000"""JSON Patch, as per RFC 6902.""" from __future__ import annotations import copy import json from abc import ABC from abc import abstractmethod from io import IOBase from typing import Any from typing import Dict from typing import Iterable from typing import List from typing import Mapping from typing import MutableMapping from typing import MutableSequence from typing import TypeVar from typing import Union from jsonpath._data import load_data from jsonpath.exceptions import JSONPatchError from jsonpath.exceptions import JSONPatchTestFailure from jsonpath.exceptions import JSONPointerError from jsonpath.exceptions import JSONPointerIndexError from jsonpath.exceptions import JSONPointerKeyError from jsonpath.exceptions import JSONPointerTypeError from jsonpath.pointer import UNDEFINED from jsonpath.pointer import JSONPointer class Op(ABC): """One of the JSON Patch operations.""" name = "base" @abstractmethod def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" @abstractmethod def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" class OpAdd(Op): """The JSON Patch _add_ operation.""" __slots__ = ("path", "value") name = "add" def __init__(self, path: JSONPointer, value: object) -> None: self.path = path self.value = value def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" parent, obj = self.path.resolve_parent(data) if parent is None: # Replace the root object. # The following op, if any, will raise a JSONPatchError if needed. 
return self.value # type: ignore target = self.path.parts[-1] if isinstance(parent, MutableSequence): if obj is UNDEFINED: if target == "-": parent.append(self.value) else: index = self.path._index(target) # noqa: SLF001 if index == len(parent): parent.append(self.value) else: raise JSONPatchError("index out of range") else: parent.insert(int(target), self.value) elif isinstance(parent, MutableMapping): parent[str(target)] = self.value else: raise JSONPatchError( f"unexpected operation on {parent.__class__.__name__!r}" ) return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "path": str(self.path), "value": self.value} class OpAddNe(OpAdd): """A non-standard _add if not exists_ operation. This is like _OpAdd_, but only adds object/dict keys/values if they key does not already exist. **New in version 1.2.0** """ __slots__ = ("path", "value") name = "addne" def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" parent, obj = self.path.resolve_parent(data) if parent is None: # Replace the root object. # The following op, if any, will raise a JSONPatchError if needed. return self.value # type: ignore target = self.path.parts[-1] if isinstance(parent, MutableSequence): if obj is UNDEFINED: parent.append(self.value) else: parent.insert(int(target), self.value) elif isinstance(parent, MutableMapping) and target not in parent: parent[target] = self.value return data class OpAddAp(OpAdd): """A non-standard add operation that appends to arrays/lists . This is like _OpAdd_, but assumes an index of "-" if the path can not be resolved rather than raising a JSONPatchError. **New in version 1.2.0** """ __slots__ = ("path", "value") name = "addap" def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" parent, obj = self.path.resolve_parent(data) if parent is None: # Replace the root object. # The following op, if any, will raise a JSONPatchError if needed. 
return self.value # type: ignore target = self.path.parts[-1] if isinstance(parent, MutableSequence): if obj is UNDEFINED: parent.append(self.value) else: parent.insert(int(target), self.value) elif isinstance(parent, MutableMapping): parent[target] = self.value else: raise JSONPatchError( f"unexpected operation on {parent.__class__.__name__!r}" ) return data class OpRemove(Op): """The JSON Patch _remove_ operation.""" __slots__ = ("path",) name = "remove" def __init__(self, path: JSONPointer) -> None: self.path = path def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" parent, obj = self.path.resolve_parent(data) if parent is None: raise JSONPatchError("can't remove root") if isinstance(parent, MutableSequence): if obj is UNDEFINED: raise JSONPatchError("can't remove nonexistent item") del parent[int(self.path.parts[-1])] elif isinstance(parent, MutableMapping): if obj is UNDEFINED: raise JSONPatchError("can't remove nonexistent property") del parent[str(self.path.parts[-1])] else: raise JSONPatchError( f"unexpected operation on {parent.__class__.__name__!r}" ) return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "path": str(self.path)} class OpReplace(Op): """The JSON Patch _replace_ operation.""" __slots__ = ("path", "value") name = "replace" def __init__(self, path: JSONPointer, value: object) -> None: self.path = path self.value = value def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" parent, obj = self.path.resolve_parent(data) if parent is None: return self.value # type: ignore if isinstance(parent, MutableSequence): if obj is UNDEFINED: raise JSONPatchError("can't replace nonexistent item") parent[int(self.path.parts[-1])] = self.value elif isinstance(parent, MutableMapping): if obj is UNDEFINED: raise JSONPatchError("can't replace nonexistent property") parent[str(self.path.parts[-1])] = self.value else: raise JSONPatchError( f"unexpected operation on {parent.__class__.__name__!r}" ) return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "path": str(self.path), "value": self.value} class OpMove(Op): """The JSON Patch _move_ operation.""" __slots__ = ("source", "dest") name = "move" def __init__(self, from_: JSONPointer, path: JSONPointer) -> None: self.source = from_ self.dest = path def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" if self.dest.is_relative_to(self.source): raise JSONPatchError("can't move object to one of its own children") source_parent, source_obj = self.source.resolve_parent(data) if source_obj is UNDEFINED: raise JSONPatchError("source object does not exist") if isinstance(source_parent, MutableSequence): del source_parent[int(self.source.parts[-1])] if isinstance(source_parent, MutableMapping): del source_parent[str(self.source.parts[-1])] dest_parent, _ = self.dest.resolve_parent(data) if dest_parent is None: # Move source to root return source_obj # type: ignore if isinstance(dest_parent, MutableSequence): dest_parent.insert(int(self.dest.parts[-1]), source_obj) elif 
isinstance(dest_parent, MutableMapping): dest_parent[str(self.dest.parts[-1])] = source_obj else: raise JSONPatchError( f"unexpected operation on {dest_parent.__class__.__name__!r}" ) return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "from": str(self.source), "path": str(self.dest)} class OpCopy(Op): """The JSON Patch _copy_ operation.""" __slots__ = ("source", "dest") name = "copy" def __init__(self, from_: JSONPointer, path: JSONPointer) -> None: self.source = from_ self.dest = path def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" source_parent, source_obj = self.source.resolve_parent(data) if source_obj is UNDEFINED: raise JSONPatchError("source object does not exist") dest_parent, dest_obj = self.dest.resolve_parent(data) if dest_parent is None: # Copy source to root return copy.deepcopy(source_obj) # type: ignore if isinstance(dest_parent, MutableSequence): dest_parent.insert(int(self.dest.parts[-1]), copy.deepcopy(source_obj)) elif isinstance(dest_parent, MutableMapping): dest_parent[str(self.dest.parts[-1])] = copy.deepcopy(source_obj) else: raise JSONPatchError( f"unexpected operation on {dest_parent.__class__.__name__!r}" ) return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "from": str(self.source), "path": str(self.dest)} class OpTest(Op): """The JSON Patch _test_ operation.""" __slots__ = ("path", "value") name = "test" def __init__(self, path: JSONPointer, value: object) -> None: self.path = path self.value = value def apply( self, data: Union[MutableSequence[object], MutableMapping[str, object]] ) -> Union[MutableSequence[object], MutableMapping[str, object]]: """Apply this patch operation to _data_.""" _, obj = self.path.resolve_parent(data) if not obj == self.value: raise JSONPatchTestFailure return data def asdict(self) -> Dict[str, object]: """Return a dictionary representation of this operation.""" return {"op": self.name, "path": str(self.path), "value": self.value} Self = TypeVar("Self", bound="JSONPatch") class JSONPatch: """Modify JSON-like data with JSON Patch. RFC 6902 defines operations to manipulate a JSON document. `JSONPatch` supports parsing and applying standard JSON Patch formatted operations, and provides a Python builder API following the same semantics as RFC 6902. Arguments: ops: A JSON Patch formatted document or equivalent Python objects. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing JSON pointers. uri_decode: If `True`, JSON pointers will be unescaped using _urllib_ before being parsed. Raises: JSONPatchError: If _ops_ is given and any of the provided operations is malformed. 
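Example:
    A minimal usage sketch. The sample document, pointers and values below are
    illustrative assumptions, not taken from the library's test suite:

        from jsonpath.patch import JSONPatch

        # Parse a standard RFC 6902 patch document given as Python dicts.
        patch = JSONPatch([
            {"op": "add", "path": "/a/b", "value": "foo"},
            {"op": "remove", "path": "/a/c"},
        ])
        data = {"a": {"c": 42}}
        patch.apply(data)  # modifies data in place -> {"a": {"b": "foo"}}

        # Or build the equivalent patch with the chaining API.
        patch = JSONPatch().add("/a/b", "foo").remove("/a/c")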
""" def __init__( self, ops: Union[str, IOBase, Iterable[Mapping[str, object]], None] = None, *, unicode_escape: bool = True, uri_decode: bool = False, ) -> None: self.ops: List[Op] = [] self.unicode_escape = unicode_escape self.uri_decode = uri_decode if ops: self._load(ops) def _load(self, patch: Union[str, IOBase, Iterable[Mapping[str, object]]]) -> None: if isinstance(patch, IOBase): _patch = json.loads(patch.read()) elif isinstance(patch, str): _patch = json.loads(patch) else: _patch = patch try: self._build(_patch) except TypeError as err: raise JSONPatchError( "expected a sequence of patch operations, " f"found {_patch.__class__.__name__!r}" ) from err def _build(self, patch: Iterable[Mapping[str, object]]) -> None: for i, operation in enumerate(patch): try: op = operation["op"] except KeyError as err: raise JSONPatchError(f"missing 'op' member at op {i}") from err if op == "add": self.add( path=self._op_pointer(operation, "path", "add", i), value=self._op_value(operation, "value", "add", i), ) elif op == "addne": self.addne( path=self._op_pointer(operation, "path", "addne", i), value=self._op_value(operation, "value", "addne", i), ) elif op == "addap": self.addap( path=self._op_pointer(operation, "path", "addap", i), value=self._op_value(operation, "value", "addap", i), ) elif op == "remove": self.remove(path=self._op_pointer(operation, "path", "add", i)) elif op == "replace": self.replace( path=self._op_pointer(operation, "path", "replace", i), value=self._op_value(operation, "value", "replace", i), ) elif op == "move": self.move( from_=self._op_pointer(operation, "from", "move", i), path=self._op_pointer(operation, "path", "move", i), ) elif op == "copy": self.copy( from_=self._op_pointer(operation, "from", "copy", i), path=self._op_pointer(operation, "path", "copy", i), ) elif op == "test": self.test( path=self._op_pointer(operation, "path", "test", i), value=self._op_value(operation, "value", "test", i), ) else: raise JSONPatchError( "expected 'op' to be one of 'add', 'remove', 'replace', " f"'move', 'copy' or 'test' ({op}:{i})" ) def _op_pointer( self, operation: Mapping[str, object], key: str, op: str, i: int ) -> JSONPointer: try: pointer = operation[key] except KeyError as err: raise JSONPatchError(f"missing property {key!r} ({op}:{i})") from err if not isinstance(pointer, str): raise JSONPatchError( f"expected a JSON Pointer string for {key!r}, " f"found {pointer.__class__.__name__!r} " f"({op}:{i})" ) try: return JSONPointer( pointer, unicode_escape=self.unicode_escape, uri_decode=self.uri_decode ) except JSONPointerError as err: raise JSONPatchError(f"{err} ({op}:{i})") from err def _op_value( self, operation: Mapping[str, object], key: str, op: str, i: int ) -> object: try: return operation[key] except KeyError as err: raise JSONPatchError(f"missing property {key!r} ({op}:{i})") from err def _ensure_pointer(self, path: Union[str, JSONPointer]) -> JSONPointer: if isinstance(path, str): return JSONPointer( path, unicode_escape=self.unicode_escape, uri_decode=self.uri_decode, ) assert isinstance(path, JSONPointer) return path def add(self: Self, path: Union[str, JSONPointer], value: object) -> Self: """Append an _add_ operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. value: The object to add. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. 
""" pointer = self._ensure_pointer(path) self.ops.append(OpAdd(path=pointer, value=value)) return self def addne(self: Self, path: Union[str, JSONPointer], value: object) -> Self: """Append an _addne_ operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. value: The object to add. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ pointer = self._ensure_pointer(path) self.ops.append(OpAddNe(path=pointer, value=value)) return self def addap(self: Self, path: Union[str, JSONPointer], value: object) -> Self: """Append an _addap_ operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. value: The object to add. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ pointer = self._ensure_pointer(path) self.ops.append(OpAddAp(path=pointer, value=value)) return self def remove(self: Self, path: Union[str, JSONPointer]) -> Self: """Append a _remove_ operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ pointer = self._ensure_pointer(path) self.ops.append(OpRemove(path=pointer)) return self def replace(self: Self, path: Union[str, JSONPointer], value: object) -> Self: """Append a _replace_ operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. value: The object to add. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ pointer = self._ensure_pointer(path) self.ops.append(OpReplace(path=pointer, value=value)) return self def move( self: Self, from_: Union[str, JSONPointer], path: Union[str, JSONPointer] ) -> Self: """Append a _move_ operation to this patch. Arguments: from_: A string representation of a JSON Pointer, or one that has already been parsed. path: A string representation of a JSON Pointer, or one that has already been parsed. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ source_pointer = self._ensure_pointer(from_) dest_pointer = self._ensure_pointer(path) self.ops.append(OpMove(from_=source_pointer, path=dest_pointer)) return self def copy( self: Self, from_: Union[str, JSONPointer], path: Union[str, JSONPointer] ) -> Self: """Append a _copy_ operation to this patch. Arguments: from_: A string representation of a JSON Pointer, or one that has already been parsed. path: A string representation of a JSON Pointer, or one that has already been parsed. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. """ source_pointer = self._ensure_pointer(from_) dest_pointer = self._ensure_pointer(path) self.ops.append(OpCopy(from_=source_pointer, path=dest_pointer)) return self def test(self: Self, path: Union[str, JSONPointer], value: object) -> Self: """Append a test operation to this patch. Arguments: path: A string representation of a JSON Pointer, or one that has already been parsed. value: The object to test. Returns: This `JSONPatch` instance, so we can build a JSON Patch by chaining calls to JSON Patch operation methods. 
""" pointer = self._ensure_pointer(path) self.ops.append(OpTest(path=pointer, value=value)) return self def apply( self, data: Union[str, IOBase, MutableSequence[Any], MutableMapping[str, Any]], ) -> object: """Apply all operations from this patch to _data_. If _data_ is a string or file-like object, it will be loaded with _json.loads_. Otherwise _data_ should be a JSON-like data structure and will be modified in place. When modifying _data_ in place, we return modified data too. This is to allow for replacing _data's_ root element, which is allowed by some patch operations. Arguments: data: The target JSON "document" or equivalent Python objects. Returns: Modified input data. Raises: JSONPatchError: When a patch operation fails. JSONPatchTestFailure: When a _test_ operation does not pass. `JSONPatchTestFailure` is a subclass of `JSONPatchError`. """ _data = load_data(data) for i, op in enumerate(self.ops): try: _data = op.apply(_data) except JSONPatchTestFailure as err: raise JSONPatchTestFailure(f"test failed ({op.name}:{i})") from err except JSONPointerKeyError as err: raise JSONPatchError(f"{err} ({op.name}:{i})") from err except JSONPointerIndexError as err: raise JSONPatchError(f"{err} ({op.name}:{i})") from err except JSONPointerTypeError as err: raise JSONPatchError(f"{err} ({op.name}:{i})") from err except (JSONPointerError, JSONPatchError) as err: raise JSONPatchError(f"{err} ({op.name}:{i})") from err return _data def asdicts(self) -> List[Dict[str, object]]: """Return a list of this patch's operations as dictionaries.""" return [op.asdict() for op in self.ops] def apply( patch: Union[str, IOBase, Iterable[Mapping[str, object]], None], data: Union[str, IOBase, MutableSequence[Any], MutableMapping[str, Any]], *, unicode_escape: bool = True, uri_decode: bool = False, ) -> object: """Apply the JSON Patch _patch_ to _data_. If _data_ is a string or file-like object, it will be loaded with _json.loads_. Otherwise _data_ should be a JSON-like data structure and will be **modified in-place**. When modifying _data_ in-place, we return modified data too. This is to allow for replacing _data's_ root element, which is allowed by some patch operations. Arguments: patch: A JSON Patch formatted document or equivalent Python objects. data: The target JSON "document" or equivalent Python objects. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing JSON pointers. uri_decode: If `True`, JSON pointers will be unescaped using _urllib_ before being parsed. Returns: Modified input data. Raises: JSONPatchError: When a patch operation fails. JSONPatchTestFailure: When a _test_ operation does not pass. `JSONPatchTestFailure` is a subclass of `JSONPatchError`. 
""" return JSONPatch( patch, unicode_escape=unicode_escape, uri_decode=uri_decode, ).apply(data) jg-rp-python-jsonpath-830094f/jsonpath/path.py000066400000000000000000000364551512714264000213260ustar00rootroot00000000000000"""A compiled JSONPath ready to be applied to a JSON string or Python object.""" from __future__ import annotations import itertools from typing import TYPE_CHECKING from typing import AsyncIterable from typing import Iterable from typing import List from typing import Optional from typing import Tuple from typing import TypeVar from typing import Union from jsonpath._data import load_data from jsonpath.fluent_api import Query from jsonpath.match import FilterContextVars from jsonpath.match import JSONPathMatch from jsonpath.segments import JSONPathRecursiveDescentSegment from jsonpath.selectors import IndexSelector from jsonpath.selectors import NameSelector if TYPE_CHECKING: from jsonpath._types import JSONData from .env import JSONPathEnvironment from .segments import JSONPathSegment class JSONPath: """A compiled JSONPath ready to be applied to a JSON string or Python object. Arguments: env: The `JSONPathEnvironment` this path is bound to. segments: An iterable of `JSONPathSegment` instances, as generated by a `Parser`. pseudo_root: Indicates if target JSON values should be wrapped in a single- element array, so as to make the target root value selectable. Attributes: env: The `JSONPathEnvironment` this path is bound to. selectors: The `JSONPathSelector` instances that make up this path. """ __slots__ = ("env", "pseudo_root", "segments") def __init__( self, *, env: JSONPathEnvironment, segments: Iterable[JSONPathSegment], pseudo_root: bool = False, ) -> None: self.env = env self.segments = tuple(segments) self.pseudo_root = pseudo_root def __str__(self) -> str: return self.env.root_token + "".join(str(segment) for segment in self.segments) def __eq__(self, __value: object) -> bool: return isinstance(__value, JSONPath) and self.segments == __value.segments def __hash__(self) -> int: return hash(self.segments) def findall( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> List[object]: """Find all objects in `data` matching the given JSONPath `path`. If `data` is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return [ match.obj for match in self.finditer(data, filter_context=filter_context) ] def finditer( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Iterable[JSONPathMatch]: """Generate `JSONPathMatch` objects for each match. If `data` is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: An iterator yielding `JSONPathMatch` objects for each match. Raises: JSONPathSyntaxError: If the path is invalid. 
JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ _data = load_data(data) path = self.env.pseudo_root_token if self.pseudo_root else self.env.root_token matches: Iterable[JSONPathMatch] = [ JSONPathMatch( filter_context=filter_context or {}, obj=[_data] if self.pseudo_root else _data, parent=None, path=path, parts=(), root=_data, ) ] for segment in self.segments: matches = segment.resolve(matches) return matches async def findall_async( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> List[object]: """An async version of `findall()`.""" return [ match.obj async for match in await self.finditer_async( data, filter_context=filter_context ) ] async def finditer_async( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> AsyncIterable[JSONPathMatch]: """An async version of `finditer()`.""" _data = load_data(data) path = self.env.pseudo_root_token if self.pseudo_root else self.env.root_token async def root_iter() -> AsyncIterable[JSONPathMatch]: yield self.env.match_class( filter_context=filter_context or {}, obj=[_data] if self.pseudo_root else _data, parent=None, path=path, parts=(), root=_data, ) matches: AsyncIterable[JSONPathMatch] = root_iter() for segment in self.segments: matches = segment.resolve_async(matches) return matches def match( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Union[JSONPathMatch, None]: """Return a `JSONPathMatch` instance for the first object found in _data_. `None` is returned if there are no matches. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A `JSONPathMatch` object for the first match, or `None` if there were no matches. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ try: return next(iter(self.finditer(data, filter_context=filter_context))) except StopIteration: return None def query( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Query: """Return a `Query` iterator over matches found by applying this path to _data_. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A query iterator. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. 
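Example:
    A sketch; the sample data is an illustrative assumption and `path` is
    assumed to be a compiled `JSONPath` instance:

        q = path.query({"some": {"thing": [1, 2, 3]}})

    The returned `Query` wraps the iterator produced by `finditer()`,
    exposing the same matches through the fluent interface defined in
    `jsonpath.fluent_api` (not shown in this module).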
""" return Query(self.finditer(data, filter_context=filter_context), self.env) def empty(self) -> bool: """Return `True` if this path has no selectors.""" return not bool(self.segments) def singular_query(self) -> bool: """Return `True` if this JSONPath query is a singular query.""" for segment in self.segments: if isinstance(segment, JSONPathRecursiveDescentSegment): return False if len(segment.selectors) == 1 and isinstance( segment.selectors[0], (NameSelector, IndexSelector) ): continue return False return True class CompoundJSONPath: """Multiple `JSONPath`s combined.""" __slots__ = ("env", "path", "paths") def __init__( self, *, env: JSONPathEnvironment, path: Union[JSONPath, CompoundJSONPath], paths: Iterable[Tuple[str, JSONPath]] = (), ) -> None: self.env = env self.path = path self.paths = tuple(paths) def __str__(self) -> str: buf: List[str] = [str(self.path)] for op, path in self.paths: buf.append(f" {op} ") buf.append(str(path)) return "".join(buf) def __eq__(self, __value: object) -> bool: return ( isinstance(__value, CompoundJSONPath) and self.path == __value.path and self.paths == __value.paths ) def __hash__(self) -> int: return hash((self.path, self.paths)) def findall( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> List[object]: """Find all objects in `data` matching the given JSONPath `path`. If `data` is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A list of matched objects. If there are no matches, the list will be empty. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ objs = self.path.findall(data, filter_context=filter_context) for op, path in self.paths: _objs = path.findall(data, filter_context=filter_context) if op == self.env.union_token: objs.extend(_objs) else: assert op == self.env.intersection_token, op objs = [obj for obj in objs if obj in _objs] return objs def finditer( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Iterable[JSONPathMatch]: """Generate `JSONPathMatch` objects for each match. If `data` is a string or a file-like objects, it will be loaded using `json.loads()` and the default `JSONDecoder`. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: An iterator yielding `JSONPathMatch` objects for each match. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ matches = self.path.finditer(data, filter_context=filter_context) for op, path in self.paths: _matches = path.finditer(data, filter_context=filter_context) if op == self.env.union_token: matches = itertools.chain(matches, _matches) else: assert op == self.env.intersection_token _objs = [match.obj for match in _matches] matches = (match for match in matches if match.obj in _objs) return matches def match( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Union[JSONPathMatch, None]: """Return a `JSONPathMatch` instance for the first object found in _data_. `None` is returned if there are no matches. 
Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A `JSONPathMatch` object for the first match, or `None` if there were no matches. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ try: return next(iter(self.finditer(data, filter_context=filter_context))) except StopIteration: return None async def findall_async( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> List[object]: """An async version of `findall()`.""" objs = await self.path.findall_async(data, filter_context=filter_context) for op, path in self.paths: _objs = await path.findall_async(data, filter_context=filter_context) if op == self.env.union_token: objs.extend(_objs) else: assert op == self.env.intersection_token objs = [obj for obj in objs if obj in _objs] return objs async def finditer_async( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> AsyncIterable[JSONPathMatch]: """An async version of `finditer()`.""" matches = await self.path.finditer_async(data, filter_context=filter_context) for op, path in self.paths: _matches = await path.finditer_async(data, filter_context=filter_context) if op == self.env.union_token: matches = _achain(matches, _matches) else: assert op == self.env.intersection_token _objs = [match.obj async for match in _matches] matches = (match async for match in matches if match.obj in _objs) return matches def query( self, data: JSONData, *, filter_context: Optional[FilterContextVars] = None ) -> Query: """Return a `Query` iterator over matches found by applying this path to _data_. Arguments: data: A JSON document or Python object implementing the `Sequence` or `Mapping` interfaces. filter_context: Arbitrary data made available to filters using the _filter context_ selector. Returns: A query iterator. Raises: JSONPathSyntaxError: If the path is invalid. JSONPathTypeError: If a filter expression attempts to use types in an incompatible way. """ return Query(self.finditer(data, filter_context=filter_context), self.env) def union(self, path: JSONPath) -> CompoundJSONPath: """Union of this path and another path.""" return self.__class__( env=self.env, path=self.path, paths=self.paths + ((self.env.union_token, path),), ) def intersection(self, path: JSONPath) -> CompoundJSONPath: """Intersection of this path and another path.""" return self.__class__( env=self.env, path=self.path, paths=self.paths + ((self.env.intersection_token, path),), ) T = TypeVar("T") async def _achain(*iterables: AsyncIterable[T]) -> AsyncIterable[T]: for it in iterables: async for element in it: yield element jg-rp-python-jsonpath-830094f/jsonpath/pointer.py000066400000000000000000000547751512714264000220570ustar00rootroot00000000000000"""JSON Pointer. 
See https://datatracker.ietf.org/doc/html/rfc6901.""" from __future__ import annotations import codecs import re from functools import reduce from operator import getitem from typing import TYPE_CHECKING from typing import Any from typing import Iterable from typing import Mapping from typing import Optional from typing import Sequence from typing import Tuple from typing import Union from urllib.parse import unquote from jsonpath._data import load_data from jsonpath.exceptions import JSONPointerError from jsonpath.exceptions import JSONPointerIndexError from jsonpath.exceptions import JSONPointerKeyError from jsonpath.exceptions import JSONPointerResolutionError from jsonpath.exceptions import JSONPointerTypeError from jsonpath.exceptions import RelativeJSONPointerIndexError from jsonpath.exceptions import RelativeJSONPointerSyntaxError if TYPE_CHECKING: from io import IOBase from .match import JSONPathMatch class _Undefined: def __str__(self) -> str: return "" # pragma: no cover UNDEFINED = _Undefined() class JSONPointer: """Identify a single, specific value in JSON-like data, as per RFC 6901. Args: pointer: A string representation of a JSON Pointer. parts: The keys, indices and/or slices that make up a JSON Pointer. If given, it is assumed that the parts have already been parsed by the JSONPath parser. `unicode_escape` and `uri_decode` are ignored if _parts_ is given. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing the pointer. uri_decode: If `True`, the pointer will be unescaped using _urllib_ before being parsed. Attributes: keys_selector (str): The non-standard token used to target a mapping key or name. max_int_index (int): The maximum integer allowed when resolving array items by index. Defaults to `(2**53) - 1`. min_int_index (int): The minimum integer allowed when resolving array items by index. Defaults to `0`. """ __slots__ = ("_s", "parts") keys_selector = "~" max_int_index = (2**53) - 1 min_int_index = 0 def __init__( self, pointer: str, *, parts: Tuple[Union[int, str], ...] = (), unicode_escape: bool = True, uri_decode: bool = False, ) -> None: if parts: self.parts = tuple(str(part) for part in parts) else: self.parts = self._parse( pointer, unicode_escape=unicode_escape, uri_decode=uri_decode, ) self._s = self._encode(self.parts) def __str__(self) -> str: return self._s def _parse( self, s: str, *, unicode_escape: bool, uri_decode: bool, ) -> Tuple[str, ...]: if uri_decode: s = unquote(s) if unicode_escape: s = self._unicode_escape(s) s = s.lstrip() if s and not s.startswith("/"): raise JSONPointerError( "pointer must start with a slash or be the empty string" ) return tuple(p.replace("~1", "/").replace("~0", "~") for p in s.split("/"))[1:] def _index(self, key: str) -> Optional[int]: """Return an array index for `key`. Return `None` if key can't be converted to an index. """ # Reject indexes that start with a zero. if len(key) > 1 and key.startswith("0"): return None try: index = int(key) except ValueError: return None if index < self.min_int_index or index > self.max_int_index: raise JSONPointerIndexError( f"array indices must be between {self.min_int_index}" f" and {self.max_int_index}" ) return index def _getitem(self, obj: Any, key: str) -> Any: try: # Handle the most common cases. A mapping with a string key, or a sequence # with an integer index. 
if isinstance(obj, Sequence) and not isinstance(obj, str): index = self._index(key) if isinstance(index, int): return getitem(obj, index) return getitem(obj, key) except KeyError as err: return self._handle_key_error(obj, key, err) except TypeError as err: return self._handle_type_error(obj, key, err) except IndexError as err: if not isinstance(err, JSONPointerIndexError): raise JSONPointerIndexError(f"index out of range: {key}") from err raise def _handle_key_error(self, obj: Any, key: str, err: Exception) -> object: # Handle non-standard key/property selector/pointer. # # For the benefit of `RelativeJSONPointer.to()` and `JSONPathMatch.pointer()`, # treat keys starting with a `#` or `~` as a "key pointer". If `key[1:]` is a # key in `obj`, return the key. # # Note that if a key with a leading `#`/`~` exists in `obj`, it will have been # handled by `_getitem`. if ( isinstance(obj, Mapping) and key.startswith((self.keys_selector, "#")) and key[1:] in obj ): return key[1:] raise JSONPointerKeyError(key) from err def _handle_type_error(self, obj: Any, key: str, err: Exception) -> object: if not isinstance(obj, Sequence) or not isinstance(key, str): raise JSONPointerTypeError(f"{key}: {err}") from err if key == "-": # "-" is a valid index when appending to a JSON array with JSON Patch, but # not when resolving a JSON Pointer. raise JSONPointerIndexError("index out of range") from None # Handle non-standard index pointer. # # For the benefit of `RelativeJSONPointer.to()`, treat keys starting with a `#` # and followed by a valid index as an "index pointer". If `int(key[1:])` is # less than `len(obj)`, return the index. if re.match(r"#[1-9]\d*", key): _index = int(key[1:]) if _index >= len(obj): raise JSONPointerIndexError(f"index out of range: {_index}") from err return _index raise JSONPointerTypeError(f"{key}: {err}") from err def resolve( self, data: Union[str, IOBase, Sequence[object], Mapping[str, object]], *, default: object = UNDEFINED, ) -> object: """Resolve this pointer against _data_. Args: data: The target JSON "document" or equivalent Python objects. default: A default value to return if the pointer can't be resolved against the given data. Returns: The object in _data_ pointed to by this pointer. Raises: JSONPointerIndexError: When attempting to access a sequence by an out of range index, unless a default is given. JSONPointerKeyError: If any mapping object along the path does not contain a specified key, unless a default is given. JSONPointerTypeError: When attempting to resolve a non-index string path part against a sequence, unless a default is given. """ data = load_data(data) try: return reduce(self._getitem, self.parts, data) except JSONPointerResolutionError: if default is not UNDEFINED: return default raise def resolve_parent( self, data: Union[str, IOBase, Sequence[object], Mapping[str, object]] ) -> Tuple[Union[Sequence[object], Mapping[str, object], None], object]: """Resolve this pointer against _data_, return the object and its parent. Args: data: The target JSON "document" or equivalent Python objects. Returns: A `(parent, object)` tuple, where parent will be `None` if this pointer points to the root node in the document. If the parent exists but the last object does not, `(parent, UNDEFINED)` will be returned. Raises: JSONPointerIndexError: When attempting to access a sequence by an out of range index, unless using the special `-` index. 
JSONPointerKeyError: If any mapping object along the path does not contain a specified key, unless it is the last part of the pointer. JSONPointerTypeError: When attempting to resolve a non-index string path part against a sequence. """ if not self.parts: return (None, self.resolve(data)) _data = load_data(data) parent = reduce(self._getitem, self.parts[:-1], _data) try: return (parent, self._getitem(parent, self.parts[-1])) except (JSONPointerIndexError, JSONPointerKeyError): return (parent, UNDEFINED) @staticmethod def _encode(parts: Iterable[Union[int, str]]) -> str: if parts: return "/" + "/".join( str(p).replace("~", "~0").replace("/", "~1") for p in parts ) return "" def _unicode_escape(self, s: str) -> str: # UTF-16 escape sequences - possibly surrogate pairs - inside UTF-8 # encoded strings. As per https://datatracker.ietf.org/doc/html/rfc4627 # section 2.5. return ( codecs.decode(s.replace("\\/", "/"), "unicode-escape") .encode("utf-16", "surrogatepass") .decode("utf-16") ) @classmethod def from_match( cls, match: JSONPathMatch, ) -> JSONPointer: """Return a JSON Pointer for the path from a JSONPathMatch instance.""" # An RFC 6901 string representation of match.parts. if match.parts: pointer = cls._encode(match.parts) else: # This should not happen, unless the JSONPathMatch has been tampered with. pointer = "" # pragma: no cover return cls( pointer, parts=match.parts, unicode_escape=False, uri_decode=False, ) @classmethod def from_parts( cls, parts: Iterable[Union[int, str]], *, unicode_escape: bool = True, uri_decode: bool = False, ) -> JSONPointer: """Build a JSON Pointer from _parts_. Args: parts: The keys, indices and/or slices that make up a JSONPointer. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing the pointer. uri_decode: If `True`, the pointer will be unescaped using _urllib_ before being parsed. Returns: A new `JSONPointer` built from _parts_. """ _parts = (str(p) for p in parts) if uri_decode: _parts = (unquote(p) for p in _parts) if unicode_escape: _parts = ( codecs.decode(p.replace("\\/", "/"), "unicode-escape") .encode("utf-16", "surrogatepass") .decode("utf-16") for p in _parts ) __parts = tuple(_parts) if __parts: pointer = cls._encode(__parts) else: pointer = "" return cls( pointer, parts=__parts, unicode_escape=False, uri_decode=False, ) def is_relative_to(self, other: JSONPointer) -> bool: """Return _True_ if this pointer points to a child of _other_.""" return ( len(other.parts) < len(self.parts) and self.parts[: len(other.parts)] == other.parts ) def __eq__(self, other: object) -> bool: return isinstance(other, self.__class__) and self.parts == other.parts def __hash__(self) -> int: return hash((self.__class__, self.parts)) # pragma: no cover def __repr__(self) -> str: return f"{self.__class__.__name__}({self._s!r})" # pragma: no cover def exists( self, data: Union[str, IOBase, Sequence[object], Mapping[str, object]] ) -> bool: """Return _True_ if this pointer can be resolved against _data_. Note that `JSONPointer.resolve()` can return legitimate falsy values that form part of the target JSON document. This method will return `True` if a falsy value is found. Args: data: The target JSON "document" or equivalent Python objects. Returns: _True_ if this pointer can be resolved against _data_, or _False_ otherwise. **_New in version 0.9.0_** """ try: self.resolve(data) except JSONPointerResolutionError: return False return True def parent(self) -> JSONPointer: """Return this pointer's parent, as a new `JSONPointer`. 
        If this pointer points to the document root, _self_ is returned.

        **_New in version 0.9.0_**
        """
        if not self.parts:
            return self
        parent_parts = self.parts[:-1]
        return self.__class__(
            self._encode(parent_parts),
            parts=parent_parts,
            unicode_escape=False,
            uri_decode=False,
        )

    def __truediv__(self, other: object) -> JSONPointer:
        """Join this pointer with _other_.

        _other_ is expected to be a JSON Pointer string, possibly without a
        leading slash. If _other_ does have a leading slash, the previous
        pointer is ignored and a new `JSONPointer` is returned from _other_.

        _other_ should not be a "Relative JSON Pointer".
        """
        if not isinstance(other, str):
            raise TypeError(
                "unsupported operand type for /: "
                f"{self.__class__.__name__!r} and {other.__class__.__name__!r}"
            )
        other = self._unicode_escape(other.lstrip())
        if other.startswith("/"):
            return self.__class__(other, unicode_escape=False, uri_decode=False)
        parts = self.parts + tuple(
            p.replace("~1", "/").replace("~0", "~") for p in other.split("/")
        )
        return self.__class__(
            self._encode(parts), parts=parts, unicode_escape=False, uri_decode=False
        )

    def join(self, *parts: str) -> JSONPointer:
        """Join this pointer with _parts_.

        Each part is expected to be a JSON Pointer string, possibly without a
        leading slash. If a part does have a leading slash, the previous
        pointer is ignored and a new `JSONPointer` is created, and processing
        of remaining parts continues.
        """
        pointer = self
        for part in parts:
            pointer = pointer / part
        return pointer

    def to(
        self,
        rel: Union[RelativeJSONPointer, str],
        *,
        unicode_escape: bool = True,
        uri_decode: bool = False,
    ) -> JSONPointer:
        """Return a new pointer relative to this pointer.

        Args:
            rel: A `RelativeJSONPointer` or a string following "Relative JSON
                Pointer" syntax.
            unicode_escape: If `True`, UTF-16 escape sequences will be decoded
                before parsing the pointer.
            uri_decode: If `True`, the pointer will be unescaped using _urllib_
                before being parsed.

        See https://www.ietf.org/id/draft-hha-relative-json-pointer-00.html
        """
        relative_pointer = (
            RelativeJSONPointer(
                rel, unicode_escape=unicode_escape, uri_decode=uri_decode
            )
            if isinstance(rel, str)
            else rel
        )
        return relative_pointer.to(self)


RE_RELATIVE_POINTER = re.compile(
    r"(?P<ORIGIN>\d+)(?P<INDEX_G>(?P<SIGN>[+\-])(?P<INDEX>\d))?(?P<POINTER>.*)",
    re.DOTALL,
)


class RelativeJSONPointer:
    """A Relative JSON Pointer.

    See https://www.ietf.org/id/draft-hha-relative-json-pointer-00.html

    Args:
        rel: A string following Relative JSON Pointer syntax.
        unicode_escape: If `True`, UTF-16 escape sequences will be decoded
            before parsing the pointer.
        uri_decode: If `True`, the pointer will be unescaped using _urllib_
            before being parsed.
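    Example:
        A sketch following the Relative JSON Pointer draft; the pointers below
        are illustrative assumptions:

            from jsonpath.pointer import JSONPointer, RelativeJSONPointer

            origin = JSONPointer("/foo/1")
            str(RelativeJSONPointer("1/0").to(origin))    # "/foo/0"
            str(RelativeJSONPointer("0-1").to(origin))    # "/foo/0"
            str(RelativeJSONPointer("2/bar").to(origin))  # "/bar"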
""" __slots__ = ("origin", "index", "pointer") def __init__( self, rel: str, *, unicode_escape: bool = True, uri_decode: bool = False, ) -> None: self.origin, self.index, self.pointer = self._parse( rel, unicode_escape=unicode_escape, uri_decode=uri_decode ) def __str__(self) -> str: sign = "+" if self.index > 0 else "" index = "" if self.index == 0 else f"{sign}{self.index}" return f"{self.origin}{index}{self.pointer}" def __eq__(self, __value: object) -> bool: return isinstance(__value, RelativeJSONPointer) and str(self) == str(__value) def __hash__(self) -> int: return hash((self.origin, self.index, self.pointer)) # pragma: no cover def _parse( self, rel: str, *, unicode_escape: bool = True, uri_decode: bool = False, ) -> Tuple[int, int, Union[JSONPointer, str]]: rel = rel.lstrip() match = RE_RELATIVE_POINTER.match(rel) if not match: raise RelativeJSONPointerSyntaxError("", rel) # Steps to move origin = self._zero_or_positive(match.group("ORIGIN"), rel) # Optional index manipulation if match.group("INDEX_G"): index = self._zero_or_positive(match.group("INDEX"), rel) if index == 0: raise RelativeJSONPointerSyntaxError("index offset can't be zero", rel) if match.group("SIGN") == "-": index = -index else: index = 0 # Pointer or '#'. Empty string is OK. _pointer = match.group("POINTER").strip() pointer = ( JSONPointer( _pointer, unicode_escape=unicode_escape, uri_decode=uri_decode, ) if _pointer != "#" else _pointer ) return (origin, index, pointer) def _zero_or_positive(self, s: str, rel: str) -> int: # TODO: accept start and stop index for better error messages if s.startswith("0") and len(s) > 1: raise RelativeJSONPointerSyntaxError("unexpected leading zero", rel) try: return int(s) except ValueError as err: raise RelativeJSONPointerSyntaxError( "expected positive int or zero", rel ) from err def _int_like(self, obj: Any) -> bool: if isinstance(obj, int): return True try: int(obj) except ValueError: return False return True def to( self, pointer: Union[JSONPointer, str], *, unicode_escape: bool = True, uri_decode: bool = False, ) -> JSONPointer: """Return a new JSONPointer relative to _pointer_. Args: pointer: A `JSONPointer` instance or a string following JSON Pointer syntax. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing the pointer. uri_decode: If `True`, the pointer will be unescaped using _urllib_ before being parsed. 
""" _pointer = ( JSONPointer(pointer, unicode_escape=unicode_escape, uri_decode=uri_decode) if isinstance(pointer, str) else pointer ) # Move to origin if self.origin > len(_pointer.parts): raise RelativeJSONPointerIndexError( f"origin ({self.origin}) exceeds root ({len(_pointer.parts)})" ) if self.origin < 1: parts = list(_pointer.parts) else: parts = list(_pointer.parts[: -self.origin]) # Array index offset if self.index and parts and self._int_like(parts[-1]): new_index = int(parts[-1]) + self.index if new_index < 0: raise RelativeJSONPointerIndexError( f"index offset out of range {new_index}" ) parts[-1] = str(int(parts[-1]) + self.index) # Pointer or index/property if isinstance(self.pointer, JSONPointer): parts.extend(self.pointer.parts) else: assert self.pointer == "#" parts[-1] = f"#{parts[-1]}" return JSONPointer.from_parts( parts, unicode_escape=unicode_escape, uri_decode=uri_decode ) def resolve( pointer: Union[str, Iterable[Union[str, int]]], data: Union[str, IOBase, Sequence[object], Mapping[str, object]], *, default: object = UNDEFINED, unicode_escape: bool = True, uri_decode: bool = False, ) -> object: """Resolve JSON Pointer _pointer_ against _data_. Args: pointer: A string representation of a JSON Pointer or an iterable of JSON Pointer parts. data: The target JSON "document" or equivalent Python objects. default: A default value to return if the pointer can't be resolved. against the given data. unicode_escape: If `True`, UTF-16 escape sequences will be decoded before parsing the pointer. uri_decode: If `True`, the pointer will be unescaped using _urllib_ before being parsed. Returns: The object in _data_ pointed to by this pointer. Raises: JSONPointerIndexError: When attempting to access a sequence by an out of range index, unless a default is given. JSONPointerKeyError: If any mapping object along the path does not contain a specified key, unless a default is given. JSONPointerTypeError: When attempting to resolve a non-index string path part against a sequence, unless a default is given. 
""" if isinstance(pointer, str): try: return JSONPointer( pointer, unicode_escape=unicode_escape, uri_decode=uri_decode ).resolve(data) except JSONPointerResolutionError: if default is not UNDEFINED: return default raise try: return JSONPointer.from_parts( pointer, unicode_escape=unicode_escape, uri_decode=uri_decode ).resolve(data) except JSONPointerResolutionError: if default is not UNDEFINED: return default raise jg-rp-python-jsonpath-830094f/jsonpath/py.typed000066400000000000000000000000001512714264000214700ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/jsonpath/segments.py000066400000000000000000000105521512714264000222050ustar00rootroot00000000000000"""JSONPath child and descendant segment definitions.""" from __future__ import annotations from abc import ABC from abc import abstractmethod from typing import TYPE_CHECKING from typing import AsyncIterable from typing import Iterable from typing import Mapping from typing import Sequence from typing import Tuple from .exceptions import JSONPathRecursionError if TYPE_CHECKING: from .env import JSONPathEnvironment from .match import JSONPathMatch from .selectors import JSONPathSelector from .token import Token class JSONPathSegment(ABC): """Base class for all JSONPath segments.""" __slots__ = ("env", "token", "selectors") def __init__( self, *, env: JSONPathEnvironment, token: Token, selectors: Tuple[JSONPathSelector, ...], ) -> None: self.env = env self.token = token self.selectors = selectors @abstractmethod def resolve(self, nodes: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]: """Apply this segment to each `JSONPathMatch` in _nodes_.""" @abstractmethod def resolve_async( self, nodes: AsyncIterable[JSONPathMatch] ) -> AsyncIterable[JSONPathMatch]: """An async version of `resolve`.""" class JSONPathChildSegment(JSONPathSegment): """The JSONPath child selection segment.""" def resolve(self, nodes: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]: """Select children of each node in _nodes_.""" for node in nodes: for selector in self.selectors: yield from selector.resolve(node) async def resolve_async( self, nodes: AsyncIterable[JSONPathMatch] ) -> AsyncIterable[JSONPathMatch]: """An async version of `resolve`.""" async for node in nodes: for selector in self.selectors: async for match in selector.resolve_async(node): yield match def __str__(self) -> str: return f"[{', '.join(str(itm) for itm in self.selectors)}]" def __eq__(self, __value: object) -> bool: return ( isinstance(__value, JSONPathChildSegment) and self.selectors == __value.selectors and self.token == __value.token ) def __hash__(self) -> int: return hash((self.selectors, self.token)) class JSONPathRecursiveDescentSegment(JSONPathSegment): """The JSONPath recursive descent segment.""" def resolve(self, nodes: Iterable[JSONPathMatch]) -> Iterable[JSONPathMatch]: """Select descendants of each node in _nodes_.""" for node in nodes: for _node in self._visit(node): for selector in self.selectors: yield from selector.resolve(_node) async def resolve_async( self, nodes: AsyncIterable[JSONPathMatch] ) -> AsyncIterable[JSONPathMatch]: """An async version of `resolve`.""" async for node in nodes: for _node in self._visit(node): for selector in self.selectors: async for match in selector.resolve_async(_node): yield match def _visit(self, node: JSONPathMatch, depth: int = 1) -> Iterable[JSONPathMatch]: """Depth-first, pre-order node traversal.""" if depth > self.env.max_recursion_depth: raise JSONPathRecursionError("recursion limit exceeded", token=self.token) yield 
node if isinstance(node.obj, Mapping): for name, val in node.obj.items(): if isinstance(val, (Mapping, Sequence)): _node = node.new_child(val, name) yield from self._visit(_node, depth + 1) elif isinstance(node.obj, Sequence) and not isinstance(node.obj, str): for i, item in enumerate(node.obj): if isinstance(item, (Mapping, Sequence)): _node = node.new_child(item, i) yield from self._visit(_node, depth + 1) def __str__(self) -> str: return f"..[{', '.join(str(itm) for itm in self.selectors)}]" def __eq__(self, __value: object) -> bool: return ( isinstance(__value, JSONPathRecursiveDescentSegment) and self.selectors == __value.selectors and self.token == __value.token ) def __hash__(self) -> int: return hash(("..", self.selectors, self.token)) jg-rp-python-jsonpath-830094f/jsonpath/selectors.py000066400000000000000000000601051512714264000223620ustar00rootroot00000000000000"""JSONPath segments and selectors, as returned from `Parser.parse`.""" from __future__ import annotations from abc import ABC from abc import abstractmethod from collections.abc import Mapping from collections.abc import Sequence from contextlib import suppress from typing import TYPE_CHECKING from typing import Any from typing import AsyncIterable from typing import Iterable from typing import Optional from typing import Union from .exceptions import JSONPathIndexError from .exceptions import JSONPathSyntaxError from .exceptions import JSONPathTypeError from .match import NodeList from .serialize import canonical_string if TYPE_CHECKING: from .env import JSONPathEnvironment from .filter import FilterExpression from .match import JSONPathMatch from .path import JSONPath from .token import Token class JSONPathSelector(ABC): """Base class for all JSONPath segments and selectors.""" __slots__ = ("env", "token") def __init__(self, *, env: JSONPathEnvironment, token: Token) -> None: self.env = env self.token = token @abstractmethod def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: """Apply the segment/selector to each node in _matches_. Arguments: node: A node matched by preceding segments/selectors. Returns: The `JSONPathMatch` instances created by applying this selector to each preceding node. 
""" @abstractmethod def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: """An async version of `resolve`.""" class NameSelector(JSONPathSelector): """Select at most one object member value given an object member name.""" __slots__ = ("name",) def __init__(self, *, env: JSONPathEnvironment, token: Token, name: str) -> None: super().__init__(env=env, token=token) self.name = name def __str__(self) -> str: return canonical_string(self.name) def __eq__(self, __value: object) -> bool: return ( isinstance(__value, NameSelector) and self.name == __value.name and self.token == __value.token ) def __hash__(self) -> int: return hash((self.name, self.token)) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.obj, Mapping): with suppress(KeyError): match = node.new_child(self.env.getitem(node.obj, self.name), self.name) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if isinstance(node.obj, Mapping): with suppress(KeyError): match = node.new_child( await self.env.getitem_async(node.obj, self.name), self.name ) node.add_child(match) yield match class IndexSelector(JSONPathSelector): """Select at most one array element value given an index.""" __slots__ = ("index", "_as_key") def __init__( self, *, env: JSONPathEnvironment, token: Token, index: int, ) -> None: if index < env.min_int_index or index > env.max_int_index: raise JSONPathIndexError("index out of range", token=token) super().__init__(env=env, token=token) self.index = index self._as_key = str(self.index) def __str__(self) -> str: return str(self.index) def __eq__(self, __value: object) -> bool: return ( isinstance(__value, IndexSelector) and self.index == __value.index and self.token == __value.token ) def __hash__(self) -> int: return hash((self.index, self.token)) def _normalized_index(self, obj: Sequence[object]) -> int: if self.index < 0 and len(obj) >= abs(self.index): return len(obj) + self.index return self.index def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: # Optionally try string representation of int if not self.env.strict and isinstance(node.obj, Mapping): # Try the string representation of the index as a key. with suppress(KeyError): match = node.new_child( self.env.getitem(node.obj, self._as_key), self.index ) node.add_child(match) yield match if isinstance(node.obj, Sequence) and not isinstance(node.obj, str): norm_index = self._normalized_index(node.obj) with suppress(IndexError): match = node.new_child( self.env.getitem(node.obj, self.index), norm_index ) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if not self.env.strict and isinstance(node.obj, Mapping): # Try the string representation of the index as a key. with suppress(KeyError): match = node.new_child( await self.env.getitem_async(node.obj, self._as_key), self.index ) node.add_child(match) yield match if isinstance(node.obj, Sequence) and not isinstance(node.obj, str): norm_index = self._normalized_index(node.obj) with suppress(IndexError): match = node.new_child( await self.env.getitem_async(node.obj, self.index), norm_index ) node.add_child(match) yield match class KeySelector(JSONPathSelector): """Select at most one name from an object member, given the name. The key selector is introduced to facilitate valid normalized paths for nodes produced by the "keys selector" and the "keys filter selector". It is not expected to be of much use elsewhere. 
NOTE: This is a non-standard selector. See https://jg-rp.github.io/json-p3/guides/jsonpath-extra#key-selector. """ __slots__ = ("key",) def __init__(self, *, env: JSONPathEnvironment, token: Token, key: str) -> None: super().__init__(env=env, token=token) self.key = key def __str__(self) -> str: return f"{self.env.keys_selector_token}{canonical_string(self.key)}" def __eq__(self, __value: object) -> bool: return ( isinstance(__value, KeySelector) and self.token == __value.token and self.key == __value.key ) def __hash__(self) -> int: return hash((self.token, self.key)) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.obj, Mapping) and self.key in node.obj: match = node.__class__( filter_context=node.filter_context(), obj=self.key, parent=node, parts=node.parts + (f"{self.env.keys_selector_token}{self.key}",), path=f"{node.path}[{self}]", root=node.root, ) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: for _node in self.resolve(node): yield _node class KeysSelector(JSONPathSelector): """Select all names from an object's name/value members. NOTE: This is a non-standard selector. See https://jg-rp.github.io/json-p3/guides/jsonpath-extra#keys-selector """ __slots__ = () def __init__(self, *, env: JSONPathEnvironment, token: Token) -> None: super().__init__(env=env, token=token) def __str__(self) -> str: return self.env.keys_selector_token def __eq__(self, __value: object) -> bool: return isinstance(__value, KeysSelector) and self.token == __value.token def __hash__(self) -> int: return hash(self.token) def _keys(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.obj, Mapping): for key in node.obj: match = node.__class__( filter_context=node.filter_context(), obj=key, parent=node, parts=node.parts + (f"{self.env.keys_selector_token}{key}",), path=f"{node.path}[{self.env.keys_selector_token}{canonical_string(key)}]", root=node.root, ) node.add_child(match) yield match def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: yield from self._keys(node) async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: for match in self._keys(node): yield match class SliceSelector(JSONPathSelector): """Select array elements given a start index, a stop index and a step.""" __slots__ = ("slice",) def __init__( self, *, env: JSONPathEnvironment, token: Token, start: Optional[int] = None, stop: Optional[int] = None, step: Optional[int] = None, ) -> None: super().__init__(env=env, token=token) self._check_range(start, stop, step) self.slice = slice(start, stop, step) def __str__(self) -> str: stop = self.slice.stop if self.slice.stop is not None else "" start = self.slice.start if self.slice.start is not None else "" step = self.slice.step if self.slice.step is not None else "1" return f"{start}:{stop}:{step}" def __eq__(self, __value: object) -> bool: return ( isinstance(__value, SliceSelector) and self.slice == __value.slice and self.token == __value.token ) def __hash__(self) -> int: return hash((str(self), self.token)) def _check_range(self, *indices: Optional[int]) -> None: for i in indices: if i is not None and ( i < self.env.min_int_index or i > self.env.max_int_index ): raise JSONPathIndexError("index out of range", token=self.token) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if not isinstance(node.obj, Sequence) or self.slice.step == 0: return for norm_index, obj in zip( # noqa: B905 
range(*self.slice.indices(len(node.obj))), self.env.getitem(node.obj, self.slice), ): match = node.new_child(obj, norm_index) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if not isinstance(node.obj, Sequence) or self.slice.step == 0: return for norm_index, obj in zip( # noqa: B905 range(*self.slice.indices(len(node.obj))), await self.env.getitem_async(node.obj, self.slice), ): match = node.new_child(obj, norm_index) node.add_child(match) yield match class WildcardSelector(JSONPathSelector): """Select nodes of all children of an object or array.""" __slots__ = () def __str__(self) -> str: return "*" def __eq__(self, __value: object) -> bool: return isinstance(__value, WildcardSelector) and self.token == __value.token def __hash__(self) -> int: return hash(self.token) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.obj, Mapping): for key, val in node.obj.items(): match = node.new_child(val, key) node.add_child(match) yield match elif isinstance(node.obj, Sequence) and not isinstance(node.obj, str): for i, val in enumerate(node.obj): match = node.new_child(val, i) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if isinstance(node.obj, Mapping): for key, val in node.obj.items(): match = node.new_child(val, key) node.add_child(match) yield match elif isinstance(node.obj, Sequence) and not isinstance(node.obj, str): for i, val in enumerate(node.obj): match = node.new_child(val, i) node.add_child(match) yield match class SingularQuerySelector(JSONPathSelector): """An embedded absolute query. The result of the embedded query is used as an object member name or array element index. NOTE: This is a non-standard selector. 
""" __slots__ = ("query",) def __init__( self, *, env: JSONPathEnvironment, token: Token, query: JSONPath ) -> None: super().__init__(env=env, token=token) self.query = query if env.strict: raise JSONPathSyntaxError("unexpected query selector", token=token) def __str__(self) -> str: return str(self.query) def __eq__(self, __value: object) -> bool: return ( isinstance(__value, SingularQuerySelector) and self.query == __value.query and self.token == __value.token ) def __hash__(self) -> int: return hash((self.query, self.token)) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.obj, Mapping): nodes = NodeList(self.query.finditer(node.root)) if nodes.empty(): return value = nodes[0].value if not isinstance(value, str): return with suppress(KeyError): match = node.new_child(self.env.getitem(node.obj, value), value) node.add_child(match) yield match if isinstance(node.obj, Sequence) and not isinstance(node.obj, str): nodes = NodeList(self.query.finditer(node.root)) if nodes.empty(): return value = nodes[0].value if not isinstance(value, int): return index = self._normalized_index(node.obj, value) with suppress(IndexError): match = node.new_child(self.env.getitem(node.obj, index), index) node.add_child(match) yield match async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if isinstance(node.obj, Mapping): nodes = NodeList( [match async for match in await self.query.finditer_async(node.root)] ) if nodes.empty(): return value = nodes[0].value if not isinstance(value, str): return with suppress(KeyError): match = node.new_child( await self.env.getitem_async(node.obj, value), value ) node.add_child(match) yield match if isinstance(node.obj, Sequence) and not isinstance(node.obj, str): nodes = NodeList( [match async for match in await self.query.finditer_async(node.root)] ) if nodes.empty(): return value = nodes[0].value if not isinstance(value, int): return index = self._normalized_index(node.obj, value) with suppress(IndexError): match = node.new_child( await self.env.getitem_async(node.obj, index), index ) node.add_child(match) yield match def _normalized_index(self, obj: Sequence[object], index: int) -> int: if index < 0 and len(obj) >= abs(index): return len(obj) + index return index class Filter(JSONPathSelector): """Select array elements or object values according to a filter expression.""" __slots__ = ("expression", "cacheable_nodes") def __init__( self, *, env: JSONPathEnvironment, token: Token, expression: FilterExpression, ) -> None: super().__init__(env=env, token=token) self.expression = expression # Compile-time check for cacheable nodes. 
self.cacheable_nodes = self.expression.cacheable_nodes() def __str__(self) -> str: return f"?{self.expression}" def __eq__(self, __value: object) -> bool: return ( isinstance(__value, Filter) and self.expression == __value.expression and self.token == __value.token ) def __hash__(self) -> int: return hash((str(self.expression), self.token)) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if self.cacheable_nodes and self.env.filter_caching: expr = self.expression.cache_tree() else: expr = self.expression if isinstance(node.obj, Mapping): for key, val in node.obj.items(): context = FilterContext( env=self.env, current=val, root=node.root, extra_context=node.filter_context(), current_key=key, ) try: if expr.evaluate(context): match = node.new_child(val, key) node.add_child(match) yield match except JSONPathTypeError as err: if not err.token: err.token = self.token raise elif isinstance(node.obj, Sequence) and not isinstance(node.obj, str): for i, obj in enumerate(node.obj): context = FilterContext( env=self.env, current=obj, root=node.root, extra_context=node.filter_context(), current_key=i, ) try: if expr.evaluate(context): match = node.new_child(obj, i) node.add_child(match) yield match except JSONPathTypeError as err: if not err.token: err.token = self.token raise async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if self.cacheable_nodes and self.env.filter_caching: expr = self.expression.cache_tree() else: expr = self.expression if isinstance(node.obj, Mapping): for key, val in node.obj.items(): context = FilterContext( env=self.env, current=val, root=node.root, extra_context=node.filter_context(), current_key=key, ) try: result = await expr.evaluate_async(context) except JSONPathTypeError as err: if not err.token: err.token = self.token raise if result: match = node.new_child(val, key) node.add_child(match) yield match elif isinstance(node.obj, Sequence) and not isinstance(node.obj, str): for i, obj in enumerate(node.obj): context = FilterContext( env=self.env, current=obj, root=node.root, extra_context=node.filter_context(), current_key=i, ) try: result = await expr.evaluate_async(context) except JSONPathTypeError as err: if not err.token: err.token = self.token raise if result: match = node.new_child(obj, i) node.add_child(match) yield match class KeysFilter(JSONPathSelector): """Selects names from an object's name/value members. NOTE: This is a non-standard selector. 
See https://jg-rp.github.io/json-p3/guides/jsonpath-extra#keys-filter-selector """ __slots__ = ("expression",) def __init__( self, *, env: JSONPathEnvironment, token: Token, expression: FilterExpression, ) -> None: super().__init__(env=env, token=token) self.expression = expression def __str__(self) -> str: return f"~?{self.expression}" def __eq__(self, __value: object) -> bool: # Compare against KeysFilter, not Filter, so two KeysFilter instances can be equal. return ( isinstance(__value, KeysFilter) and self.expression == __value.expression and self.token == __value.token ) def __hash__(self) -> int: return hash(("~", str(self.expression), self.token)) def resolve(self, node: JSONPathMatch) -> Iterable[JSONPathMatch]: if isinstance(node.value, Mapping): for key, val in node.value.items(): context = FilterContext( env=self.env, current=val, root=node.root, extra_context=node.filter_context(), current_key=key, ) try: if self.expression.evaluate(context): match = node.__class__( filter_context=node.filter_context(), obj=key, parent=node, parts=node.parts + (f"{self.env.keys_selector_token}{key}",), path=f"{node.path}[{self.env.keys_selector_token}{canonical_string(key)}]", root=node.root, ) node.add_child(match) yield match except JSONPathTypeError as err: if not err.token: err.token = self.token raise async def resolve_async(self, node: JSONPathMatch) -> AsyncIterable[JSONPathMatch]: if isinstance(node.value, Mapping): for key, val in node.value.items(): context = FilterContext( env=self.env, current=val, root=node.root, extra_context=node.filter_context(), current_key=key, ) try: if await self.expression.evaluate_async(context): match = node.__class__( filter_context=node.filter_context(), obj=key, parent=node, parts=node.parts + (f"{self.env.keys_selector_token}{key}",), path=f"{node.path}[{self.env.keys_selector_token}{canonical_string(key)}]", root=node.root, ) node.add_child(match) yield match except JSONPathTypeError as err: if not err.token: err.token = self.token raise class FilterContext: """Contextual information and data for evaluating a filter expression.""" __slots__ = ( "current_key", "current", "env", "extra_context", "root", ) def __init__( self, *, env: JSONPathEnvironment, current: object, root: Union[Sequence[Any], Mapping[str, Any]], extra_context: Optional[Mapping[str, Any]] = None, current_key: Union[str, int, None] = None, ) -> None: self.env = env self.current = current self.root = root self.extra_context = extra_context or {} self.current_key = current_key def __str__(self) -> str: return ( f"FilterContext(current={self.current}, " f"extra_context={self.extra_context!r})" ) jg-rp-python-jsonpath-830094f/jsonpath/serialize.py000066400000000000000000000005501512714264000223440ustar00rootroot00000000000000"""Helper functions for serializing compiled JSONPath queries.""" import json def canonical_string(value: str) -> str: """Return _value_ as a canonically formatted string literal.""" single_quoted = ( json.dumps(value, ensure_ascii=False)[1:-1] .replace('\\"', '"') .replace("'", "\\'") ) return f"'{single_quoted}'" jg-rp-python-jsonpath-830094f/jsonpath/stream.py000066400000000000000000000060221512714264000216500ustar00rootroot00000000000000"""Step through a stream of tokens.""" from __future__ import annotations from typing import Iterable from .exceptions import JSONPathSyntaxError from .token import TOKEN_EOF from .token import TOKEN_WHITESPACE from .token import Token class TokenStream: """Step through a stream of tokens.""" def __init__(self, token_iter: Iterable[Token]): self.tokens = list(token_iter) self.pos = 0 path = self.tokens[0].path if
self.tokens else "" self.eof = Token(TOKEN_EOF, "", -1, path) def __str__(self) -> str: # pragma: no cover return f"current: {self.current}\nnext: {self.peek}" def current(self) -> Token: """Return the token at the current position in the stream.""" try: return self.tokens[self.pos] except IndexError: return self.eof def next(self) -> Token: """Return the token at the current position and advance the pointer.""" try: token = self.tokens[self.pos] self.pos += 1 return token except IndexError: return self.eof def peek(self, offset: int = 1) -> Token: """Return the token at current position plus the offset. Does not advance the pointer. """ try: return self.tokens[self.pos + offset] except IndexError: return self.eof def eat(self, kind: str, message: str | None = None) -> Token: """Assert the type of the current token and advance the pointer.""" token = self.next() if token.kind != kind: raise JSONPathSyntaxError( message or f"expected {kind}, found {token.kind!r}", token=token, ) return token def expect(self, *typ: str) -> None: """Raise an exception if the current token is not in `typ`.""" token = self.current() if token.kind not in typ: if len(typ) == 1: _typ = repr(typ[0]) else: _typ = f"one of {typ!r}" raise JSONPathSyntaxError( f"expected {_typ}, found {token.kind!r}", token=token, ) def expect_peek(self, *typ: str) -> None: """Raise an exception if the next token is not in `typ`.""" token = self.peek() if token.kind not in typ: if len(typ) == 1: _typ = repr(typ[0]) else: _typ = f"one of {typ!r}" raise JSONPathSyntaxError( f"expected {_typ}, found {token.kind!r}", token=token, ) def expect_peek_not(self, typ: str, message: str) -> None: """Raise an exception if the next token's kind is _typ_.""" if self.peek().kind == typ: raise JSONPathSyntaxError(message, token=self.peek()) def skip_whitespace(self) -> bool: """Skip whitespace.""" if self.current().kind == TOKEN_WHITESPACE: self.pos += 1 return True return False jg-rp-python-jsonpath-830094f/jsonpath/token.py000066400000000000000000000101071512714264000214740ustar00rootroot00000000000000"""JSONPath tokens.""" import sys from typing import Tuple # Utility tokens TOKEN_EOF = sys.intern("TOKEN_EOF") TOKEN_WHITESPACE = sys.intern("TOKEN_WHITESPACE") TOKEN_ERROR = sys.intern("TOKEN_ERROR") # JSONPath expression tokens TOKEN_COLON = sys.intern("TOKEN_COLON") TOKEN_COMMA = sys.intern("TOKEN_COMMA") TOKEN_DDOT = sys.intern("TOKEN_DDOT") TOKEN_DOT = sys.intern("TOKEN_DOT") TOKEN_FILTER = sys.intern("TOKEN_FILTER") TOKEN_KEY = sys.intern("TOKEN_KEY") TOKEN_KEYS = sys.intern("TOKEN_KEYS") TOKEN_KEYS_FILTER = sys.intern("TOKEN_KEYS_FILTER") TOKEN_LBRACKET = sys.intern("TOKEN_LBRACKET") TOKEN_PSEUDO_ROOT = sys.intern("TOKEN_PSEUDO_ROOT") TOKEN_RBRACKET = sys.intern("TOKEN_RBRACKET") TOKEN_ROOT = sys.intern("TOKEN_ROOT") TOKEN_WILD = sys.intern("TOKEN_WILD") TOKEN_NAME = sys.intern("TOKEN_NAME") TOKEN_DOT_PROPERTY = sys.intern("TOKEN_DOT_PROPERTY") TOKEN_DOT_KEY_PROPERTY = sys.intern("TOKEN_DOT_KEY_PROPERTY") TOKEN_KEY_NAME = sys.intern("TOKEN_KEY_NAME") # Filter expression tokens TOKEN_AND = sys.intern("TOKEN_AND") TOKEN_BLANK = sys.intern("TOKEN_BLANK") TOKEN_CONTAINS = sys.intern("TOKEN_CONTAINS") TOKEN_DOUBLE_QUOTE_STRING = sys.intern("TOKEN_DOUBLE_QUOTE_STRING") TOKEN_EMPTY = sys.intern("TOKEN_EMPTY") TOKEN_EQ = sys.intern("TOKEN_EQ") TOKEN_FALSE = sys.intern("TOKEN_FALSE") TOKEN_FILTER_CONTEXT = sys.intern("TOKEN_FILTER_CONTEXT") TOKEN_FLOAT = sys.intern("TOKEN_FLOAT") TOKEN_FUNCTION = sys.intern("TOKEN_FUNCTION") TOKEN_GE = 
sys.intern("TOKEN_GE") TOKEN_GT = sys.intern("TOKEN_GT") TOKEN_IN = sys.intern("TOKEN_IN") TOKEN_INT = sys.intern("TOKEN_INT") TOKEN_LE = sys.intern("TOKEN_LE") TOKEN_LG = sys.intern("TOKEN_LG") TOKEN_LPAREN = sys.intern("TOKEN_LPAREN") TOKEN_LT = sys.intern("TOKEN_LT") TOKEN_MISSING = sys.intern("TOKEN_MISSING") TOKEN_NE = sys.intern("TOKEN_NE") TOKEN_NIL = sys.intern("TOKEN_NIL") TOKEN_NONE = sys.intern("TOKEN_NONE") TOKEN_NOT = sys.intern("TOKEN_NOT") TOKEN_NULL = sys.intern("TOKEN_NULL") TOKEN_OP = sys.intern("TOKEN_OP") TOKEN_OR = sys.intern("TOKEN_OR") TOKEN_RE = sys.intern("TOKEN_RE") TOKEN_RE_FLAGS = sys.intern("TOKEN_RE_FLAGS") TOKEN_RE_PATTERN = sys.intern("TOKEN_RE_PATTERN") TOKEN_RPAREN = sys.intern("TOKEN_RPAREN") TOKEN_SELF = sys.intern("TOKEN_SELF") TOKEN_SINGLE_QUOTE_STRING = sys.intern("TOKEN_SINGLE_QUOTE_STRING") TOKEN_STRING = sys.intern("TOKEN_STRING") TOKEN_TRUE = sys.intern("TOKEN_TRUE") TOKEN_UNDEFINED = sys.intern("TOKEN_UNDEFINED") # Extension tokens TOKEN_INTERSECTION = sys.intern("TOKEN_INTERSECTION") TOKEN_UNION = sys.intern("TOKEN_UNION") class Token: """A token, as returned from `lex.Lexer.tokenize()`. Attributes: kind (str): The token's type. It is always one of the constants defined in _jsonpath.token.py_. value (str): The _path_ substring containing text for the token. index (str): The index at which _value_ starts in _path_. path (str): A reference to the complete JSONPath string from which this token derives. """ __slots__ = ("kind", "value", "index", "path") def __init__( self, kind: str, value: str, index: int, path: str, ) -> None: self.kind = kind self.value = value self.index = index self.path = path def __repr__(self) -> str: # pragma: no cover return ( f"Token(kind={self.kind}, value={self.value!r}, " f"index={self.index}, path={self.path!r})" ) def __eq__(self, other: object) -> bool: return ( isinstance(other, Token) and self.kind == other.kind and self.value == other.value and self.index == other.index and self.path == other.path ) def __hash__(self) -> int: return hash((self.kind, self.value, self.index, self.path)) def position(self) -> Tuple[int, int]: """Return the line and column number for the start of this token.""" line_number = self.value.count("\n", 0, self.index) + 1 column_number = self.index - self.value.rfind("\n", 0, self.index) return (line_number, column_number - 1) jg-rp-python-jsonpath-830094f/jsonpath/unescape.py000066400000000000000000000076051512714264000221700ustar00rootroot00000000000000r"""Replace `\uXXXX` escape sequences with Unicode code points.""" from typing import List from typing import Tuple from .exceptions import JSONPathSyntaxError from .token import Token def unescape_string(value: str, token: Token, quote: str) -> str: """Return `value` with escape sequences replaced with Unicode code points.""" unescaped: List[str] = [] index = 0 while index < len(value): ch = value[index] if ch == "\\": index += 1 _ch, index = _decode_escape_sequence(value, index, token, quote) unescaped.append(_ch) else: _string_from_codepoint(ord(ch), token) unescaped.append(ch) index += 1 return "".join(unescaped) def _decode_escape_sequence( # noqa: PLR0911 value: str, index: int, token: Token, quote: str ) -> Tuple[str, int]: try: ch = value[index] except IndexError as err: raise JSONPathSyntaxError("incomplete escape sequence", token=token) from err if ch == quote: return quote, index if ch == "\\": return "\\", index if ch == "/": return "/", index if ch == "b": return "\x08", index if ch == "f": return "\x0c", index if ch == "n": 
return "\n", index if ch == "r": return "\r", index if ch == "t": return "\t", index if ch == "u": codepoint, index = _decode_hex_char(value, index, token) return _string_from_codepoint(codepoint, token), index raise JSONPathSyntaxError( f"unknown escape sequence at index {token.index + index - 1}", token=token, ) def _decode_hex_char(value: str, index: int, token: Token) -> Tuple[int, int]: length = len(value) if index + 4 >= length: raise JSONPathSyntaxError( f"incomplete escape sequence at index {token.index + index - 1}", token=token, ) index += 1 # move past 'u' codepoint = _parse_hex_digits(value[index : index + 4], token) if _is_low_surrogate(codepoint): raise JSONPathSyntaxError( f"unexpected low surrogate at index {token.index + index - 1}", token=token, ) if _is_high_surrogate(codepoint): # expect a surrogate pair if not ( index + 9 < length and value[index + 4] == "\\" and value[index + 5] == "u" ): raise JSONPathSyntaxError( f"incomplete escape sequence at index {token.index + index - 2}", token=token, ) low_surrogate = _parse_hex_digits(value[index + 6 : index + 10], token) if not _is_low_surrogate(low_surrogate): raise JSONPathSyntaxError( f"unexpected codepoint at index {token.index + index + 4}", token=token, ) codepoint = 0x10000 + (((codepoint & 0x03FF) << 10) | (low_surrogate & 0x03FF)) return (codepoint, index + 9) return (codepoint, index + 3) def _parse_hex_digits(digits: str, token: Token) -> int: codepoint = 0 for digit in digits.encode(): codepoint <<= 4 if digit >= 48 and digit <= 57: codepoint |= digit - 48 elif digit >= 65 and digit <= 70: codepoint |= digit - 65 + 10 elif digit >= 97 and digit <= 102: codepoint |= digit - 97 + 10 else: raise JSONPathSyntaxError( "invalid \\uXXXX escape sequence", token=token, ) return codepoint def _string_from_codepoint(codepoint: int, token: Token) -> str: if codepoint <= 0x1F: raise JSONPathSyntaxError("invalid character", token=token) return chr(codepoint) def _is_high_surrogate(codepoint: int) -> bool: return codepoint >= 0xD800 and codepoint <= 0xDBFF def _is_low_surrogate(codepoint: int) -> bool: return codepoint >= 0xDC00 and codepoint <= 0xDFFF jg-rp-python-jsonpath-830094f/mkdocs.yml000066400000000000000000000035101512714264000201570ustar00rootroot00000000000000site_name: Python JSONPath site_description: A flexible JSONPath engine for Python. 
site_url: https://jg-rp.github.io/python-jsonpath/ theme: name: "material" palette: - scheme: "default" media: "(prefers-color-scheme: light)" primary: "blue" toggle: icon: "material/weather-sunny" name: "Switch to dark mode" - scheme: "slate" media: "(prefers-color-scheme: dark)" primary: "blue" accent: blue toggle: icon: "material/weather-night" name: "Switch to light mode" features: - navigation.sections - content.code.copy repo_name: jg-rp/python-jsonpath repo_url: https://github.com/jg-rp/python-jsonpath edit_uri: "" plugins: - search - mkdocstrings: default_handler: python handlers: python: options: show_source: false show_signature_annotations: true show_root_heading: true separate_signature: true docstring_section_style: "spacy" - autorefs nav: - Introduction: "index.md" - Usage: - Quick Start: "quickstart.md" - Advanced Usage: "advanced.md" - Command Line Interface: "cli.md" - Guides: - JSONPath Syntax: "syntax.md" - Filter Functions: "functions.md" - Query Iterators: "query.md" - JSON Pointers: "pointers.md" - Async Support: "async.md" - API Reference: - Package Level Functions: "convenience.md" - High Level API: "api.md" - Low Level API: "custom_api.md" - Exceptions: "exceptions.md" markdown_extensions: - admonition - pymdownx.highlight: anchor_linenums: true line_spans: __span pygments_lang_class: true - pymdownx.inlinehilite - pymdownx.snippets - pymdownx.superfences - pymdownx.details extra_css: - css/style.css watch: - jsonpath jg-rp-python-jsonpath-830094f/pyproject.toml000066400000000000000000000111401512714264000210660ustar00rootroot00000000000000[build-system] requires = ["hatchling"] build-backend = "hatchling.build" [project] name = "python-jsonpath" description = 'JSONPath, JSON Pointer and JSON Patch for Python.' readme = "README.md" requires-python = ">=3.8" license = "MIT" keywords = ["JSON", "JSONPath", "JSON Path", "JSON Pointer", "JSON Patch", "RFC 9535"] authors = [{ name = "James Prior", email = "jamesgr.prior@gmail.com" }] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] dependencies = [] dynamic = ["version"] [project.urls] Documentation = "https://jg-rp.github.io/python-jsonpath/" Issues = "https://github.com/jg-rp/python-jsonpath/issues" Source = "https://github.com/jg-rp/python-jsonpath" [project.optional-dependencies] strict = ["regex", "iregexp-check>=0.1.4"] [tool.hatch.version] path = "jsonpath/__about__.py" [project.scripts] json = "jsonpath.cli:main" [tool.hatch.build.targets.sdist] include = ["/jsonpath"] [tool.hatch.build.targets.wheel] include = ["/jsonpath"] [tool.hatch.envs.default] dependencies = [ "pytest", "pytest-cov", "mypy", "regex", "iregexp-check", "pyyaml", "types-pyyaml", "types-regex", "twine", "ruff", ] [tool.hatch.envs.default.scripts] cov = [ "hatch run no-regex:cov", "pytest --cov-append --cov-report=term-missing --cov-config=pyproject.toml --cov=jsonpath --cov=tests {args}" ] cov-html = [ "hatch run no-regex:cov", "pytest --cov-append --cov-report=html --cov-config=pyproject.toml --cov=jsonpath 
--cov=tests {args}", ] no-cov = "cov --no-cov {args}" test = "pytest {args}" lint = "ruff check ." typing = "mypy" [[tool.hatch.envs.test.matrix]] python = ["38", "39", "310", "311", "312", "313", "314", "pypy38", "pypy39"] [tool.hatch.envs.test.scripts] test = "pytest {args}" [tool.hatch.envs.docs] dependencies = ["black", "mkdocs", "mkdocstrings[python]", "mkdocs-material"] [tool.hatch.envs.docs.scripts] build = "mkdocs build --clean --strict" serve = "mkdocs serve --dev-addr localhost:8000" [tool.hatch.envs.no-regex] dependencies = ["pytest", "pytest-cov"] [tool.hatch.envs.no-regex.scripts] cov = "pytest --cov-report=term-missing --cov-config=pyproject.toml --cov=jsonpath --cov=tests tests/test_compliance.py {args}" [tool.coverage.run] branch = true parallel = true omit = ["jsonpath/__about__.py", "tests/compliance.py", "tests/consensus.py"] [tool.coverage.report] exclude_lines = ["no cov", "if __name__ == .__main__.:", "if TYPE_CHECKING:"] [tool.mypy] files = ["jsonpath", "tests"] exclude = ["tests/nts", "tests/cts"] python_version = "3.11" disallow_subclassing_any = true disallow_untyped_calls = true disallow_untyped_defs = true disallow_incomplete_defs = true no_implicit_optional = true local_partial_types = true no_implicit_reexport = true strict = true warn_redundant_casts = true warn_unused_configs = true warn_unused_ignores = false warn_return_any = true warn_unreachable = true [tool.ruff] # Exclude a variety of commonly ignored directories. exclude = [ ".bzr", ".direnv", ".eggs", ".git", ".hg", ".mypy_cache", ".nox", ".pants.d", ".pytype", ".ruff_cache", ".svn", ".tox", ".venv", "__pypackages__", "_build", "buck-out", "build", "dist", "node_modules", "venv", ] # Same as Black. line-length = 88 # Assume Python 3.10. target-version = "py310" [tool.ruff.lint] select = [ "A", "ARG", "B", "BLE", "C4", "D", "E", "F", "FBT", "I", "ICN", "ISC", "N", "PIE", "PL", "PT", "Q", "RET", "RSE", "S", "SIM", "SLF", "T10", "T20", "TCH", "TID", "YTT", ] ignore = ["S105", "S101", "D107", "D105", "PLR0913", "SIM108", "PT001", "A005"] fixable = ["I"] unfixable = [] # Allow unused variables when underscore-prefixed. 
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" [tool.ruff.lint.isort] force-single-line = true [tool.ruff.lint.pydocstyle] convention = "google" [tool.ruff.lint.per-file-ignores] "jsonpath/__about__.py" = ["D100"] "jsonpath/__init__.py" = ["D104"] "jsonpath/selectors.py" = ["D102"] "jsonpath/filter.py" = ["D102", "PLW1641"] "jsonpath/unescape.py" = ["PLR2004"] "tests/*" = ["D100", "D101", "D104", "D103"] jg-rp-python-jsonpath-830094f/tests/000077500000000000000000000000001512714264000173175ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/tests/__init__.py000066400000000000000000000001561512714264000214320ustar00rootroot00000000000000# SPDX-FileCopyrightText: 2023-present James Prior # # SPDX-License-Identifier: MIT jg-rp-python-jsonpath-830094f/tests/_cts_case.py000066400000000000000000000034511512714264000216170ustar00rootroot00000000000000"""A dataclass for a test case suitable for the CTS JSON schema.""" from dataclasses import dataclass from dataclasses import field from typing import Any from typing import Dict from typing import List from typing import Mapping from typing import Optional from typing import Sequence from typing import Union from jsonpath import NodeList @dataclass class Case: name: str selector: str document: Union[Mapping[str, Any], Sequence[Any], None] = None result: Any = None results: Optional[List[Any]] = None result_paths: Optional[List[str]] = None results_paths: Optional[List[List[str]]] = None invalid_selector: Optional[bool] = None tags: List[str] = field(default_factory=list) def as_dict(self) -> Dict[str, Any]: rv: Dict[str, Any] = { "name": self.name, "selector": self.selector, } if self.document is not None: rv["document"] = self.document if self.result is not None: rv["result"] = self.result rv["result_paths"] = self.result_paths else: rv["results"] = self.results rv["results_paths"] = self.results_paths else: assert self.invalid_selector rv["invalid_selector"] = True rv["tags"] = self.tags return rv def assert_nodes(self, nodes: NodeList) -> None: """Assert that `nodes` matches this test case.""" if self.results is not None: assert self.results_paths is not None assert nodes.values() in self.results assert nodes.paths() in self.results_paths else: assert self.result_paths is not None assert nodes.values() == self.result assert nodes.paths() == self.result_paths jg-rp-python-jsonpath-830094f/tests/consensus.py000066400000000000000000000044751512714264000217230ustar00rootroot00000000000000"""Test Python JSONPath against the json-path-comparison project's regression suite. Assumes a version of the regression suite is available in the current working directory as "comparison_regression_suite.yaml". See https://github.com/cburgmer/json-path-comparison. We've deliberately named this file so as to exclude it when running `pytest` or `hatch run test`. Target it specifically using `pytest tests/consensus.py`. 
""" import operator import unittest from dataclasses import dataclass from typing import Any from typing import Dict from typing import List from typing import Mapping from typing import Optional from typing import Sequence from typing import Union import pytest from yaml import safe_load import jsonpath @dataclass class Query: id: str # noqa: A003 selector: str document: Union[Mapping[str, Any], Sequence[Any]] consensus: Any = None not_found_consensus: Any = None scalar_not_found_consensus: Any = None scalar_consensus: Any = None ordered: Optional[bool] = None RENAME_MAP = { "not-found-consensus": "not_found_consensus", "scalar-not-found-consensus": "scalar_not_found_consensus", "scalar-consensus": "scalar_consensus", } SKIP = { # "bracket_notation_with_number_on_object": "We support unquoted property names", "dot_notation_with_number_-1": "conflict with compliance", "dot_notation_with_number_on_object": "conflict with compliance", } def clean_query(query: Dict[str, Any]) -> Dict[str, Any]: # Replace hyphens with underscores in dict names. for old, new in RENAME_MAP.items(): if old in query: query[new] = query[old] del query[old] return query def queries() -> List[Query]: with open("comparison_regression_suite.yaml", encoding="utf8") as fd: data = safe_load(fd) return [Query(**clean_query(q)) for q in data["queries"]] QUERIES_WITH_CONSENSUS = [ q for q in queries() if q.consensus is not None and q.consensus != "NOT_SUPPORTED" ] @pytest.mark.parametrize("query", QUERIES_WITH_CONSENSUS, ids=operator.attrgetter("id")) def test_consensus(query: Query) -> None: if query.id in SKIP: pytest.skip(reason=SKIP[query.id]) case = unittest.TestCase() rv = jsonpath.findall(query.selector, query.document) case.assertCountEqual(rv, query.consensus) # noqa: PT009 jg-rp-python-jsonpath-830094f/tests/cts/000077500000000000000000000000001512714264000201105ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/tests/current_key_identifier.json000066400000000000000000000036251512714264000247540ustar00rootroot00000000000000{ "tests": [ { "name": "current key of an object", "selector": "$.some[?match(#, '^b.*')]", "document": { "some": { "foo": "a", "bar": "b", "baz": "c", "qux": "d" } }, "result": ["b", "c"], "result_paths": ["$['some']['bar']", "$['some']['baz']"] }, { "name": "current key of an array", "selector": "$.some[?# > 1]", "document": { "some": ["other", "thing", "foo", "bar"] }, "result": ["foo", "bar"], "result_paths": ["$['some'][2]", "$['some'][3]"] }, { "name": "current key of a string selects nothing", "selector": "$.some[?# > 1]", "document": { "some": "thing" }, "result": [], "result_paths": [] }, { "name": "current key of an object", "selector": "$.some[?match(#, '^b.*')]", "document": { "some": { "foo": "a", "bar": "b", "baz": "c", "qux": "d" } }, "result": ["b", "c"], "result_paths": ["$['some']['bar']", "$['some']['baz']"], "tags": ["extra"] }, { "name": "current key of an array", "selector": "$.some[?# > 1]", "document": { "some": ["other", "thing", "foo", "bar"] }, "result": ["foo", "bar"], "result_paths": ["$['some'][2]", "$['some'][3]"], "tags": ["extra"] }, { "name": "current key identifier, match on object names", "selector": "$[?match(#, '^ab.*') && length(@) > 0 ]", "document": { "abc": [1, 2, 3], "def": [4, 5], "abx": [6], "aby": [] }, "result": [[1, 2, 3], [6]], "result_paths": ["$['abc']", "$['abx']"], "tags": ["extra"] }, { "name": "current key identifier, compare current array index", "selector": "$.abc[?(# >= 1)]", "document": { "abc": [1, 2, 3], "def": [4, 5], "abx": 
[6], "aby": [] }, "result": [2, 3], "result_paths": ["$['abc'][1]", "$['abc'][2]"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/key_selector.json000066400000000000000000000056701512714264000227120ustar00rootroot00000000000000{ "tests": [ { "name": "singular key from an object", "selector": "$.some[~'other']", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": ["other"], "result_paths": ["$['some'][~'other']"], "tags": ["extra"] }, { "name": "singular key from an object, does not exist", "selector": "$.some[~'else']", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "singular key from an array", "selector": "$.some[~'1']", "document": { "some": ["foo", "bar"] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "singular key from an object, shorthand", "selector": "$.some.~other", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": ["other"], "result_paths": ["$['some'][~'other']"], "tags": ["extra"] }, { "name": "recursive key from an object", "selector": "$.some..[~'other']", "document": { "some": { "other": "foo", "thing": "bar", "else": { "other": "baz" } } }, "result": ["other", "other"], "result_paths": ["$['some'][~'other']", "$['some']['else'][~'other']"], "tags": ["extra"] }, { "name": "recursive key from an object, shorthand", "selector": "$.some..~other", "document": { "some": { "other": "foo", "thing": "bar", "else": { "other": "baz" } } }, "result": ["other", "other"], "result_paths": ["$['some'][~'other']", "$['some']['else'][~'other']"], "tags": ["extra"] }, { "name": "recursive key from an object, does not exist", "selector": "$.some..[~'nosuchthing']", "document": { "some": { "other": "foo", "thing": "bar", "else": { "other": "baz" } } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "key of nested object", "selector": "$.a[0].~c", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": ["c"], "result_paths": ["$['a'][0][~'c']"], "tags": ["extra"] }, { "name": "key does not exist", "selector": "$.a[1].~c", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "descendant, single quoted key", "selector": "$..[~'b']", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": ["b", "b"], "result_paths": ["$['a'][0][~'b']", "$['a'][1][~'b']"], "tags": ["extra"] }, { "name": "descendant, double quoted key", "selector": "$..[~\"b\"]", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": ["b", "b"], "result_paths": ["$['a'][0][~'b']", "$['a'][1][~'b']"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/keys_filter_selector.json000066400000000000000000000023771512714264000244430ustar00rootroot00000000000000{ "tests": [ { "name": "filter keys from an object", "selector": "$.some[~?match(@, '^b.*')]", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": ["thing"], "result_paths": ["$['some'][~'thing']"], "tags": ["extra"] }, { "name": "keys filter selector, conditionally select object keys", "selector": "$.*[~?length(@) > 2]", "document": [ { "a": [1, 2, 3], "b": [4, 5] }, { "c": { "x": [1, 2] } }, { "d": [1, 2, 3] } ], "result": ["a", "d"], "result_paths": ["$[0][~'a']", "$[2][~'d']"], "tags": ["extra"] }, { "name": "keys filter selector, existence test", "selector": "$.*[~?@.x]", "document": [ { "a": [1, 2, 3], "b": [4, 5] }, { "c": { "x": [1, 2] } }, { "d": [1, 2, 3] } ], 
"result": ["c"], "result_paths": ["$[1][~'c']"], "tags": ["extra"] }, { "name": "keys filter selector, keys from an array", "selector": "$[~?(true == true)]", "document": [ { "a": [1, 2, 3], "b": [4, 5] }, { "c": { "x": [1, 2] } }, { "d": [1, 2, 3] } ], "result": [], "result_paths": [], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/keys_selector.json000066400000000000000000000041561512714264000230730ustar00rootroot00000000000000{ "tests": [ { "name": "keys from an object", "selector": "$.some[~]", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": ["other", "thing"], "result_paths": ["$['some'][~'other']", "$['some'][~'thing']"], "tags": ["extra"] }, { "name": "shorthand keys from an object", "selector": "$.some.~", "document": { "some": { "other": "foo", "thing": "bar" } }, "result": ["other", "thing"], "result_paths": ["$['some'][~'other']", "$['some'][~'thing']"], "tags": ["extra"] }, { "name": "keys from an array", "selector": "$.some[~]", "document": { "some": ["other", "thing"] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "shorthand keys from an array", "selector": "$.some.~", "document": { "some": ["other", "thing"] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "recurse object keys", "selector": "$..~", "document": { "some": { "thing": "else", "foo": { "bar": "baz" } } }, "result": ["some", "thing", "foo", "bar"], "result_paths": [ "$[~'some']", "$['some'][~'thing']", "$['some'][~'foo']", "$['some']['foo'][~'bar']" ], "tags": ["extra"] }, { "name": "object key", "selector": "$.a[0].~", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": ["b", "c"], "result_paths": ["$['a'][0][~'b']", "$['a'][0][~'c']"], "tags": ["extra"] }, { "name": "array key", "selector": "$.a.~", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "descendant keys", "selector": "$..[~]", "document": { "a": [{ "b": "x", "c": "z" }, { "b": "y" }] }, "result": ["a", "b", "c", "b"], "result_paths": [ "$[~'a']", "$['a'][0][~'b']", "$['a'][0][~'c']", "$['a'][1][~'b']" ], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/membership_operators.json000066400000000000000000000041211512714264000244410ustar00rootroot00000000000000{ "tests": [ { "name": "array contains literal string", "selector": "$[?@.a contains 'foo']", "document": [{ "a": ["foo", "bar"] }, { "a": ["bar"] }], "result": [ { "a": ["foo", "bar"] } ], "result_paths": ["$[0]"], "tags": ["extra"] }, { "name": "object contains literal string", "selector": "$[?@.a contains 'foo']", "document": [{ "a": { "foo": "bar" } }, { "a": { "bar": "baz" } }], "result": [ { "a": { "foo": "bar" } } ], "result_paths": ["$[0]"], "tags": ["extra"] }, { "name": "string literal in array", "selector": "$[?'foo' in @.a]", "document": [{ "a": ["foo", "bar"] }, { "a": ["bar"] }], "result": [ { "a": ["foo", "bar"] } ], "result_paths": ["$[0]"], "tags": ["extra"] }, { "name": "string literal in object", "selector": "$[?'foo' in @.a]", "document": [{ "a": { "foo": "bar" } }, { "a": { "bar": "baz" } }], "result": [ { "a": { "foo": "bar" } } ], "result_paths": ["$[0]"], "tags": ["extra"] }, { "name": "string from embedded query in object", "selector": "$[?$[-1] in @.a]", "document": [{ "a": { "foo": "bar" } }, { "a": { "bar": "baz" } }, "foo"], "result": [ { "a": { "foo": "bar" } } ], "result_paths": ["$[0]"], "tags": ["extra"] }, { "name": "embedded query in list literal", "selector": "$[?(@.a in ['bar', 
'baz'])]", "document": [{ "a": "foo" }, { "a": "bar" }], "result": [ { "a": "bar" } ], "result_paths": ["$[1]"], "tags": ["extra"] }, { "name": "list literal contains embedded query", "selector": "$[?(['bar', 'baz'] contains @.a)]", "document": [{ "a": "foo" }, { "a": "bar" }], "result": [ { "a": "bar" } ], "result_paths": ["$[1]"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/nts/000077500000000000000000000000001512714264000201235ustar00rootroot00000000000000jg-rp-python-jsonpath-830094f/tests/pseudo_root_identifier.json000066400000000000000000000014751512714264000247650ustar00rootroot00000000000000{ "tests": [ { "name": "conditionally select root value", "selector": "^[?@.some.thing > 7]", "document": { "some": { "thing": 42 } }, "result": [{ "some": { "thing": 42 } }], "result_paths": ["^[0]"], "tags": ["extra"] }, { "name": "embedded pseudo root query", "selector": "^[?@.some.thing > value(^.*.num)]", "document": { "some": { "thing": 42 }, "num": 7 }, "result": [{ "some": { "thing": 42 }, "num": 7 }], "result_paths": ["^[0]"], "tags": ["extra"] }, { "name": "embedded root query", "selector": "^[?@.some.thing > value($.num)]", "document": { "some": { "thing": 42 }, "num": 7 }, "result": [{ "some": { "thing": 42 }, "num": 7 }], "result_paths": ["^[0]"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/query_intersection.json000066400000000000000000000011761512714264000241520ustar00rootroot00000000000000{ "tests": [ { "name": "intersection of two paths, no common items", "selector": "$.some & $.thing", "document": { "some": [1, 2, 3], "thing": [4, 5, 6], "other": ["a", "b", "c"] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "intersection of two paths, with common items", "selector": "$.some & $.thing", "document": { "some": [1, 2, 3], "thing": [1, 2, 3], "other": ["a", "b", "c"] }, "result": [[1, 2, 3]], "result_paths": ["$['some']"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/query_union.json000066400000000000000000000013651512714264000225740ustar00rootroot00000000000000{ "tests": [ { "name": "union of two paths", "selector": "$.some | $.thing", "document": { "some": [1, 2, 3], "thing": [4, 5, 6], "other": ["a", "b", "c"] }, "result": [ [1, 2, 3], [4, 5, 6] ], "result_paths": ["$['some']", "$['thing']"], "tags": ["extra"] }, { "name": "union of three paths", "selector": "$.some | $.thing | $.other", "document": { "some": [1, 2, 3], "thing": [4, 5, 6], "other": ["a", "b", "c"] }, "result": [ [1, 2, 3], [4, 5, 6], ["a", "b", "c"] ], "result_paths": ["$['some']", "$['thing']", "$['other']"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/regex_operator.json000066400000000000000000000035231512714264000232420ustar00rootroot00000000000000{ "tests": [ { "name": "regex literal, match", "selector": "$.some[?(@.thing =~ /fo[a-z]/)]", "document": { "some": [{ "thing": "foo" }] }, "result": [{ "thing": "foo" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] }, { "name": "regex literal, no match", "selector": "$.some[?(@.thing =~ /fo[a-z]/)]", "document": { "some": [{ "thing": "foO" }] }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "regex literal, case insensitive match", "selector": "$.some[?(@.thing =~ /fo[a-z]/i)]", "document": { "some": [{ "thing": "foO" }] }, "result": [{ "thing": "foO" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] }, { "name": "regex literal, escaped backslash", "selector": "$.some[?(@.thing =~ /fo\\\\[a-z]/)]", "document": { "some": [{ "thing": "fo\\b" }] }, 
"result": [{ "thing": "fo\\b" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] }, { "name": "regex literal, escaped slash", "selector": "$.some[?(@.thing =~ /fo\\/[a-z]/)]", "document": { "some": [{ "thing": "fo/b" }] }, "result": [{ "thing": "fo/b" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] }, { "name": "regex literal, escaped asterisk", "selector": "$.some[?(@.thing =~ /fo\\*[a-z]/)]", "document": { "some": [{ "thing": "fo*b" }] }, "result": [{ "thing": "fo*b" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] }, { "name": "regex literal, escaped dot", "selector": "$.some[?(@.thing =~ /fo\\.[a-z]/)]", "document": { "some": [{ "thing": "fo.b" }] }, "result": [{ "thing": "fo.b" }], "result_paths": ["$['some'][0]"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/singular_path_selector.json000066400000000000000000000046571512714264000247660ustar00rootroot00000000000000{ "tests": [ { "name": "object name from embedded singular query", "selector": "$.a[$.b[1]]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [{ "q": [4, 5, 6] }], "result_paths": ["$['a']['p']"], "tags": ["extra"] }, { "name": "array index from embedded singular query", "selector": "$.a.j[$['c d'].x.y]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [2], "result_paths": ["$['a']['j'][1]"], "tags": ["extra"] }, { "name": "embedded singular query does not resolve to a string or int value", "selector": "$.a[$.b]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "object name from embedded singular query resolving to nothing", "selector": "$.a[$.foo]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "array index from embedded singular query resolving to nothing", "selector": "$.b[$.foo]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] } }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "array index from embedded singular query is not an int", "selector": "$.b[$.a.z]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] }, "z": "foo" }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": [], "result_paths": [], "tags": ["extra"] }, { "name": "array index from embedded singular query is negative", "selector": "$.b[$.a.z]", "document": { "a": { "j": [1, 2, 3], "p": { "q": [4, 5, 6] }, "z": -1 }, "b": ["j", "p", "q"], "c d": { "x": { "y": 1 } } }, "result": ["q"], "result_paths": ["$['b'][2]"], "tags": ["extra"] } ] } jg-rp-python-jsonpath-830094f/tests/test_async.py000066400000000000000000000023241512714264000220460ustar00rootroot00000000000000import asyncio from typing import Iterator from typing import List from typing import Mapping import jsonpath class MockLazyMapping(Mapping[str, object]): def __init__(self, val: object): self.key = "bar" self.val = val self.call_count = 0 self.await_count = 0 def __len__(self) -> int: # pragma: no cover return 1 def __iter__(self) -> Iterator[str]: # pragma: no cover return iter([self.key]) def __getitem__(self, k: str) -> object: # pragma: no cover self.call_count += 1 if k == self.key: return self.val raise KeyError(k) async def 
__getitem_async__(self, k: str) -> object: self.await_count += 1 if k == self.key: return self.val raise KeyError(k) def test_async_getitem() -> None: lazy_mapping = MockLazyMapping("thing") data = {"foo": lazy_mapping} async def coro() -> List[object]: return await jsonpath.findall_async("$.foo.bar | $.foo.nosuchthing", data) matches = asyncio.run(coro()) assert len(matches) == 1 assert matches[0] == "thing" assert lazy_mapping.call_count == 0 assert lazy_mapping.await_count == 2 # noqa: PLR2004 jg-rp-python-jsonpath-830094f/tests/test_cli.py000066400000000000000000000411121512714264000214760ustar00rootroot00000000000000"""Test cases for the command line interface.""" import argparse import json import pathlib import pytest from jsonpath.__about__ import __version__ from jsonpath.cli import handle_patch_command from jsonpath.cli import handle_path_command from jsonpath.cli import handle_pointer_command from jsonpath.cli import setup_parser from jsonpath.exceptions import JSONPatchTestFailure from jsonpath.exceptions import JSONPathIndexError from jsonpath.exceptions import JSONPathSyntaxError from jsonpath.exceptions import JSONPathTypeError from jsonpath.exceptions import JSONPointerResolutionError from jsonpath.patch import JSONPatch SAMPLE_DATA = { "categories": [ { "name": "footwear", "products": [ { "title": "Trainers", "description": "Fashionable trainers.", "price": 89.99, }, { "title": "Barefoot Trainers", "description": "Running trainers.", "price": 130.00, }, ], }, { "name": "headwear", "products": [ { "title": "Cap", "description": "Baseball cap", "price": 15.00, }, { "title": "Beanie", "description": "Winter running hat.", "price": 9.00, }, ], }, ], "price_cap": 10, } @pytest.fixture() def parser() -> argparse.ArgumentParser: return setup_parser() @pytest.fixture() def invalid_target(tmp_path: pathlib.Path) -> str: target_path = tmp_path / "source.json" with open(target_path, "w") as fd: fd.write(r"}}invalid") return str(target_path) @pytest.fixture() def outfile(tmp_path: pathlib.Path) -> str: output_path = tmp_path / "result.json" return str(output_path) @pytest.fixture() def sample_target(tmp_path: pathlib.Path) -> str: target_path = tmp_path / "source.json" with open(target_path, "w") as fd: json.dump(SAMPLE_DATA, fd) return str(target_path) def test_no_sub_command( parser: argparse.ArgumentParser, capsys: pytest.CaptureFixture[str] ) -> None: """Test that the CLI exits without a sub command.""" with pytest.raises(SystemExit) as err: parser.parse_args([]) captured = capsys.readouterr() assert err.value.code == 2 # noqa: PLR2004 assert ( captured.err.strip() == parser.format_usage() + "json: error: the following arguments are required: COMMAND" ) def test_help( parser: argparse.ArgumentParser, capsys: pytest.CaptureFixture[str] ) -> None: """Test that the CLI can display a help message without a command.""" with pytest.raises(SystemExit) as err: parser.parse_args(["-h"]) captured = capsys.readouterr() assert err.value.code == 0 assert captured.out == parser.format_help() def test_version( parser: argparse.ArgumentParser, capsys: pytest.CaptureFixture[str] ) -> None: """Test that the CLI can display a version number without a command.""" with pytest.raises(SystemExit) as err: parser.parse_args(["--version"]) captured = capsys.readouterr() assert err.value.code == 0 assert captured.out.strip() == f"python-jsonpath, version {__version__}" def test_path_command_invalid_target( parser: argparse.ArgumentParser, invalid_target: str, capsys: pytest.CaptureFixture[str], ) -> None: 
"""Test that we handle invalid JSON with the _path_ command.""" args = parser.parse_args(["path", "-q", "$.foo", "-f", invalid_target]) with pytest.raises(SystemExit) as err: handle_path_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("target document json decode error:") def test_path_command_invalid_target_debug( parser: argparse.ArgumentParser, invalid_target: str, ) -> None: """Test that we handle invalid JSON with the _path_ command.""" args = parser.parse_args(["--debug", "path", "-q", "$.foo", "-f", invalid_target]) with pytest.raises(json.JSONDecodeError): handle_path_command(args) def test_json_path_syntax_error( parser: argparse.ArgumentParser, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle a JSONPath with a syntax error.""" args = parser.parse_args(["path", "-q", "$.1", "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_path_command(args) assert err.value.code == 1 captured = capsys.readouterr() assert captured.err.startswith("json path syntax error") def test_json_path_syntax_error_debug( parser: argparse.ArgumentParser, sample_target: str, ) -> None: """Test that we handle a JSONPath with a syntax error.""" args = parser.parse_args(["--debug", "path", "-q", "$.1", "-f", sample_target]) with pytest.raises(JSONPathSyntaxError): handle_path_command(args) def test_json_path_type_error( parser: argparse.ArgumentParser, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle a JSONPath with a type error.""" args = parser.parse_args( ["path", "-q", "$.foo[?count(@.bar, 'baz')]", "-f", sample_target] ) with pytest.raises(SystemExit) as err: handle_path_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("json path type error") def test_json_path_type_error_debug( parser: argparse.ArgumentParser, sample_target: str, ) -> None: """Test that we handle a JSONPath with a type error.""" args = parser.parse_args( ["--debug", "path", "-q", "$.foo[?count(@.bar, 'baz')]", "-f", sample_target] ) with pytest.raises(JSONPathTypeError): handle_path_command(args) def test_json_path_no_well_typed_checks( parser: argparse.ArgumentParser, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we can disable well-typedness checks.""" # `count()` must be compared query = "$[?count(@..*)]" args = parser.parse_args( [ "path", "-q", query, "-f", sample_target, ] ) with pytest.raises(SystemExit) as err: handle_path_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("json path type error") args = parser.parse_args( [ "path", "-q", query, "--no-type-checks", "-f", sample_target, ] ) # does not raise handle_path_command(args) def test_json_path_index_error( parser: argparse.ArgumentParser, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle a JSONPath with a syntax error.""" args = parser.parse_args(["path", "-q", f"$.foo[{2**53}]", "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_path_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("json path index error") def test_json_path_index_error_debug( parser: argparse.ArgumentParser, sample_target: str, ) -> None: """Test that we handle a JSONPath with a syntax error.""" args = parser.parse_args( ["--debug", "path", "-q", f"$.foo[{2**53}]", "-f", sample_target] ) with 
pytest.raises(JSONPathIndexError): handle_path_command(args) def test_json_path( parser: argparse.ArgumentParser, sample_target: str, outfile: str, ) -> None: """Test a valid JSONPath.""" args = parser.parse_args( ["path", "-q", "$..products.*", "-f", sample_target, "-o", outfile] ) handle_path_command(args) args.output.flush() with open(outfile, "r") as fd: assert len(json.load(fd)) == 4 # noqa: PLR2004 def test_json_path_strict( parser: argparse.ArgumentParser, sample_target: str, outfile: str, ) -> None: """Test a valid JSONPath.""" args = parser.parse_args( [ "--debug", "path", "-q", "price_cap", # No root identifier is an error in strict mode. "-f", sample_target, "-o", outfile, "--strict", ] ) with pytest.raises(JSONPathSyntaxError): handle_path_command(args) args = parser.parse_args( [ "path", "-q", "$.price_cap", # With a root identifier is OK. "-f", sample_target, "-o", outfile, "--strict", ] ) handle_path_command(args) args.output.flush() with open(outfile, "r") as fd: rv = json.load(fd) assert rv == [10] def test_pointer_command_invalid_target( parser: argparse.ArgumentParser, invalid_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle invalid JSON with the _pointer_ command.""" args = parser.parse_args(["pointer", "-p", "/foo/bar", "-f", invalid_target]) with pytest.raises(SystemExit) as err: handle_pointer_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("target document json decode error:") def test_pointer_command_invalid_target_debug( parser: argparse.ArgumentParser, invalid_target: str, ) -> None: """Test that we handle invalid JSON with the _pointer_ command.""" args = parser.parse_args( ["--debug", "pointer", "-p", "/foo/bar", "-f", invalid_target] ) with pytest.raises(json.JSONDecodeError): handle_pointer_command(args) def test_pointer_command_resolution_error( parser: argparse.ArgumentParser, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle pointer resolution errors.""" args = parser.parse_args(["pointer", "-p", "/foo/bar", "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_pointer_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("pointer key error: 'foo'") def test_pointer_command_resolution_error_debug( parser: argparse.ArgumentParser, sample_target: str ) -> None: """Test that we handle pointer resolution errors.""" args = parser.parse_args( ["--debug", "pointer", "-p", "/foo/bar", "-f", sample_target] ) with pytest.raises(JSONPointerResolutionError): handle_pointer_command(args) def test_json_pointer( parser: argparse.ArgumentParser, sample_target: str, outfile: str ) -> None: """Test a valid JSON Pointer.""" args = parser.parse_args( ["pointer", "-p", "/categories/0/name", "-f", sample_target, "-o", outfile] ) handle_pointer_command(args) args.output.flush() with open(outfile, "r") as fd: assert json.load(fd) == "footwear" def test_json_pointer_empty_string( parser: argparse.ArgumentParser, sample_target: str, outfile: str ) -> None: """Test an empty JSON Pointer is valid.""" args = parser.parse_args(["pointer", "-p", "", "-f", sample_target, "-o", outfile]) handle_pointer_command(args) args.output.flush() with open(outfile, "r") as fd: assert json.load(fd) == SAMPLE_DATA def test_read_pointer_from_file( parser: argparse.ArgumentParser, sample_target: str, outfile: str, tmp_path: pathlib.Path, ) -> None: """Test an empty JSON Pointer is valid.""" pointer_file_path = 
tmp_path / "pointer.txt" with pointer_file_path.open("w") as fd: fd.write("/price_cap") args = parser.parse_args( ["pointer", "-r", str(pointer_file_path), "-f", sample_target, "-o", outfile] ) handle_pointer_command(args) args.output.flush() with open(outfile, "r") as fd: assert json.load(fd) == SAMPLE_DATA["price_cap"] def test_patch_command_invalid_patch( parser: argparse.ArgumentParser, sample_target: str, invalid_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle invalid patch JSON.""" args = parser.parse_args(["patch", invalid_target, "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_patch_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("patch document json decode error:") def test_patch_command_invalid_patch_debug( parser: argparse.ArgumentParser, sample_target: str, invalid_target: str, ) -> None: """Test that we handle invalid patch JSON.""" args = parser.parse_args(["--debug", "patch", invalid_target, "-f", sample_target]) with pytest.raises(json.JSONDecodeError): handle_patch_command(args) def test_patch_not_an_array( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle a patch that is not an array.""" mock_patch_path = tmp_path / "patch.json" with mock_patch_path.open("w") as fd: json.dump({"foo": "bar"}, fd) args = parser.parse_args(["patch", str(mock_patch_path), "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_patch_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err == ( "error: patch file does not look like an array of patch operations" ) def test_patch_command_invalid_target( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, invalid_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle invalid JSON with the _patch_ command.""" mock_patch_path = tmp_path / "patch.json" with mock_patch_path.open("w") as fd: json.dump([], fd) args = parser.parse_args(["patch", str(mock_patch_path), "-f", invalid_target]) with pytest.raises(SystemExit) as err: handle_patch_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("target document json decode error:") def test_patch_command_invalid_target_debug( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, invalid_target: str, ) -> None: """Test that we handle invalid JSON with the _patch_ command.""" mock_patch_path = tmp_path / "patch.json" with mock_patch_path.open("w") as fd: json.dump([], fd) args = parser.parse_args( ["--debug", "patch", str(mock_patch_path), "-f", invalid_target] ) with pytest.raises(json.JSONDecodeError): handle_patch_command(args) def test_patch_error( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, sample_target: str, capsys: pytest.CaptureFixture[str], ) -> None: """Test that we handle patch errors.""" mock_patch_path = tmp_path / "patch.json" patch = JSONPatch().test("/categories/0/name", "foo") with mock_patch_path.open("w") as fd: json.dump(patch.asdicts(), fd) args = parser.parse_args(["patch", str(mock_patch_path), "-f", sample_target]) with pytest.raises(SystemExit) as err: handle_patch_command(args) captured = capsys.readouterr() assert err.value.code == 1 assert captured.err.startswith("test failed") def test_patch_error_debug( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, sample_target: str, ) -> None: """Test that we handle patch errors.""" 
mock_patch_path = tmp_path / "patch.json" patch = JSONPatch().test("/categories/0/name", "foo") with mock_patch_path.open("w") as fd: json.dump(patch.asdicts(), fd) args = parser.parse_args( ["--debug", "patch", str(mock_patch_path), "-f", sample_target] ) with pytest.raises(JSONPatchTestFailure): handle_patch_command(args) def test_json_patch( parser: argparse.ArgumentParser, tmp_path: pathlib.Path, sample_target: str, outfile: str, ) -> None: """Test a valid JSON patch.""" mock_patch_path = tmp_path / "patch.json" patch = JSONPatch().replace("/categories/0/name", "foo") with mock_patch_path.open("w") as fd: json.dump(patch.asdicts(), fd) args = parser.parse_args( ["patch", str(mock_patch_path), "-f", sample_target, "-o", outfile] ) handle_patch_command(args) args.output.flush() with open(outfile, "r") as fd: patched = json.load(fd) assert patched["categories"][0]["name"] == "foo" jg-rp-python-jsonpath-830094f/tests/test_compare.py000066400000000000000000000034321512714264000223600ustar00rootroot00000000000000"""Default filter expression comparison test cases.""" import dataclasses import operator import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str left: object op: str right: object want: bool TEST_CASES = [ Case( description="true and true", left=True, op="&&", right=True, want=True, ), Case( description="left in right", left="thing", op="in", right=["some", "thing"], want=True, ), Case( description="right contains left", left=["some", "thing"], op="contains", right="thing", want=True, ), Case( description="string >= string", left="thing", op=">=", right="thing", want=True, ), Case( description="string < string", left="abc", op="<", right="bcd", want=True, ), Case( description="string > string", left="bcd", op=">", right="abcd", want=True, ), Case( description="int >= int", left=2, op=">=", right=1, want=True, ), Case( description="nil >= nil", left=None, op=">=", right=None, want=True, ), Case( description="nil <= nil", left=None, op="<=", right=None, want=True, ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_compare(env: JSONPathEnvironment, case: Case) -> None: result = env.compare(case.left, case.op, case.right) assert result == case.want jg-rp-python-jsonpath-830094f/tests/test_compliance.py000066400000000000000000000117131512714264000230450ustar00rootroot00000000000000"""Test Python JSONPath against the JSONPath Compliance Test Suite. The CTS is a submodule located in /tests/cts. After a git clone, run `git submodule update --init` from the root of the repository. """ import asyncio import json import operator from typing import List import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathError from jsonpath import NodeList from ._cts_case import Case # CTS tests that are expected to fail when JSONPathEnvironment.strict is False. 
XFAIL_INVALID = { "basic, no leading whitespace", "basic, no trailing whitespace", "basic, root node identifier in brackets without filter selector", "filter, equals number, invalid 00", "filter, equals number, invalid leading 0", "filter, true, incorrectly capitalized", "filter, false, incorrectly capitalized", "filter, null, incorrectly capitalized", "name selector, double quotes, single high surrogate", "name selector, double quotes, single low surrogate", "name selector, double quotes, high high surrogate", "name selector, double quotes, low low surrogate", "name selector, double quotes, surrogate non-surrogate", "name selector, double quotes, non-surrogate surrogate", "name selector, double quotes, surrogate supplementary", "name selector, double quotes, supplementary surrogate", } XFAIL_VALID = { "filter, index segment on object, selects nothing", } # CTS test that will only pass if the third party `regex` package is installed. REGEX_ONLY = { "functions, match, dot matcher on \\u2028", "functions, match, dot matcher on \\u2029", "functions, search, dot matcher on \\u2028", "functions, search, dot matcher on \\u2029", "functions, match, filter, match function, unicode char class, uppercase", "functions, match, filter, match function, unicode char class negated, uppercase", "functions, search, filter, search function, unicode char class, uppercase", "functions, search, filter, search function, unicode char class negated, uppercase", } with open("tests/cts/cts.json", encoding="utf8") as fd: data = json.load(fd) CASES = [Case(**case) for case in data["tests"]] def valid_cases() -> List[Case]: return [case for case in CASES if not case.invalid_selector] def invalid_cases() -> List[Case]: return [case for case in CASES if case.invalid_selector] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=True) @pytest.mark.parametrize("case", valid_cases(), ids=operator.attrgetter("name")) def test_compliance_strict(env: JSONPathEnvironment, case: Case) -> None: if not env.regex_available and case.name in REGEX_ONLY: pytest.skip(reason="requires regex package") assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", valid_cases(), ids=operator.attrgetter("name")) def test_compliance_async_strict(env: JSONPathEnvironment, case: Case) -> None: if not env.regex_available and case.name in REGEX_ONLY: pytest.skip(reason="requires regex package") async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", invalid_cases(), ids=operator.attrgetter("name")) def test_invalid_selectors_strict(env: JSONPathEnvironment, case: Case) -> None: with pytest.raises(JSONPathError): env.compile(case.selector) @pytest.mark.parametrize("case", valid_cases(), ids=operator.attrgetter("name")) def test_compliance_lax(case: Case) -> None: env = JSONPathEnvironment(strict=False) if not env.regex_available and case.name in REGEX_ONLY: pytest.skip(reason="requires regex package") assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) if case.results is not None: assert case.results_paths is not None if case.name in XFAIL_VALID: assert nodes.values() not in case.results assert nodes.paths() in case.results_paths else: assert nodes.values() in case.results assert 
nodes.paths() in case.results_paths else: assert case.result_paths is not None if case.name in XFAIL_VALID: assert nodes.values() != case.result assert nodes.paths() != case.result_paths else: assert nodes.values() == case.result assert nodes.paths() == case.result_paths @pytest.mark.parametrize("case", invalid_cases(), ids=operator.attrgetter("name")) def test_invalid_selectors_lax(case: Case) -> None: env = JSONPathEnvironment(strict=False) if case.name in XFAIL_INVALID: env.compile(case.selector) else: with pytest.raises(JSONPathError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_convenience_api.py000066400000000000000000000064351512714264000240650ustar00rootroot00000000000000import asyncio from typing import List import pytest import jsonpath def test_convenience_compile() -> None: # Implicit root identifier works by default, but not when strict=True. path = jsonpath.compile("a.*") assert isinstance(path, jsonpath.JSONPath) assert path.findall({"a": [1, 2, 3]}) == [1, 2, 3] def test_convenience_compile_strict() -> None: with pytest.raises(jsonpath.JSONPathSyntaxError): jsonpath.compile("a.*", strict=True) path = jsonpath.compile("$.a.*", strict=True) assert isinstance(path, jsonpath.JSONPath) assert path.findall({"a": [1, 2, 3]}) == [1, 2, 3] def test_convenience_findall() -> None: assert jsonpath.findall("a.*", {"a": [1, 2, 3]}) == [1, 2, 3] def test_convenience_findall_strict() -> None: with pytest.raises(jsonpath.JSONPathSyntaxError): jsonpath.findall("a.*", {"a": [1, 2, 3]}, strict=True) assert jsonpath.findall("$.a.*", {"a": [1, 2, 3]}, strict=True) == [1, 2, 3] def test_convenience_findall_async() -> None: async def coro() -> List[object]: return await jsonpath.findall_async("a.*", {"a": [1, 2, 3]}) assert asyncio.run(coro()) == [1, 2, 3] def test_convenience_findall_async_strict() -> None: async def coro() -> List[object]: with pytest.raises(jsonpath.JSONPathSyntaxError): await jsonpath.findall_async("a.*", {"a": [1, 2, 3]}, strict=True) return await jsonpath.findall_async("$.a.*", {"a": [1, 2, 3]}, strict=True) assert asyncio.run(coro()) == [1, 2, 3] def test_convenience_finditer() -> None: matches = list(jsonpath.finditer("a.*", {"a": [1, 2, 3]})) assert [m.obj for m in matches] == [1, 2, 3] def test_convenience_finditer_strict() -> None: with pytest.raises(jsonpath.JSONPathSyntaxError): list(jsonpath.finditer("a.*", {"a": [1, 2, 3]}, strict=True)) matches = list(jsonpath.finditer("$.a.*", {"a": [1, 2, 3]}, strict=True)) assert [m.obj for m in matches] == [1, 2, 3] def test_convenience_finditer_async_strict() -> None: async def coro() -> List[object]: with pytest.raises(jsonpath.JSONPathSyntaxError): await jsonpath.finditer_async("a.*", {"a": [1, 2, 3]}, strict=True) it = await jsonpath.finditer_async("$.a.*", {"a": [1, 2, 3]}, strict=True) return [m.obj async for m in it] assert asyncio.run(coro()) == [1, 2, 3] def test_convenience_match() -> None: match = jsonpath.match("a.*", {"a": [1, 2, 3]}) assert isinstance(match, jsonpath.JSONPathMatch) assert match.obj == 1 def test_convenience_match_strict() -> None: with pytest.raises(jsonpath.JSONPathSyntaxError): jsonpath.match("a.*", {"a": [1, 2, 3]}, strict=True) match = jsonpath.match("$.a.*", {"a": [1, 2, 3]}) assert isinstance(match, jsonpath.JSONPathMatch) assert match.obj == 1 def test_convenience_query() -> None: query = jsonpath.query("a.*", {"a": [1, 2, 3]}) assert isinstance(query, jsonpath.Query) assert list(query.values()) == [1, 2, 3] def test_convenience_query_strict() -> None: with 
pytest.raises(jsonpath.JSONPathSyntaxError): jsonpath.query("a.*", {"a": [1, 2, 3]}, strict=True) query = jsonpath.query("$.a.*", {"a": [1, 2, 3]}) assert isinstance(query, jsonpath.Query) assert list(query.values()) == [1, 2, 3] jg-rp-python-jsonpath-830094f/tests/test_current_key_identifier.py000066400000000000000000000025761512714264000254760ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/current_key_identifier.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_current_key_identifier(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_current_key_identifier_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_current_key_identifier_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_env.py000066400000000000000000000162001512714264000215170ustar00rootroot00000000000000"""JSONPathEnvironment API test cases.""" import asyncio from typing import List import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import JSONPathTypeError @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() def test_find_all_from_object(env: JSONPathEnvironment) -> None: """Test that we can pass a Python object to findall.""" rv = env.findall("$.some", {"some": 1, "thing": 2}) assert rv == [1] def test_find_all_from_json_string(env: JSONPathEnvironment) -> None: """Test that we can pass a JSON string to findall.""" rv = env.findall("$.some", '{"some": 1, "thing": 2}') assert rv == [1] def test_find_all_with_extra_filter_context(env: JSONPathEnvironment) -> None: """Test that we can pass extra filter context to findall.""" rv = env.findall( "$[?(@.some == _.other)]", {"foo": {"some": 1, "thing": 2}}, filter_context={"other": 1}, ) assert rv == [{"some": 1, "thing": 2}] def test_find_iter_from_object(env: JSONPathEnvironment) -> None: """Test that we can pass a Python object to finditer.""" matches = env.finditer("$.some", {"some": 1, "thing": 2}) assert [match.obj for match in matches] == [1] def test_find_iter_from_json_string(env: JSONPathEnvironment) -> None: """Test that we can pass a JSON string to finditer.""" matches = env.finditer("$.some", '{"some": 1, "thing": 2}') assert [match.obj for match in matches] == [1] def test_find_iter_with_extra_filter_context(env: JSONPathEnvironment) -> None: """Test that we can pass extra filter context to finditer.""" matches = env.finditer( "$[?(@.some == _.other)]", {"foo": {"some": 1, "thing": 2}}, filter_context={"other": 1}, ) assert [match.obj for match in matches] == 
[{"some": 1, "thing": 2}] def test_find_all_async_from_object(env: JSONPathEnvironment) -> None: """Test that we can pass a Python object to findall_async.""" async def coro() -> List[object]: return await env.findall_async("$.some", {"some": 1, "thing": 2}) assert asyncio.run(coro()) == [1] def test_find_all_async_from_json_string(env: JSONPathEnvironment) -> None: """Test that we can pass a JSON string to findall.""" async def coro() -> List[object]: return await env.findall_async("$.some", '{"some": 1, "thing": 2}') assert asyncio.run(coro()) == [1] def test_find_all_async_with_extra_filter_context(env: JSONPathEnvironment) -> None: """Test that we can pass extra filter context to findall_async.""" async def coro() -> List[object]: return await env.findall_async( "$[?(@.some == _.other)]", {"foo": {"some": 1, "thing": 2}}, filter_context={"other": 1}, ) assert asyncio.run(coro()) == [{"some": 1, "thing": 2}] def test_find_iter_async_from_object(env: JSONPathEnvironment) -> None: """Test that we can pass a Python object to finditer.""" async def coro() -> List[object]: matches = await env.finditer_async("$.some", {"some": 1, "thing": 2}) return [match.obj async for match in matches] assert asyncio.run(coro()) == [1] def test_find_iter_async_from_json_string(env: JSONPathEnvironment) -> None: """Test that we can pass a JSON string to finditer.""" async def coro() -> List[object]: matches = await env.finditer_async("$.some", '{"some": 1, "thing": 2}') return [match.obj async for match in matches] assert asyncio.run(coro()) == [1] def test_find_iter_async_with_extra_filter_context(env: JSONPathEnvironment) -> None: """Test that we can pass extra filter context to finditer.""" async def coro() -> List[object]: matches = await env.finditer_async( "$[?(@.some == _.other)]", {"foo": {"some": 1, "thing": 2}}, filter_context={"other": 1}, ) return [match.obj async for match in matches] assert asyncio.run(coro()) == [{"some": 1, "thing": 2}] def test_match(env: JSONPathEnvironment) -> None: """Test that we can get the first match of a path.""" match = env.match("$.some", {"some": 1, "thing": 2}) assert match is not None assert match.obj == 1 def test_no_match(env: JSONPathEnvironment) -> None: """Test that we get `None` if there are no matches.""" match = env.match("$.other", {"some": 1, "thing": 2}) assert match is None def test_match_compound_path(env: JSONPathEnvironment) -> None: """Test that we can get the first match of a compound path.""" match = env.match("$.some | $.thing", {"some": 1, "thing": 2}) assert match is not None assert match.obj == 1 def test_no_match_compound_path(env: JSONPathEnvironment) -> None: """Test that we get `None` if there are no matches in a compound path.""" match = env.match("$.other | $.foo", {"some": 1, "thing": 2}) assert match is None def test_no_unicode_escape() -> None: """Test that we can disable decoding of UTF-16 escape sequences.""" document = {"𝄞": "A"} selector = '$["\\uD834\\uDD1E"]' env = JSONPathEnvironment(unicode_escape=True) assert env.findall(selector, document) == ["A"] env = JSONPathEnvironment(unicode_escape=False) assert env.findall(selector, document) == [] assert env.findall(selector, {"\\uD834\\uDD1E": "B"}) == ["B"] def test_custom_keys_selector_token() -> None: """Test that we can change the non-standard keys selector.""" class MyJSONPathEnvironment(JSONPathEnvironment): keys_selector_token = "*~" env = MyJSONPathEnvironment() data = {"foo": {"a": 1, "b": 2, "c": 3}} assert env.findall("$.foo.*~", data) == ["a", "b", "c"] assert 
env.findall("$.foo.*", data) == [1, 2, 3] def test_custom_fake_root_identifier_token() -> None: """Test that we can change the non-standard fake root identifier.""" class MyJSONPathEnvironment(JSONPathEnvironment): pseudo_root_token = "$$" env = MyJSONPathEnvironment() data = {"foo": {"a": 1, "b": 2, "c": 3}} assert env.findall("$$[?@.foo.a == 1]", data) == [data] assert env.findall("$$[?@.foo.a == 7]", data) == [] assert env.findall("$.*", data) == [{"a": 1, "b": 2, "c": 3}] def test_disable_fake_root_identifier() -> None: """Test that we can disable the non-standard fake root identifier.""" class MyJSONPathEnvironment(JSONPathEnvironment): pseudo_root_token = "" env = MyJSONPathEnvironment() with pytest.raises(JSONPathSyntaxError): env.compile("^[?@.a == 42]") def test_disable_keys_selector() -> None: """Test that we can disable the non-standard keys selector.""" class MyJSONPathEnvironment(JSONPathEnvironment): keys_selector_token = "" env = MyJSONPathEnvironment() with pytest.raises(JSONPathSyntaxError): env.compile("*..~") def test_disable_well_typed_checks() -> None: """Test that we can disable checks for well-typedness.""" env = JSONPathEnvironment(well_typed=True) with pytest.raises(JSONPathTypeError): env.compile("$[?@.* > 2]") env = JSONPathEnvironment(well_typed=False) env.compile("$[?@.* > 2]") jg-rp-python-jsonpath-830094f/tests/test_errors.py000066400000000000000000000063231512714264000222500ustar00rootroot00000000000000from operator import attrgetter from typing import Any from typing import List from typing import NamedTuple import pytest from jsonpath import JSONPathEnvironment from jsonpath.exceptions import JSONPathRecursionError from jsonpath.exceptions import JSONPathSyntaxError from jsonpath.exceptions import JSONPathTypeError @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() def test_unclosed_selection_list(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathSyntaxError, match=r"unexpected end of segment"): env.compile("$[1,2") def test_function_missing_param(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathTypeError, match=r"length\(\) requires 1 argument"): env.compile("$[?(length()==1)]") def test_function_too_many_params(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathTypeError): env.compile("$[?(length(@.a, @.b)==1)]") def test_non_singular_query_is_not_comparable(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathTypeError): env.compile("$[?@.* > 2]") def test_unbalanced_parens(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathSyntaxError): env.compile("$[?((@.foo)]") def test_root_dot(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathSyntaxError): env.compile("$.") def test_embedded_query_is_not_singular(env: JSONPathEnvironment) -> None: with pytest.raises(JSONPathSyntaxError): env.compile("$.a[$.*]") class FilterLiteralTestCase(NamedTuple): description: str query: str BAD_FILTER_LITERAL_TEST_CASES: List[FilterLiteralTestCase] = [ FilterLiteralTestCase("just true", "$[?true]"), FilterLiteralTestCase("just string", "$[?'foo']"), FilterLiteralTestCase("just int", "$[?2]"), FilterLiteralTestCase("just float", "$[?2.2]"), FilterLiteralTestCase("just null", "$[?null]"), FilterLiteralTestCase("literal and literal", "$[?true && false]"), FilterLiteralTestCase("literal or literal", "$[?true || false]"), FilterLiteralTestCase("comparison and literal", "$[?true == false && false]"), FilterLiteralTestCase("comparison or literal", "$[?true == false || false]"), 
FilterLiteralTestCase("literal and comparison", "$[?true && true == false]"), FilterLiteralTestCase("literal or comparison", "$[?false || true == false]"), ] @pytest.mark.parametrize( "case", BAD_FILTER_LITERAL_TEST_CASES, ids=attrgetter("description") ) def test_filter_literals_must_be_compared( env: JSONPathEnvironment, case: FilterLiteralTestCase ) -> None: with pytest.raises(JSONPathSyntaxError): env.compile(case.query) def test_recursive_data() -> None: class MockEnv(JSONPathEnvironment): nondeterministic = False env = MockEnv() query = "$..a" arr: List[Any] = [] data: Any = {"foo": arr} arr.append(data) with pytest.raises(JSONPathRecursionError): env.findall(query, data) def test_low_recursion_limit() -> None: class MockEnv(JSONPathEnvironment): max_recursion_depth = 3 env = MockEnv() query = "$..a" data = {"foo": [{"bar": [1, 2, 3]}]} with pytest.raises(JSONPathRecursionError): env.findall(query, data) jg-rp-python-jsonpath-830094f/tests/test_filter_context.py000066400000000000000000000010251512714264000237570ustar00rootroot00000000000000"""Extra filter context test cases.""" import pytest from jsonpath import JSONPathEnvironment @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() def test_filter_context_selector_in_filter_function(env: JSONPathEnvironment) -> None: """Test that we can pass extra filter context to findall.""" rv = env.findall( "$[?(@.some == length(_.other))]", {"foo": {"some": 1, "thing": 2}}, filter_context={"other": ["a"]}, ) assert rv == [{"some": 1, "thing": 2}] jg-rp-python-jsonpath-830094f/tests/test_filter_expression_caching.py000066400000000000000000000151361512714264000261560ustar00rootroot00000000000000"""Filter expression caching test cases.""" from unittest import mock from jsonpath import JSONPath from jsonpath import JSONPathEnvironment from jsonpath.filter import BaseExpression from jsonpath.filter import CachingFilterExpression from jsonpath.filter import FilterContextPath from jsonpath.filter import FilterExpression from jsonpath.filter import InfixExpression from jsonpath.filter import IntegerLiteral from jsonpath.filter import RelativeFilterQuery from jsonpath.filter import RootFilterQuery from jsonpath.segments import JSONPathChildSegment from jsonpath.selectors import Filter as FilterSelector def test_cache_root_path() -> None: """Test that we wrap root paths in a caching node.""" env = JSONPathEnvironment() path = env.compile("$.some[?@.a < $.thing].a") assert isinstance(path, JSONPath) segment = path.segments[1] assert isinstance(segment, JSONPathChildSegment) filter_selector = segment.selectors[0] assert isinstance(filter_selector, FilterSelector) assert filter_selector.cacheable_nodes is True # The original expression tree without caching nodes. expr: BaseExpression = filter_selector.expression assert isinstance(expr, FilterExpression) expr = expr.expression assert isinstance(expr, InfixExpression) assert isinstance(expr.left, RelativeFilterQuery) assert isinstance(expr.right, RootFilterQuery) # A caching copy of the original expression tree. 
expr = filter_selector.expression.cache_tree() assert isinstance(expr, FilterExpression) expr = expr.expression assert isinstance(expr, InfixExpression) assert isinstance(expr.left, RelativeFilterQuery) assert isinstance(expr.right, CachingFilterExpression) assert isinstance(expr.right._expr, RootFilterQuery) # noqa: SLF001 def test_root_path_cache() -> None: """Test that we evaluate root paths once when caching is enabled.""" env = JSONPathEnvironment(filter_caching=True) data = {"some": [{"a": 1}, {"a": 99}, {"a": 2}, {"a": 3}]} with mock.patch( "jsonpath.filter.RootFilterQuery.evaluate", return_value=10 ) as mock_root_path: path = env.compile("$.some[?@.a < $.thing].a") rv = path.findall(data) assert rv == [1, 2, 3] assert mock_root_path.call_count == 1 def test_root_path_no_cache() -> None: """Test that we evaluate root paths once for each match when caching is disabled.""" env = JSONPathEnvironment(filter_caching=False) data = {"some": [{"a": 1}, {"a": 99}, {"a": 2}, {"a": 3}]} with mock.patch( "jsonpath.filter.RootFilterQuery.evaluate", return_value=10 ) as mock_root_path: path = env.compile("$.some[?@.a < $.thing].a") rv = path.findall(data) assert rv == [1, 2, 3] assert mock_root_path.call_count == 4 # noqa: PLR2004 def test_cache_context_path() -> None: """Test that we wrap filter context paths in a caching node.""" env = JSONPathEnvironment() path = env.compile("$.some[?_.thing > @.a].a") assert isinstance(path, JSONPath) segment = path.segments[1] assert isinstance(segment, JSONPathChildSegment) filter_selector = segment.selectors[0] assert isinstance(filter_selector, FilterSelector) assert filter_selector.cacheable_nodes is True # The original expression tree without caching nodes. expr: BaseExpression = filter_selector.expression assert isinstance(expr, FilterExpression) expr = expr.expression assert isinstance(expr, InfixExpression) assert isinstance(expr.left, FilterContextPath) assert isinstance(expr.right, RelativeFilterQuery) # A caching copy of the original expression tree. 
expr = filter_selector.expression.cache_tree() assert isinstance(expr, FilterExpression) expr = expr.expression assert isinstance(expr, InfixExpression) assert isinstance(expr.left, CachingFilterExpression) assert isinstance(expr.left._expr, FilterContextPath) # noqa: SLF001 assert isinstance(expr.right, RelativeFilterQuery) def test_context_path_cache() -> None: """Test that we evaluate filter context paths once when caching is enabled.""" env = JSONPathEnvironment(filter_caching=True) data = {"some": [{"a": 1}, {"a": 99}, {"a": 2}, {"a": 3}]} with mock.patch( "jsonpath.filter.FilterContextPath.evaluate", return_value=10 ) as mock_root_path: path = env.compile("$.some[?_.thing > @.a].a") rv = path.findall(data) assert rv == [1, 2, 3] assert mock_root_path.call_count == 1 def test_context_path_no_cache() -> None: """Test that we evaluate context path for each match when caching is disabled.""" env = JSONPathEnvironment(filter_caching=False) data = {"some": [{"a": 1}, {"a": 99}, {"a": 2}, {"a": 3}]} with mock.patch( "jsonpath.filter.FilterContextPath.evaluate", return_value=10 ) as mock_root_path: path = env.compile("$.some[?_.thing > @.a].a") rv = path.findall(data) assert rv == [1, 2, 3] assert mock_root_path.call_count == 4 # noqa: PLR2004 def test_cache_expires() -> None: """Test that the cache expires between calls to findall/finditer.""" env = JSONPathEnvironment(filter_caching=True) path = env.compile("$.some.thing[?@.other < $.foo]") some_data = { "some": {"thing": [{"other": 1}, {"other": 2}, {"other": 3}]}, "foo": 10, } other_data = { "some": {"thing": [{"other": 1}, {"other": 2}, {"other": 3}]}, "foo": 1, } assert path.findall(some_data) == [{"other": 1}, {"other": 2}, {"other": 3}] assert path.findall(other_data) == [] def test_uncacheable_filter() -> None: """Test that we don't waste time caching uncacheable expressions.""" env = JSONPathEnvironment(filter_caching=True) path = env.compile("$.some[?@.a > 2 and @.b < 4].a") assert isinstance(path, JSONPath) segment = path.segments[1] assert isinstance(segment, JSONPathChildSegment) filter_selector = segment.selectors[0] assert isinstance(filter_selector, FilterSelector) assert filter_selector.cacheable_nodes is False # The original expression tree without caching nodes. expr: BaseExpression = filter_selector.expression assert isinstance(expr, FilterExpression) expr = expr.expression assert isinstance(expr, InfixExpression) assert isinstance(expr.left, InfixExpression) assert isinstance(expr.right, InfixExpression) assert isinstance(expr.left.left, RelativeFilterQuery) assert isinstance(expr.left.right, IntegerLiteral) assert isinstance(expr.right.left, RelativeFilterQuery) assert isinstance(expr.right.right, IntegerLiteral) jg-rp-python-jsonpath-830094f/tests/test_find_reference.py000066400000000000000000000361131512714264000236720ustar00rootroot00000000000000"""Test cases from the original article by Stefan Gössner. 
See https://goessner.net/articles/JsonPath/ """ import asyncio import dataclasses import operator from typing import Any from typing import List from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] REFERENCE_DATA = { "store": { "book": [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, ], "bicycle": {"color": "red", "price": 19.95}, } } TEST_CASES = [ Case( description="(reference) authors of all books in store", path="$.store.book[*].author", data=REFERENCE_DATA, want=["Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"], ), Case( description="(reference) all authors", path="$..author", data=REFERENCE_DATA, want=["Nigel Rees", "Evelyn Waugh", "Herman Melville", "J. R. R. Tolkien"], ), Case( description="(reference) all store items", path="$.store.*", data=REFERENCE_DATA, want=[ [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, ], {"color": "red", "price": 19.95}, ], ), Case( description="(reference) prices of all store items", path="$.store..price", data=REFERENCE_DATA, want=[8.95, 12.99, 8.99, 22.99, 19.95], ), Case( description="(reference) the third book", path="$..book[2]", data=REFERENCE_DATA, want=[ { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, } ], ), Case( description="(reference) the last book", path="$..book[-1:]", data=REFERENCE_DATA, want=[ { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, } ], ), Case( description="(reference) the first two books", path="$..book[0,1]", data=REFERENCE_DATA, want=[ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, ], ), Case( description="(reference) the first two books slice notation", path="$..book[:2]", data=REFERENCE_DATA, want=[ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, ], ), Case( description="(reference) filter books with ISBN number", path="$..book[?(@.isbn)]", data=REFERENCE_DATA, want=[ { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. 
Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, ], ), Case( description="(reference) filter books cheaper than 10", path="$..book[?(@.price<10)]", data=REFERENCE_DATA, want=[ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, ], ), # Case( # description="root descent", # path="$..", # data=REFERENCE_DATA, # want=[ # { # "store": { # "book": [ # { # "category": "reference", # "author": "Nigel Rees", # "title": "Sayings of the Century", # "price": 8.95, # }, # { # "category": "fiction", # "author": "Evelyn Waugh", # "title": "Sword of Honour", # "price": 12.99, # }, # { # "category": "fiction", # "author": "Herman Melville", # "title": "Moby Dick", # "isbn": "0-553-21311-3", # "price": 8.99, # }, # { # "category": "fiction", # "author": "J. R. R. Tolkien", # "title": "The Lord of the Rings", # "isbn": "0-395-19395-8", # "price": 22.99, # }, # ], # "bicycle": {"color": "red", "price": 19.95}, # } # }, # { # "book": [ # { # "category": "reference", # "author": "Nigel Rees", # "title": "Sayings of the Century", # "price": 8.95, # }, # { # "category": "fiction", # "author": "Evelyn Waugh", # "title": "Sword of Honour", # "price": 12.99, # }, # { # "category": "fiction", # "author": "Herman Melville", # "title": "Moby Dick", # "isbn": "0-553-21311-3", # "price": 8.99, # }, # { # "category": "fiction", # "author": "J. R. R. Tolkien", # "title": "The Lord of the Rings", # "isbn": "0-395-19395-8", # "price": 22.99, # }, # ], # "bicycle": {"color": "red", "price": 19.95}, # }, # [ # { # "category": "reference", # "author": "Nigel Rees", # "title": "Sayings of the Century", # "price": 8.95, # }, # { # "category": "fiction", # "author": "Evelyn Waugh", # "title": "Sword of Honour", # "price": 12.99, # }, # { # "category": "fiction", # "author": "Herman Melville", # "title": "Moby Dick", # "isbn": "0-553-21311-3", # "price": 8.99, # }, # { # "category": "fiction", # "author": "J. R. R. Tolkien", # "title": "The Lord of the Rings", # "isbn": "0-395-19395-8", # "price": 22.99, # }, # ], # { # "category": "reference", # "author": "Nigel Rees", # "title": "Sayings of the Century", # "price": 8.95, # }, # { # "category": "fiction", # "author": "Evelyn Waugh", # "title": "Sword of Honour", # "price": 12.99, # }, # { # "category": "fiction", # "author": "Herman Melville", # "title": "Moby Dick", # "isbn": "0-553-21311-3", # "price": 8.99, # }, # { # "category": "fiction", # "author": "J. R. R. Tolkien", # "title": "The Lord of the Rings", # "isbn": "0-395-19395-8", # "price": 22.99, # }, # {"color": "red", "price": 19.95}, # ], # ), Case( description="(reference) all elements", path="$..*", data=REFERENCE_DATA, want=[ { "book": [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. 
Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, ], "bicycle": {"color": "red", "price": 19.95}, }, [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, ], {"color": "red", "price": 19.95}, { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, }, "reference", "Nigel Rees", "Sayings of the Century", 8.95, "fiction", "Evelyn Waugh", "Sword of Honour", 12.99, "fiction", "Herman Melville", "Moby Dick", "0-553-21311-3", 8.99, "fiction", "J. R. R. Tolkien", "The Lord of the Rings", "0-395-19395-8", 22.99, "red", 19.95, ], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_find(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_find_async(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) async def coro() -> List[object]: return await path.findall_async(case.data) assert asyncio.run(coro()) == case.want jg-rp-python-jsonpath-830094f/tests/test_fluent_api.py000066400000000000000000000227711512714264000230670ustar00rootroot00000000000000"""Test cases for the fluent API.""" import pytest from jsonpath import JSONPathMatch from jsonpath import JSONPointer from jsonpath import compile # noqa: A004 from jsonpath import query def test_iter_query() -> None: """Test that `query` result is iterable, just like `finditer`.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}) for i, match in enumerate(it): assert match.value == i assert [m.obj for m in query("$.some.*", {"some": [0, 1, 2, 3]})] == [0, 1, 2, 3] def test_query_values() -> None: """Test that we can get an iterable of values from a query.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).values() assert list(it) == [0, 1, 2, 3] def test_query_locations() -> None: """Test that we can get an iterable of paths from a query.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).locations() assert list(it) == [ "$['some'][0]", "$['some'][1]", "$['some'][2]", "$['some'][3]", ] def test_query_items() -> None: """Test that we can get an iterable of values and paths from a query.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).items() assert list(it) == [ ("$['some'][0]", 0), ("$['some'][1]", 1), ("$['some'][2]", 2), ("$['some'][3]", 3), ] def test_query_skip() -> None: """Test that we can skip matches from the start of a query iterable.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).skip(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [2, 3] def test_query_skip_zero() -> 
None: """Test that we can skip zero matches from the start of a query iterable.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).skip(0) matches = list(it) assert len(matches) == 4 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1, 2, 3] def test_query_skip_negative() -> None: """Test that we get an exception when skipping a negative value.""" with pytest.raises(ValueError, match="can't drop a negative number of matches"): query("$.some.*", {"some": [0, 1, 2, 3]}).skip(-1) def test_query_skip_all() -> None: """Test that we can skip all matches from the start of a query iterable.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).skip(4) matches = list(it) assert len(matches) == 0 # noqa: PLR2004 assert [m.obj for m in matches] == [] def test_query_skip_more() -> None: """Test that we can skip more results than there are matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).skip(5) matches = list(it) assert len(matches) == 0 # noqa: PLR2004 assert [m.obj for m in matches] == [] def test_query_drop() -> None: """Test that we can skip matches with `drop`.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).drop(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [2, 3] def test_query_limit() -> None: """Test that we can limit the number of matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).limit(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1] def test_query_limit_zero() -> None: """Test that we can call limit with zero.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).limit(0) matches = list(it) assert len(matches) == 0 # noqa: PLR2004 assert [m.obj for m in matches] == [] def test_query_limit_more() -> None: """Test that we can give limit a number greater than the number of matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).limit(5) matches = list(it) assert len(matches) == 4 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1, 2, 3] def test_query_limit_all() -> None: """Test limit is number of matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).limit(4) matches = list(it) assert len(matches) == 4 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1, 2, 3] def test_query_limit_negative() -> None: """Test that we get an exception if limit is negative.""" with pytest.raises(ValueError, match="can't limit by a negative number of matches"): query("$.some.*", {"some": [0, 1, 2, 3]}).limit(-1) def test_query_head() -> None: """Test that we can limit the number of matches with `head`.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).head(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1] def test_query_first() -> None: """Test that we can limit the number of matches with `first`.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).first(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1] def test_query_tail() -> None: """Test that we can get the last _n_ matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).tail(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [2, 3] def test_query_tail_zero() -> None: """Test that we can call `tail` with zero.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).tail(0) matches = list(it) assert len(matches) == 0 # noqa: PLR2004 assert [m.obj for m in matches] == [] def test_query_tail_all() -> None: """Test tail is the same as the number of matches.""" it = 
query("$.some.*", {"some": [0, 1, 2, 3]}).tail(4) matches = list(it) assert len(matches) == 4 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1, 2, 3] def test_query_tail_more() -> None: """Test tail is more than the number of matches.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).tail(5) matches = list(it) assert len(matches) == 4 # noqa: PLR2004 assert [m.obj for m in matches] == [0, 1, 2, 3] def test_query_tail_negative() -> None: """Test that we get an exception if tail is given a negative integer.""" with pytest.raises(ValueError, match="can't select a negative number of matches"): query("$.some.*", {"some": [0, 1, 2, 3]}).tail(-1) def test_query_last() -> None: """Test that we can get the last _n_ matches with `last`.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}).last(2) matches = list(it) assert len(matches) == 2 # noqa: PLR2004 assert [m.obj for m in matches] == [2, 3] def test_query_first_one() -> None: """Test that we can get the first match from a query iterator.""" maybe_match = query("$.some.*", {"some": [0, 1, 2, 3]}).first_one() assert isinstance(maybe_match, JSONPathMatch) assert maybe_match.value == 0 def test_query_first_one_of_empty_iterator() -> None: """Test that `first_one` returns `None` if the iterator is empty.""" maybe_match = query("$.nosuchthing.*", {"some": [0, 1, 2, 3]}).first_one() assert maybe_match is None def test_query_one() -> None: """Test that we can get the first match from a query iterator with `one`.""" maybe_match = query("$.some.*", {"some": [0, 1, 2, 3]}).one() assert isinstance(maybe_match, JSONPathMatch) assert maybe_match.value == 0 def test_query_last_one() -> None: """Test that we can get the last match from a query iterator.""" maybe_match = query("$.some.*", {"some": [0, 1, 2, 3]}).last_one() assert isinstance(maybe_match, JSONPathMatch) assert maybe_match.value == 3 # noqa: PLR2004 def test_query_last_of_empty_iterator() -> None: """Test that `last_one` returns `None` if the iterator is empty.""" maybe_match = query("$.nosuchthing.*", {"some": [0, 1, 2, 3]}).last_one() assert maybe_match is None def test_query_tee() -> None: """Test that we can tee a query iterator.""" it1, it2 = query("$.some.*", {"some": [0, 1, 2, 3]}).tee() rv1 = it1.skip(1).one() assert rv1 is not None assert rv1.value == 1 rv2 = it2.skip(2).one() assert rv2 is not None assert rv2.value == 2 # noqa: PLR2004 def test_query_pointers() -> None: """Test that we can get pointers from a query.""" pointers = list(query("$.some.*", {"some": [0, 1, 2, 3]}).pointers()) assert len(pointers) == 4 # noqa: PLR2004 assert pointers[0] == JSONPointer("/some/0") def test_query_take() -> None: """Test that we can take matches from a query iterable.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}) head = list(it.take(2).values()) assert len(head) == 2 # noqa: PLR2004 assert head == [0, 1] assert list(it.values()) == [2, 3] def test_query_take_all() -> None: """Test that we can take all matches from a query iterable.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}) head = list(it.take(4).values()) assert len(head) == 4 # noqa: PLR2004 assert head == [0, 1, 2, 3] assert list(it.values()) == [] def test_query_take_more() -> None: """Test that we can take more matches than there are nodes.""" it = query("$.some.*", {"some": [0, 1, 2, 3]}) head = list(it.take(5).values()) assert len(head) == 4 # noqa: PLR2004 assert head == [0, 1, 2, 3] assert list(it.values()) == [] def test_query_from_compiled_path() -> None: """Test that we can get a query iterator from a compiled path.""" 
path = compile("$.some.*") it = path.query({"some": [0, 1, 2, 3]}).values() assert list(it) == [0, 1, 2, 3] def test_query_from_compiled_compound_path() -> None: """Test that we can get a query iterator from a compiled path.""" path = compile("$.some[0] | $.some[2]") it = path.query({"some": [0, 1, 2, 3]}).values() assert list(it) == [0, 2] jg-rp-python-jsonpath-830094f/tests/test_ietf.py000066400000000000000000000305621512714264000216650ustar00rootroot00000000000000"""Test cases from examples in draft-ietf-jsonpath-base-11. The test cases defined here are taken from version 11 of the JSONPath internet draft, draft-ietf-jsonpath-base-11. In accordance with https://trustee.ietf.org/license-info, Revised BSD License text is included bellow. See https://datatracker.ietf.org/doc/html/draft-ietf-jsonpath-base-11 Copyright (c) 2023 IETF Trust and the persons identified as authors of the code. All rights reserved.Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
""" import asyncio import dataclasses import operator from typing import Any from typing import List from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] FILTER_SELECTOR_DATA = { "a": [3, 5, 1, 2, 4, 6, {"b": "j"}, {"b": "k"}, {"b": {}}, {"b": "kilo"}], "o": {"p": 1, "q": 2, "r": 3, "s": 5, "t": {"u": 6}}, "e": "f", } TEST_CASES = [ Case(description="root", path="$", data={"k": "v"}, want=[{"k": "v"}]), Case( description="name selector - named value in nested object (single quote)", path="$.o['j j']['k.k']", data={"o": {"j j": {"k.k": 3}}, "'": {"@": 2}}, want=[3], ), Case( description="name selector - named value in nested object (double quote)", path='$.o["j j"]["k.k"]', data={"o": {"j j": {"k.k": 3}}, "'": {"@": 2}}, want=[3], ), Case( description="name selector - unusual member names", path='$["\'"]["@"]', data={"o": {"j j": {"k.k": 3}}, "'": {"@": 2}}, want=[2], ), Case( description="wildcard selector - object values", path="$[*]", data={"o": {"j": 1, "k": 2}, "a": [5, 3]}, want=[{"j": 1, "k": 2}, [5, 3]], ), Case( description="wildcard selector - object values (dot property)", path="$.o[*]", data={"o": {"j": 1, "k": 2}, "a": [5, 3]}, want=[1, 2], ), Case( description="wildcard selector - double wild", path="$.o[*, *]", data={"o": {"j": 1, "k": 2}, "a": [5, 3]}, want=[1, 2, 1, 2], ), Case( description="wildcard selector - dot property wild", path="$.a[*]", data={"o": {"j": 1, "k": 2}, "a": [5, 3]}, want=[5, 3], ), Case( description="index selector - element of array", path="$[1]", data=["a", "b"], want=["b"], ), Case( description="index selector - element of array, from the end", path="$[-2]", data=["a", "b"], want=["a"], ), Case( description="array slice selector - slice with default step", path="$[1:3]", data=["a", "b", "c", "d", "e", "f", "g"], want=["b", "c"], ), Case( description="array slice selector - slice with no end index", path="$[5:]", data=["a", "b", "c", "d", "e", "f", "g"], want=["f", "g"], ), Case( description="array slice selector - slice with negative step", path="$[5:1:-2]", data=["a", "b", "c", "d", "e", "f", "g"], want=["f", "d"], ), Case( description="array slice selector - slice in reverse order", path="$[::-1]", data=["a", "b", "c", "d", "e", "f", "g"], want=["g", "f", "e", "d", "c", "b", "a"], ), Case( description="filter selector - Member value comparison", path="$.a[?(@.b == 'kilo')]", data=FILTER_SELECTOR_DATA, want=[{"b": "kilo"}], ), Case( description="filter selector - Array value comparison", path="$.a[?(@>3.5)]", data=FILTER_SELECTOR_DATA, want=[5, 4, 6], ), Case( description="filter selector - Array value existence", path="$.a[?(@.b)]", data=FILTER_SELECTOR_DATA, want=[{"b": "j"}, {"b": "k"}, {"b": {}}, {"b": "kilo"}], ), Case( description="filter selector - Existence of non-singular queries", path="$[?(@.*)]", data=FILTER_SELECTOR_DATA, want=[ [3, 5, 1, 2, 4, 6, {"b": "j"}, {"b": "k"}, {"b": {}}, {"b": "kilo"}], {"p": 1, "q": 2, "r": 3, "s": 5, "t": {"u": 6}}, ], ), Case( description="filter selector - Nested filters", path="$[?(@[?(@.b)])]", data=FILTER_SELECTOR_DATA, want=[[3, 5, 1, 2, 4, 6, {"b": "j"}, {"b": "k"}, {"b": {}}, {"b": "kilo"}]], ), Case( description="filter selector - Array value logical OR", path='$.a[?(@<2 || @.b == "k")]', data=FILTER_SELECTOR_DATA, want=[1, {"b": "k"}], ), Case( description="filter 
selector - Array value regular expression match", path='$.a[?match(@.b, "[jk]")]', data=FILTER_SELECTOR_DATA, want=[{"b": "j"}, {"b": "k"}], ), Case( description="filter selector - Array value regular expression search", path='$.a[?search(@.b, "[jk]")]', data=FILTER_SELECTOR_DATA, want=[{"b": "j"}, {"b": "k"}, {"b": "kilo"}], ), Case( description="filter selector - Object value logical AND", path="$.o[?(@>1 && @<4)]", data=FILTER_SELECTOR_DATA, want=[2, 3], ), Case( description="filter selector - Object value logical OR", path="$.o[?(@.u || @.x)]", data=FILTER_SELECTOR_DATA, want=[{"u": 6}], ), Case( description="filter selector - Comparison of queries with no values", path="$.a[?(@.b == $.x)]", data=FILTER_SELECTOR_DATA, want=[3, 5, 1, 2, 4, 6], ), Case( description=( "filter selector - Comparisons of primitive and of structured values" ), path="$.a[?(@ == @)]", data=FILTER_SELECTOR_DATA, want=[3, 5, 1, 2, 4, 6, {"b": "j"}, {"b": "k"}, {"b": {}}, {"b": "kilo"}], ), Case( description=("child segment - Indices"), path="$[0, 3]", data=["a", "b", "c", "d", "e", "f", "g"], want=["a", "d"], ), Case( description=("child segment - Slice and index"), path="$[0:2, 5]", data=["a", "b", "c", "d", "e", "f", "g"], want=["a", "b", "f"], ), Case( description=("child segment - Duplicated entries"), path="$[0, 0]", data=["a", "b", "c", "d", "e", "f", "g"], want=["a", "a"], ), Case( description=("descendant segment - Object values"), path="$..j", data={"o": {"j": 1, "k": 2}, "a": [5, 3, [{"j": 4}, {"k": 6}]]}, want=[1, 4], ), Case( description=("descendant segment - Array values"), path="$..[0]", data={"o": {"j": 1, "k": 2}, "a": [5, 3, [{"j": 4}, {"k": 6}]]}, want=[5, {"j": 4}], ), Case( description=("descendant segment - All values"), path="$..[*]", data={"o": {"j": 1, "k": 2}, "a": [5, 3, [{"j": 4}, {"k": 6}]]}, want=[ {"j": 1, "k": 2}, [5, 3, [{"j": 4}, {"k": 6}]], 1, 2, 5, 3, [{"j": 4}, {"k": 6}], {"j": 4}, {"k": 6}, 4, 6, ], ), Case( description=("descendant segment - Input value is visited"), path="$..o", data={"o": {"j": 1, "k": 2}, "a": [5, 3, [{"j": 4}, {"k": 6}]]}, want=[{"j": 1, "k": 2}], ), Case( description=("descendant segment - Multiple segments"), path="$.a..[0, 1]", data={"o": {"j": 1, "k": 2}, "a": [5, 3, [{"j": 4}, {"k": 6}]]}, want=[5, 3, {"j": 4}, {"k": 6}], ), Case( description=("null semantics - Object value"), path="$.a", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[None], ), Case( description=("null semantics - null used as array"), path="$.a[0]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[], ), Case( description=("null semantics - null used as object"), path="$.a.d", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[], ), Case( description=("null semantics - Array value"), path="$.b[0]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[None], ), Case( description=("null semantics - Array value wild"), path="$.b[*]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[None], ), Case( description=("null semantics - Existence"), path="$.b[?(@)]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[None], ), Case( description=("null semantics - Comparison"), path="$.b[?(@==null)]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[None], ), Case( description=("null semantics - Comparison with 'missing' value"), path="$.c[?(@.d==null)]", data={"a": None, "b": [None], "c": [{}], "null": 1}, want=[], ), Case( description=( "null semantics - Not JSON null at all, just a member name string" ), path="$.null", data={"a": 
None, "b": [None], "c": [{}], "null": 1}, want=[1], ), Case( description=("filter, length function, string data"), path="$[?(length(@.a)>=2)]", data=[{"a": "ab"}, {"a": "d"}], want=[{"a": "ab"}], ), Case( description=("filter, length function, array data"), path="$[?(length(@.a)>=2)]", data=[{"a": [1, 2, 3]}, {"a": [1]}], want=[{"a": [1, 2, 3]}], ), Case( description=("filter, length function, missing data"), path="$[?(length(@.a)>=2)]", data=[{"d": "f"}], want=[], ), Case( description=("filter, count function"), path="$[?(count(@..*)>2)]", data=[{"a": [1, 2, 3]}, {"a": [1], "d": "f"}, {"a": 1, "d": "f"}], want=[{"a": [1, 2, 3]}, {"a": [1], "d": "f"}], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_find_ieft(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_find_ieft_async(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) async def coro() -> List[object]: return await path.findall_async(case.data) assert asyncio.run(coro()) == case.want @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_hash_path(env: JSONPathEnvironment, case: Case) -> None: """Test that paths are hashable.""" hash(env.compile(case.path)) jg-rp-python-jsonpath-830094f/tests/test_ietf_comparison.py000066400000000000000000000155041512714264000241160ustar00rootroot00000000000000"""Default filter expression comparison test cases from examples in draft-ietf-jsonpath-base-11. The test cases defined here are taken from version 11 of the JSONPath internet draft, draft-ietf-jsonpath-base-11. In accordance with https://trustee.ietf.org/license-info, Revised BSD License text is included bellow. See https://datatracker.ietf.org/doc/html/draft-ietf-jsonpath-base-11 Copyright (c) 2023 IETF Trust and the persons identified as authors of the code. All rights reserved.Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ # noqa: D205 import dataclasses import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath.filter import UNDEFINED from jsonpath.match import NodeList @dataclasses.dataclass class Case: description: str left: object op: str right: object want: bool DATA = {"obj": {"x": "y"}, "arr": [2, 3]} TEST_CASES = [ Case( description="$.absent1 == $.absent2", left=UNDEFINED, op="==", right=UNDEFINED, want=True, ), Case( description="$.absent1 == $.absent2, empty node lists", left=NodeList(), op="==", right=NodeList(), want=True, ), Case( description="$.absent1 == $.absent2, empty node list and undefined", left=NodeList(), op="==", right=UNDEFINED, want=True, ), Case( description="$.absent1 == $.absent2, undefined and empty node list", left=UNDEFINED, op="==", right=NodeList(), want=True, ), Case( description="$.absent1 <= $.absent2", left=UNDEFINED, op="<=", right=UNDEFINED, want=True, ), Case( description="$.absent == 'g'", left=UNDEFINED, op="==", right="g", want=False, ), Case( description="$.absent1 != $.absent2", left=UNDEFINED, op="!=", right=UNDEFINED, want=False, ), Case( description="$.absent != 'g'", left=UNDEFINED, op="!=", right="g", want=True, ), Case( description="1 <= 2", left=1, op="<=", right=2, want=True, ), Case( description="1 > 2", left=1, op=">", right=2, want=False, ), Case( description="13 == '13'", left=13, op="==", right="13", want=False, ), Case( description="'a' <= 'b'", left="a", op="<=", right="b", want=True, ), Case( description="'a' > 'b'", left="a", op=">", right="b", want=False, ), Case( description="$.obj == $.arr", left=DATA["obj"], op="==", right=DATA["arr"], want=False, ), Case( description="$.obj != $.arr", left=DATA["obj"], op="!=", right=DATA["arr"], want=True, ), Case( description="$.obj == $.obj", left=DATA["obj"], op="==", right=DATA["obj"], want=True, ), Case( description="$.obj != $.obj", left=DATA["obj"], op="!=", right=DATA["obj"], want=False, ), Case( description="$.arr == $.arr", left=DATA["arr"], op="==", right=DATA["arr"], want=True, ), Case( description="$.arr != $.arr", left=DATA["arr"], op="!=", right=DATA["arr"], want=False, ), Case( description="$.arr == 17", left=DATA["arr"], op="==", right=17, want=False, ), Case( description="$.arr != 17", left=DATA["arr"], op="!=", right=17, want=True, ), Case( description="$.obj <= $.arr", left=DATA["obj"], op="<=", right=DATA["arr"], want=False, ), Case( description="$.obj < $.arr", left=DATA["obj"], op="<", right=DATA["arr"], want=False, ), Case( description="$.obj <= $.obj", left=DATA["obj"], op="<=", right=DATA["obj"], want=True, ), Case( description="$.arr <= $.arr", left=DATA["arr"], op="<=", right=DATA["arr"], want=True, ), Case( description="1 <= $.arr", left=1, op="<=", right=DATA["arr"], want=False, ), Case( description="1 >= $.arr", left=1, op=">=", right=DATA["arr"], want=False, ), Case( description="1 > $.arr", left=1, op=">", right=DATA["arr"], want=False, ), Case( description="1 < $.arr", left=1, op="<", right=DATA["arr"], want=False, ), Case( 
description="true <= true", left=True, op="<=", right=True, want=True, ), Case( description="true > true", left=True, op=">", right=True, want=False, ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_compare_ieft(env: JSONPathEnvironment, case: Case) -> None: result = env.compare(case.left, case.op, case.right) assert result == case.want jg-rp-python-jsonpath-830094f/tests/test_ietf_well_typedness.py000066400000000000000000000137631512714264000250120ustar00rootroot00000000000000"""Function well-typedness test derived from IETF spec examples. The test cases defined here are taken from version 11 of the JSONPath internet draft, draft-ietf-jsonpath-base-11. In accordance with https://trustee.ietf.org/license-info, Revised BSD License text is included bellow. See https://datatracker.ietf.org/doc/html/draft-ietf-jsonpath-base-20 Copyright (c) 2023 IETF Trust and the persons identified as authors of the code. All rights reserved.Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
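A minimal sketch of what "well-typedness" means for the cases below, using
only queries taken from this module's test cases. Compilation alone is
checked; no data is evaluated:

    from jsonpath import JSONPathEnvironment
    from jsonpath.exceptions import JSONPathTypeError

    env = JSONPathEnvironment()
    # Well-typed: count() accepts a NodesType argument and returns a value.
    env.compile("$[?count(@.*) == 1]")

    try:
        # Ill-typed: an int literal is not a NodesType argument.
        env.compile("$[?count(1) == 1]")
    except JSONPathTypeError:
        pass
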
""" # noqa: D205 import dataclasses import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath.exceptions import JSONPathTypeError from jsonpath.function_extensions import ExpressionType from jsonpath.function_extensions import FilterFunction from jsonpath.match import NodeList @dataclasses.dataclass class Case: description: str path: str valid: bool TEST_CASES = [ Case( description="length, singular query, compared", path="$[?length(@) < 3]", valid=True, ), Case( description="length, non-singular query, compared", path="$[?length(@.*) < 3]", valid=False, ), Case( description="count, non-singular query, compared", path="$[?count(@.*) == 1]", valid=True, ), Case( description="count, int literal, compared", path="$[?count(1) == 1]", valid=False, ), Case( description="nested function, LogicalType -> NodesType", path="$[?count(foo(@.*)) == 1]", valid=True, ), Case( description="match, singular query, string literal", path="$[?match(@.timezone, 'Europe/.*')]", valid=True, ), Case( description="match, singular query, string literal, compared", path="$[?match(@.timezone, 'Europe/.*') == true]", valid=False, ), Case( description="value, non-singular query param, comparison", path="$[?value(@..color) == 'red']", valid=True, ), Case( description="value, non-singular query param", path="$[?value(@..color)]", valid=False, ), Case( description="function, singular query, value type param, logical return type", path="$[?bar(@.a)]", valid=True, ), Case( description=( "function, non-singular query, value type param, logical return type" ), path="$[?bar(@.*)]", valid=False, ), Case( description=( "function, non-singular query, nodes type param, logical return type" ), path="$[?bn(@.*)]", valid=True, ), Case( description=( "function, non-singular query, logical type param, logical return type" ), path="$[?bl(@.*)]", valid=True, ), Case( description="function, logical type param, comparison, logical return type", path="$[?bl(1==1)]", valid=True, ), Case( description="function, logical type param, literal, logical return type", path="$[?bl(1)]", valid=False, ), Case( description="function, value type param, literal, logical return type", path="$[?bar(1)]", valid=True, ), ] class MockFoo(FilterFunction): arg_types = [ExpressionType.NODES] return_type = ExpressionType.NODES def __call__(self, nodes: NodeList) -> NodeList: # noqa: D102 return nodes class MockBar(FilterFunction): arg_types = [ExpressionType.VALUE] return_type = ExpressionType.LOGICAL def __call__(self) -> bool: # noqa: D102 return False class MockBn(FilterFunction): arg_types = [ExpressionType.NODES] return_type = ExpressionType.LOGICAL def __call__(self, _: object) -> bool: # noqa: D102 return False class MockBl(FilterFunction): arg_types = [ExpressionType.LOGICAL] return_type = ExpressionType.LOGICAL def __call__(self, _: object) -> bool: # noqa: D102 return False @pytest.fixture() def env() -> JSONPathEnvironment: environment = JSONPathEnvironment() environment.function_extensions["foo"] = MockFoo() environment.function_extensions["bar"] = MockBar() environment.function_extensions["bn"] = MockBn() environment.function_extensions["bl"] = MockBl() return environment @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_ietf_well_typedness(env: JSONPathEnvironment, case: Case) -> None: if case.valid: env.compile(case.path) else: with pytest.raises(JSONPathTypeError): env.compile(case.path) 
jg-rp-python-jsonpath-830094f/tests/test_iregexp.py000066400000000000000000000017361512714264000224020ustar00rootroot00000000000000import pytest try: import iregexp_check # noqa: F401 IREGEXP_AVAILABLE = True except ImportError: IREGEXP_AVAILABLE = False import jsonpath @pytest.mark.skipif(IREGEXP_AVAILABLE is False, reason="requires iregexp_check") def test_iregexp_check() -> None: # Character classes are OK. query = "$[?match(@, '[0-9]+')]" data = ["123", "abc", "abc123"] assert jsonpath.findall(query, data) == ["123"] # Multi character escapes are not. query = "$[?match(@, '\\\\d+')]" assert jsonpath.findall(query, data) == [] @pytest.mark.skipif(IREGEXP_AVAILABLE, reason="iregexp_check is available") def test_no_iregexp_check() -> None: # Character classes are OK. query = "$[?match(@, '[0-9]+')]" data = ["123", "abc", "abc123"] assert jsonpath.findall(query, data) == ["123"] # Multi character escapes are OK when iregexp_check is not installed. query = "$[?match(@, '\\\\d+')]" assert jsonpath.findall(query, data) == ["123"] jg-rp-python-jsonpath-830094f/tests/test_isinstance_function.py000066400000000000000000000061231512714264000247770ustar00rootroot00000000000000import asyncio import dataclasses import operator from typing import Any from typing import List from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] SOME_OBJECT = object() TEST_CASES = [ Case( description="type of a string", path="$.some[?is(@.thing, 'string')]", data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="not a string", path="$.some[?is(@.thing, 'string')]", data={"some": [{"thing": 1}]}, want=[], ), Case( description="type of undefined", path="$.some[?is(@.other, 'undefined')]", # things without `other` data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="'missing' is an alias for 'undefined'", path="$.some[?is(@.other, 'missing')]", # things without `other` data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="type of None", path="$.some[?is(@.thing, 'null')]", data={"some": [{"thing": None}]}, want=[{"thing": None}], ), Case( description="type of array-like", path="$.some[?is(@.thing, 'array')]", data={"some": [{"thing": [1, 2, 3]}]}, want=[{"thing": [1, 2, 3]}], ), Case( description="type of mapping", path="$.some[?is(@.thing, 'object')]", data={"some": [{"thing": {"other": 1}}]}, want=[{"thing": {"other": 1}}], ), Case( description="type of bool", path="$.some[?is(@.thing, 'boolean')]", data={"some": [{"thing": True}]}, want=[{"thing": True}], ), Case( description="type of int", path="$.some[?is(@.thing, 'number')]", data={"some": [{"thing": 1}]}, want=[{"thing": 1}], ), Case( description="type of float", path="$.some[?is(@.thing, 'number')]", data={"some": [{"thing": 1.1}]}, want=[{"thing": 1.1}], ), Case( description="none of the above", path="$.some[?is(@.thing, 'object')]", data={"some": [{"thing": SOME_OBJECT}]}, want=[{"thing": SOME_OBJECT}], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_isinstance_function(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want @pytest.mark.parametrize("case", 
TEST_CASES, ids=operator.attrgetter("description")) def test_isinstance_function_async(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) async def coro() -> List[object]: return await path.findall_async(case.data) assert asyncio.run(coro()) == case.want jg-rp-python-jsonpath-830094f/tests/test_issues.py000066400000000000000000000074121512714264000222470ustar00rootroot00000000000000import pytest from jsonpath import JSONPatch from jsonpath import JSONPatchError from jsonpath import JSONPointerIndexError from jsonpath import findall from jsonpath import pointer def test_issue_72_andy() -> None: query = "andy" data = {"andy": [1, 2, 3]} assert findall(query, data) == [[1, 2, 3]] def test_issue_72_orders() -> None: query = "orders" data = {"orders": [1, 2, 3]} assert findall(query, data) == [[1, 2, 3]] def test_issue_103() -> None: query = "$..book[?(@.borrowers[?(@.name == _.name)])]" data = { "store": { "book": [ { "category": "reference", "author": "Nigel Rees", "title": "Sayings of the Century", "price": 8.95, }, { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, "borrowers": [ {"name": "John", "id": 101}, {"name": "Jane", "id": 102}, ], }, { "category": "fiction", "author": "Herman Melville", "title": "Moby Dick", "isbn": "0-553-21311-3", "price": 8.99, }, { "category": "fiction", "author": "J. R. R. Tolkien", "title": "The Lord of the Rings", "isbn": "0-395-19395-8", "price": 22.99, "borrowers": [{"name": "Peter", "id": 103}], }, ], "bicycle": {"color": "red", "price": 19.95}, } } filter_context = {"name": "John"} want = [ { "category": "fiction", "author": "Evelyn Waugh", "title": "Sword of Honour", "price": 12.99, "borrowers": [{"name": "John", "id": 101}, {"name": "Jane", "id": 102}], } ] assert findall(query, data, filter_context=filter_context) == want def test_quoted_reserved_word_and() -> None: query = "$['and']" data = {"and": [1, 2, 3]} assert findall(query, data) == [[1, 2, 3]] def test_quoted_reserved_word_or() -> None: query = "$['or']" data = {"or": [1, 2, 3]} assert findall(query, data) == [[1, 2, 3]] def test_issue_115() -> None: data = { "users": [ {"name": "Sue", "score": 100}, {"name": "John", "score": 86}, {"name": "Sally", "score": 84}, {"name": "Jane", "score": 55}, ] } assert pointer.resolve("/users/0/score", data) == 100 # noqa: PLR2004 # Negative index with pytest.raises(JSONPointerIndexError): pointer.resolve("/users/-1/score", data) def test_issue_117() -> None: # When the target value is an array of length 2, /foo/2 is the same as /foo/- patch = JSONPatch().add(path="/foo/2", value=99) data = {"foo": ["bar", "baz"]} assert patch.apply(data) == {"foo": ["bar", "baz", 99]} # Array length + 1 raises patch = JSONPatch().add(path="/foo/3", value=99) data = {"foo": ["bar", "baz"]} with pytest.raises(JSONPatchError): patch.apply(data) def test_issue_124() -> None: query_raw = r"$[?@type =~ /studio\/material\/.*/]" query = "$[?@type =~ /studio\\/material\\/.*/]" data = [ {"type": "studio/material/a"}, {"type": "studio/material/b"}, {"type": "studio foo"}, ] want = [{"type": "studio/material/a"}, {"type": "studio/material/b"}] assert findall(query, data) == want assert findall(query_raw, data) == want jg-rp-python-jsonpath-830094f/tests/test_json_patch.py000066400000000000000000000202051512714264000230570ustar00rootroot00000000000000"""JSON Patch test cases.""" import json import re from collections.abc import Mapping from io import StringIO from typing import Any from typing import Iterator import 
pytest from jsonpath import JSONPatch from jsonpath import patch from jsonpath.exceptions import JSONPatchError class MockMapping(Mapping): # type: ignore def __getitem__(self, __key: Any) -> Any: return "foo" def __iter__(self) -> Iterator[str]: return iter(["foo"]) def __len__(self) -> int: return 1 def test_add_to_immutable_mapping() -> None: patch = JSONPatch().add("/foo/bar", "baz") with pytest.raises( JSONPatchError, match=re.escape("unexpected operation on 'MockMapping' (add:0)") ): patch.apply({"foo": MockMapping()}) def test_remove_root() -> None: patch = JSONPatch().remove("") with pytest.raises(JSONPatchError, match=re.escape("can't remove root (remove:0)")): patch.apply({"foo": "bar"}) def test_remove_nonexistent_value() -> None: patch = JSONPatch().remove("/baz") with pytest.raises( JSONPatchError, match=re.escape("can't remove nonexistent property (remove:0)") ): patch.apply({"foo": "bar"}) def test_remove_array_end() -> None: patch = JSONPatch().remove("/foo/-") with pytest.raises( JSONPatchError, match=re.escape("can't remove nonexistent item (remove:0)") ): patch.apply({"foo": [1, 2, 3]}) def test_remove_from_immutable_mapping() -> None: patch = JSONPatch().remove("/bar/foo") with pytest.raises( JSONPatchError, match=re.escape("unexpected operation on 'MockMapping' (remove:0)"), ): patch.apply({"bar": MockMapping()}) def test_replace_root() -> None: assert patch.apply( [{"op": "replace", "path": "", "value": [1, 2, 3]}], {"foo": "bar"} ) == [1, 2, 3] def test_replace_a_nonexistent_item() -> None: with pytest.raises( JSONPatchError, match=re.escape("can't replace nonexistent item (replace:0)") ): patch.apply( [{"op": "replace", "path": "/foo/99", "value": 5}], {"foo": [1, 2, 3]} ) def test_replace_a_nonexistent_value() -> None: with pytest.raises( JSONPatchError, match=re.escape("can't replace nonexistent property (replace:0)"), ): patch.apply( [{"op": "replace", "path": "/foo/bar", "value": 5}], {"foo": {"baz": 10}} ) def test_replace_immutable_mapping() -> None: with pytest.raises( JSONPatchError, match=re.escape("unexpected operation on 'MockMapping' (replace:0)"), ): patch.apply( [{"op": "replace", "path": "/bar/foo", "value": "baz"}], {"bar": MockMapping()}, ) def test_move_to_child() -> None: with pytest.raises( JSONPatchError, match=re.escape("can't move object to one of its own children (move:0)"), ): patch.apply( [{"op": "move", "from": "/foo/bar", "path": "/foo/bar/baz"}], {"foo": {"bar": {"baz": [1, 2, 3]}}}, ) def test_move_nonexistent_value() -> None: with pytest.raises( JSONPatchError, match=re.escape("source object does not exist (move:0)") ): JSONPatch().move(from_="/foo/bar", path="/bar").apply({"foo": {"baz": 1}}) def test_move_to_root() -> None: patch = JSONPatch().move(from_="/foo", path="") assert patch.apply({"foo": {"bar": "baz"}}) == {"bar": "baz"} def test_move_to_immutable_mapping() -> None: patch = JSONPatch().move(from_="/foo/bar", path="/baz/bar") with pytest.raises( JSONPatchError, match=re.escape("unexpected operation on 'MockMapping' (move:0)"), ): patch.apply({"foo": {"bar": "hello"}, "baz": MockMapping()}) def test_copy_nonexistent_value() -> None: with pytest.raises( JSONPatchError, match=re.escape("source object does not exist (copy:0)") ): JSONPatch().copy(from_="/foo/bar", path="/bar").apply({"foo": {"baz": "hello"}}) def test_copy_to_root() -> None: patch = JSONPatch().copy(from_="/foo/bar", path="") assert patch.apply({"foo": {"bar": [1, 2, 3]}}) == [1, 2, 3] def test_copy_to_immutable_mapping() -> None: with pytest.raises( 
JSONPatchError, match=re.escape("unexpected operation on 'MockMapping' (copy:0)"), ): JSONPatch().copy(from_="/foo/bar", path="/baz/bar").apply( {"foo": {"bar": [1, 2, 3]}, "baz": MockMapping()} ) def test_patch_from_file_like() -> None: patch_doc = StringIO( json.dumps( [ {"op": "add", "path": "", "value": {"foo": {}}}, {"op": "add", "path": "/foo", "value": {"bar": []}}, {"op": "add", "path": "/foo/bar/-", "value": 1}, ] ) ) patch = JSONPatch(patch_doc) assert patch.apply({}) == {"foo": {"bar": [1]}} def test_patch_from_string() -> None: patch_doc = json.dumps( [ {"op": "add", "path": "", "value": {"foo": {}}}, {"op": "add", "path": "/foo", "value": {"bar": []}}, {"op": "add", "path": "/foo/bar/-", "value": 1}, ] ) patch = JSONPatch(patch_doc) assert patch.apply({}) == {"foo": {"bar": [1]}} def test_unexpected_patch_ops() -> None: with pytest.raises( JSONPatchError, match=re.escape("expected a sequence of patch operations, found 'MockMapping'"), ): JSONPatch(MockMapping()) # type: ignore def test_construct_missing_op() -> None: with pytest.raises(JSONPatchError, match=re.escape("missing 'op' member at op 0")): JSONPatch([{}]) def test_construct_unknown_op() -> None: msg = ( "expected 'op' to be one of 'add', 'remove', 'replace', " "'move', 'copy' or 'test' (foo:0)" ) with pytest.raises(JSONPatchError, match=re.escape(msg)): JSONPatch([{"op": "foo"}]) def test_construct_missing_pointer() -> None: msg = "missing property 'path' (add:0)" with pytest.raises(JSONPatchError, match=re.escape(msg)): JSONPatch([{"op": "add", "value": "foo"}]) def test_construct_missing_value() -> None: msg = "missing property 'value' (add:0)" with pytest.raises(JSONPatchError, match=re.escape(msg)): JSONPatch([{"op": "add", "path": "/foo"}]) def test_construct_pointer_not_a_string() -> None: msg = "expected a JSON Pointer string for 'path', found 'int' (add:0)" with pytest.raises(JSONPatchError, match=re.escape(msg)): JSONPatch([{"op": "add", "path": 5, "value": "foo"}]) def test_apply_to_str() -> None: patch_doc = json.dumps( [ {"op": "add", "path": "", "value": {"foo": {}}}, {"op": "add", "path": "/foo", "value": {"bar": []}}, {"op": "add", "path": "/foo/bar/-", "value": 1}, ] ) data_doc = json.dumps({}) assert patch.apply(patch_doc, data_doc) == {"foo": {"bar": [1]}} def test_apply_to_file_like() -> None: patch_doc = StringIO( json.dumps( [ {"op": "add", "path": "", "value": {"foo": {}}}, {"op": "add", "path": "/foo", "value": {"bar": []}}, {"op": "add", "path": "/foo/bar/-", "value": 1}, ] ) ) data_doc = StringIO(json.dumps({})) assert patch.apply(patch_doc, data_doc) == {"foo": {"bar": [1]}} def test_asdict() -> None: patch_doc = [ {"op": "add", "path": "/foo/bar", "value": "foo"}, {"op": "remove", "path": "/foo/bar"}, {"op": "replace", "path": "/foo/bar", "value": "foo"}, {"op": "move", "from": "/baz/foo", "path": "/foo/bar"}, {"op": "copy", "from": "/baz/foo", "path": "/foo/bar"}, {"op": "test", "path": "/foo/bar", "value": "foo"}, ] patch = JSONPatch(patch_doc) assert patch.asdicts() == patch_doc def test_non_standard_addap_op() -> None: # Index 7 is out of range and would raises a JSONPatchError with the `add` op. 
patch = JSONPatch().addap(path="/foo/7", value=99) assert patch.apply({"foo": [1, 2, 3]}) == {"foo": [1, 2, 3, 99]} def test_add_to_mapping_with_int_key() -> None: patch = JSONPatch().add(path="/1", value=99) assert patch.apply({"foo": 1}) == {"foo": 1, "1": 99} jg-rp-python-jsonpath-830094f/tests/test_json_patch_rfc6902.py000066400000000000000000000170651512714264000242440ustar00rootroot00000000000000"""Test cases from rfc6902 examples. Most of the test cases defined here are taken from rfc6902. The appropriate Simplified BSD License is included below. Copyright (c) 2013 IETF Trust and the persons identified as authors of the code. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
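A minimal sketch of the two ways a patch is constructed in the cases below.
It is hedged and repeats only the first RFC 6902 example from this module:

    from jsonpath import JSONPatch

    # Fluent builder, equivalent to [{"op": "add", "path": "/baz", "value": "qux"}].
    patch = JSONPatch().add(path="/baz", value="qux")
    assert patch.apply({"foo": "bar"}) == {"foo": "bar", "baz": "qux"}

    # The same operation given as a JSON Patch document.
    patch = JSONPatch([{"op": "add", "path": "/baz", "value": "qux"}])
    assert patch.apply({"foo": "bar"}) == {"foo": "bar", "baz": "qux"}
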
""" import copy import dataclasses import re from operator import attrgetter from typing import Dict from typing import MutableMapping from typing import MutableSequence from typing import Union import pytest from jsonpath import JSONPatch from jsonpath.exceptions import JSONPatchError from jsonpath.exceptions import JSONPatchTestFailure @dataclasses.dataclass class Case: description: str data: Union[MutableSequence[object], MutableMapping[str, object]] patch: JSONPatch op: Dict[str, object] want: Union[MutableSequence[object], MutableMapping[str, object]] TEST_CASES = [ Case( description="add an object member", data={"foo": "bar"}, patch=JSONPatch().add(path="/baz", value="qux"), op={"op": "add", "path": "/baz", "value": "qux"}, want={"foo": "bar", "baz": "qux"}, ), Case( description="add an array element", data={"foo": ["bar", "baz"]}, patch=JSONPatch().add(path="/foo/1", value="qux"), op={"op": "add", "path": "/foo/1", "value": "qux"}, want={"foo": ["bar", "qux", "baz"]}, ), Case( description="append to an array", data={"foo": ["bar", "baz"]}, patch=JSONPatch().add(path="/foo/-", value="qux"), op={"op": "add", "path": "/foo/-", "value": "qux"}, want={"foo": ["bar", "baz", "qux"]}, ), Case( description="add to the root", data={"foo": "bar"}, patch=JSONPatch().add(path="", value={"some": "thing"}), op={"op": "add", "path": "", "value": {"some": "thing"}}, want={"some": "thing"}, ), Case( description="remove an object member", data={"baz": "qux", "foo": "bar"}, patch=JSONPatch().remove(path="/baz"), op={"op": "remove", "path": "/baz"}, want={"foo": "bar"}, ), Case( description="remove an array element", data={"foo": ["bar", "qux", "baz"]}, patch=JSONPatch().remove(path="/foo/1"), op={"op": "remove", "path": "/foo/1"}, want={"foo": ["bar", "baz"]}, ), Case( description="replace an object member", data={"baz": "qux", "foo": "bar"}, patch=JSONPatch().replace(path="/baz", value="boo"), op={"op": "replace", "path": "/baz", "value": "boo"}, want={"baz": "boo", "foo": "bar"}, ), Case( description="replace an array element", data={"foo": [1, 2, 3]}, patch=JSONPatch().replace(path="/foo/0", value=9), op={"op": "replace", "path": "/foo/0", "value": 9}, want={"foo": [9, 2, 3]}, ), Case( description="move a value", data={"foo": {"bar": "baz", "waldo": "fred"}, "qux": {"corge": "grault"}}, patch=JSONPatch().move(from_="/foo/waldo", path="/qux/thud"), op={"op": "move", "from": "/foo/waldo", "path": "/qux/thud"}, want={"foo": {"bar": "baz"}, "qux": {"corge": "grault", "thud": "fred"}}, ), Case( description="move an array element", data={"foo": ["all", "grass", "cows", "eat"]}, patch=JSONPatch().move(from_="/foo/1", path="/foo/3"), op={"op": "move", "from": "/foo/1", "path": "/foo/3"}, want={"foo": ["all", "cows", "eat", "grass"]}, ), Case( description="copy a value", data={"foo": {"bar": "baz", "waldo": "fred"}, "qux": {"corge": "grault"}}, patch=JSONPatch().copy(from_="/foo/waldo", path="/qux/thud"), op={"op": "copy", "from": "/foo/waldo", "path": "/qux/thud"}, want={ "foo": {"bar": "baz", "waldo": "fred"}, "qux": {"corge": "grault", "thud": "fred"}, }, ), Case( description="copy an array element", data={"foo": ["all", "grass", "cows", "eat"]}, patch=JSONPatch().copy(from_="/foo/1", path="/foo/3"), op={"op": "copy", "path": "/foo/3", "from": "/foo/1"}, want={"foo": ["all", "grass", "cows", "grass", "eat"]}, ), Case( description="test a value", data={"baz": "qux", "foo": ["a", 2, "c"]}, patch=JSONPatch().test(path="/baz", value="qux").test(path="/foo/1", value=2), op={"op": "test", "path": "/baz", 
"value": "qux"}, want={"baz": "qux", "foo": ["a", 2, "c"]}, ), Case( description="add a nested member object", data={"foo": "bar"}, patch=JSONPatch().add(path="/child", value={"grandchild": {}}), op={"op": "add", "path": "/child", "value": {"grandchild": {}}}, want={"foo": "bar", "child": {"grandchild": {}}}, ), Case( description="add an array value", data={"foo": ["bar"]}, patch=JSONPatch().add(path="/foo/-", value=["abc", "def"]), op={"op": "add", "path": "/foo/-", "value": ["abc", "def"]}, want={"foo": ["bar", ["abc", "def"]]}, ), ] @pytest.mark.parametrize("case", TEST_CASES, ids=attrgetter("description")) def test_rfc6902_examples(case: Case) -> None: assert case.patch.apply(copy.deepcopy(case.data)) == case.want def test_test_op_failure() -> None: patch = JSONPatch().test(path="/baz", value="bar") with pytest.raises(JSONPatchTestFailure, match=re.escape("test failed (test:0)")): patch.apply({"baz": "qux"}) def test_add_to_nonexistent_target() -> None: patch = JSONPatch().add(path="/baz/bat", value="qux") with pytest.raises( JSONPatchError, match=re.escape("pointer key error: 'baz' (add:0)") ): patch.apply({"foo": "bar"}) def test_add_array_index_out_of_range() -> None: patch = JSONPatch().add(path="/foo/7", value=99) with pytest.raises(JSONPatchError, match=re.escape("index out of range (add:0)")): patch.apply({"foo": [1, 2, 3]}) @pytest.mark.parametrize("case", TEST_CASES, ids=attrgetter("description")) def test_json_patch_constructor(case: Case) -> None: patch = JSONPatch([case.op]) assert len(patch.ops) == 1 assert patch.apply(copy.deepcopy(case.data)) == case.want jg-rp-python-jsonpath-830094f/tests/test_json_pointer.py000066400000000000000000000241501512714264000234430ustar00rootroot00000000000000"""JSONPointer test cases.""" from io import StringIO from typing import List from typing import Union import pytest import jsonpath from jsonpath import JSONPointer from jsonpath import JSONPointerError from jsonpath import JSONPointerIndexError from jsonpath import JSONPointerResolutionError from jsonpath import JSONPointerTypeError from jsonpath.pointer import UNDEFINED def test_match_to_pointer() -> None: data = {"some": {"thing": "else"}} matches = list(jsonpath.finditer("$.some.thing", data)) assert len(matches) == 1 match = matches[0] pointer = match.pointer() assert pointer.resolve(data) == match.obj assert pointer.resolve({"some": {"thing": "foo"}}) == "foo" def test_pointer_repr() -> None: data = {"some": {"thing": "else"}} matches = list(jsonpath.finditer("$.some.thing", data)) assert len(matches) == 1 match = matches[0] pointer = match.pointer() assert str(pointer) == "/some/thing" def test_resolve_with_default() -> None: data = {"some": {"thing": "else"}} pointer = JSONPointer("/some/other") assert pointer.resolve(data, default=None) is None def test_pointer_min_int_index() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer(f"/some/thing/{JSONPointer.min_int_index - 1}") with pytest.raises(jsonpath.JSONPointerIndexError): pointer.resolve(data) def test_resolve_int_key() -> None: data = {"some": {"1": "thing"}} pointer = JSONPointer("/some/1") assert pointer.resolve(data) == "thing" def test_resolve_int_missing_key() -> None: data = {"some": {"1": "thing"}} pointer = JSONPointer("/some/2") with pytest.raises(KeyError): pointer.resolve(data) def test_resolve_str_index() -> None: data = {"some": ["a", "b", "c"]} pointer = JSONPointer("/some/1", parts=("some", "1")) assert pointer.resolve(data) == "b" def test_keys_selector() -> None: data = {"some": {"thing": 
"else"}} matches = list(jsonpath.finditer("$.some.~", data)) assert len(matches) == 1 match = matches[0] pointer = match.pointer() assert str(pointer) == "/some/~0thing" assert pointer.resolve(data) == "thing" def test_mapping_key_error() -> None: data = {"some": {"thing": "else"}} pointer = JSONPointer("/some/other") with pytest.raises(KeyError): pointer.resolve(data) def test_sequence_type_error() -> None: data = {"some": ["a", "b", "c"]} pointer = JSONPointer("/some/thing") with pytest.raises(TypeError): pointer.resolve(data) def test_hyphen_index() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("/some/thing/-") with pytest.raises(JSONPointerIndexError): pointer.resolve(data) def test_negative_index() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("/some/thing/-2") with pytest.raises(JSONPointerIndexError): pointer.resolve(data) def test_resolve_with_parent() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("/some/thing") parent, rv = pointer.resolve_parent(data) assert parent == data["some"] assert rv == data["some"]["thing"] def test_resolve_with_missing_parent() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("") parent, rv = pointer.resolve_parent(data) assert parent is None assert rv == data def test_resolve_with_missing_target() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("/some/other") parent, rv = pointer.resolve_parent(data) assert parent == data["some"] assert rv == UNDEFINED def test_resolve_from_json_string() -> None: data = r'{"some": {"thing": [1,2,3]}}' pointer = JSONPointer("/some/thing") assert pointer.resolve(data) == [1, 2, 3] assert pointer.resolve_parent(data) == ({"thing": [1, 2, 3]}, [1, 2, 3]) def test_resolve_from_file_like() -> None: data = StringIO(r'{"some": {"thing": [1,2,3]}}') pointer = JSONPointer("/some/thing") assert pointer.resolve(data) == [1, 2, 3] data.seek(0) assert pointer.resolve_parent(data) == ({"thing": [1, 2, 3]}, [1, 2, 3]) def test_convenience_resolve() -> None: data = {"some": {"thing": [1, 2, 3]}} assert jsonpath.resolve("/some/thing/0", data) == 1 with pytest.raises(JSONPointerResolutionError): jsonpath.resolve("/some/thing/99", data) def test_convenience_resolve_default() -> None: data = {"some": {"thing": [1, 2, 3]}} assert jsonpath.resolve("/some/thing/99", data, default=0) == 0 def test_convenience_resolve_from_parts() -> None: data = {"some": {"thing": [1, 2, 3]}} assert jsonpath.resolve(["some", "thing", "0"], data) == 1 with pytest.raises(JSONPointerResolutionError): jsonpath.resolve(["some", "thing", "99"], data) def test_convenience_resolve_default_from_parts() -> None: data = {"some": {"thing": [1, 2, 3]}} assert jsonpath.resolve(["some", "thing", "99"], data, default=0) == 0 def test_pointer_from_parts() -> None: parts: List[Union[str, int]] = ["some", "thing", 0] pointer = JSONPointer.from_parts(parts) assert str(pointer) == "/some/thing/0" def test_pointer_from_empty_parts() -> None: parts: List[Union[str, int]] = [] pointer = JSONPointer.from_parts(parts) assert str(pointer) == "" def test_pointer_from_only_empty_string_parts() -> None: parts: List[Union[str, int]] = [""] pointer = JSONPointer.from_parts(parts) assert str(pointer) == "/" def test_pointer_from_uri_encoded_parts() -> None: parts: List[Union[str, int]] = ["some%20thing", "else", 0] pointer = JSONPointer.from_parts(parts, uri_decode=True) assert str(pointer) == "/some thing/else/0" def test_index_with_leading_zero() -> None: data = {"some": {"thing": 
[1, 2, 3]}} pointer = JSONPointer("/some/thing/0") assert pointer.resolve(data) == 1 pointer = JSONPointer("/some/thing/01") with pytest.raises(JSONPointerTypeError): pointer.resolve(data) pointer = JSONPointer("/some/thing/00") with pytest.raises(JSONPointerTypeError): pointer.resolve(data) pointer = JSONPointer("/some/thing/01") with pytest.raises(JSONPointerTypeError): pointer.resolve_parent(data) def test_pointer_without_leading_slash() -> None: with pytest.raises(JSONPointerError): JSONPointer("some/thing/01") with pytest.raises(JSONPointerError): JSONPointer("nosuchthing") def test_pointer_with_leading_whitespace() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer(" /some/thing/0") assert pointer.resolve(data) == 1 assert str(pointer) == "/some/thing/0" def test_pointer_parent() -> None: data = {"some": {"thing": [1, 2, 3]}} pointer = JSONPointer("/some/thing/0") assert pointer.resolve(data) == 1 parent = pointer.parent() assert str(parent) == "/some/thing" assert parent.resolve(data) == [1, 2, 3] parent = parent.parent() assert str(parent) == "/some" assert parent.resolve(data) == {"thing": [1, 2, 3]} parent = parent.parent() assert str(parent) == "" assert parent.resolve(data) == {"some": {"thing": [1, 2, 3]}} parent = parent.parent() assert str(parent) == "" assert parent.resolve(data) == {"some": {"thing": [1, 2, 3]}} def test_join_pointers_with_slash() -> None: """Test that we can join a pointer to a relative path with the `/` operator.""" pointer = JSONPointer("/foo") assert str(pointer) == "/foo" assert str(pointer / "bar") == "/foo/bar" assert str(pointer / "baz") == "/foo/baz" assert str(pointer / "bar/baz") == "/foo/bar/baz" assert str(pointer / "bar/baz" / "0") == "/foo/bar/baz/0" assert str(pointer / "/bar") == "/bar" with pytest.raises(TypeError): pointer / 0 # type: ignore def test_join_pointers() -> None: pointer = JSONPointer("/foo") assert str(pointer) == "/foo" assert str(pointer.join("bar")) == "/foo/bar" assert str(pointer.join("baz")) == "/foo/baz" assert str(pointer.join("bar/baz")) == "/foo/bar/baz" assert str(pointer.join("bar", "baz")) == "/foo/bar/baz" assert str(pointer.join("bar/baz", "0")) == "/foo/bar/baz/0" assert str(pointer.join("/bar")) == "/bar" assert str(pointer.join("/bar", "0")) == "/bar/0" with pytest.raises(TypeError): pointer.join(0) # type: ignore def test_pointer_exists() -> None: data = {"some": {"thing": [1, 2, 3]}, "other": None} assert JSONPointer("/some/thing").exists(data) is True assert JSONPointer("/other").exists(data) is True assert JSONPointer("/nosuchthing").exists(data) is False def test_non_standard_property_pointer() -> None: data = {"foo": {"bar": [1, 2, 3], "#baz": "hello"}} assert JSONPointer("/foo/#bar").resolve(data) == "bar" assert JSONPointer("/foo/#baz").resolve(data) == "hello" def test_non_standard_index_pointer() -> None: data = {"foo": {"bar": [1, 2, 3], "#baz": "hello"}} assert JSONPointer("/foo/bar/#1").resolve(data) == 1 with pytest.raises(JSONPointerIndexError): JSONPointer("/foo/bar/#9").resolve(data) def test_non_standard_index_pointer_with_leading_zero() -> None: data = {"foo": {"bar": [1, 2, 3], "#baz": "hello"}} with pytest.raises(JSONPointerTypeError): JSONPointer("/foo/bar/#01").resolve(data) with pytest.raises(JSONPointerTypeError): JSONPointer("/foo/bar/#09").resolve(data) def test_non_standard_index_pointer_to_non_array_object() -> None: data = {"foo": {"bar": True, "#baz": "hello"}} with pytest.raises(JSONPointerTypeError): JSONPointer("/foo/bar/#1").resolve(data) def 
test_trailing_slash() -> None: data = {"foo": {"": [1, 2, 3], " ": [4, 5, 6]}} assert JSONPointer("/foo/").resolve(data) == [1, 2, 3] assert JSONPointer("/foo/ ").resolve(data) == [4, 5, 6] def test_index_token_on_string_value() -> None: data = {"foo": "bar"} pointer = JSONPointer("/foo/1") with pytest.raises(JSONPointerTypeError): pointer.resolve(data) def test_index_like_token_on_object_value() -> None: data = {"foo": {"-1": "bar"}} pointer = JSONPointer("/foo/-1") assert pointer.resolve(data) == "bar" jg-rp-python-jsonpath-830094f/tests/test_json_pointer_rfc6901.py000066400000000000000000000063631512714264000246230ustar00rootroot00000000000000"""Test cases from rfc6901 examples. The test cases defined here are taken from rfc6901. The appropriate Simplified BSD License is included below. Copyright (c) 2013 IETF Trust and the persons identified as authors of the code. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
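A minimal sketch of how the cases below are resolved. It is hedged and uses
only pointers and values taken from the RFC 6901 example document defined in
this module:

    from jsonpath import JSONPointer

    document = {"foo": ["bar", "baz"], "a/b": 1, "m~n": 8}
    assert JSONPointer("/foo/0").resolve(document) == "bar"
    assert JSONPointer("/a~1b").resolve(document) == 1  # "~1" escapes "/"
    assert JSONPointer("/m~0n").resolve(document) == 8  # "~0" escapes "~"
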
""" import dataclasses import pytest from jsonpath import JSONPointer @dataclasses.dataclass class Case: pointer: str want: object RFC6901_DOCUMENT = { "foo": ["bar", "baz"], "": 0, "a/b": 1, "c%d": 2, "e^f": 3, "g|h": 4, "i\\j": 5, 'k"l': 6, " ": 7, "m~n": 8, } RFC6901_TEST_CASES = [ Case(pointer="", want=RFC6901_DOCUMENT), Case(pointer="/foo", want=["bar", "baz"]), Case(pointer="/foo/0", want="bar"), Case(pointer="/", want=0), Case(pointer="/a~1b", want=1), Case(pointer="/c%d", want=2), Case(pointer="/e^f", want=3), Case(pointer="/g|h", want=4), Case(pointer=r"/i\\j", want=5), Case(pointer='/k"l', want=6), Case(pointer="/ ", want=7), Case(pointer="/m~0n", want=8), ] RFC6901_URI_TEST_CASES = [ Case("", want=RFC6901_DOCUMENT), Case("/foo", want=["bar", "baz"]), Case("/foo/0", want="bar"), Case("/", want=0), Case("/a~1b", want=1), Case("/c%25d", want=2), Case("/e%5Ef", want=3), Case("/g%7Ch", want=4), Case("/i%5Cj", want=5), Case("/k%22l", want=6), Case("/%20", want=7), Case("/m~0n", want=8), ] @pytest.mark.parametrize("case", RFC6901_TEST_CASES) def test_rfc6901_examples(case: Case) -> None: pointer = JSONPointer(case.pointer) assert pointer.resolve(RFC6901_DOCUMENT) == case.want @pytest.mark.parametrize("case", RFC6901_URI_TEST_CASES) def test_rfc6901_uri_examples(case: Case) -> None: pointer = JSONPointer(case.pointer, unicode_escape=False, uri_decode=True) assert pointer.resolve(RFC6901_DOCUMENT) == case.want jg-rp-python-jsonpath-830094f/tests/test_key_selector.py000066400000000000000000000025261512714264000234250ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/key_selector.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_key_selector(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_key_selector_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_key_selector_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_keys_filter_selector.py000066400000000000000000000025661512714264000251610ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/keys_filter_selector.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_filter_selector(env: JSONPathEnvironment, case: Case) -> 
None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_filter_selector_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_filter_selector_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_keys_function.py000066400000000000000000000027751512714264000236230ustar00rootroot00000000000000import dataclasses import operator from typing import Any from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment from jsonpath import function_extensions @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] TEST_CASES = [ Case( description="value in keys of an object", path="$.some[?'thing' in keys(@)]", data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="value not in keys of an object", path="$.some[?'else' in keys(@)]", data={"some": [{"thing": "foo"}]}, want=[], ), Case( description="keys of an array", path="$[?'thing' in keys(@)]", data={"some": [{"thing": "foo"}]}, want=[], ), Case( description="keys of an string value", path="$some[0].thing[?'else' in keys(@)]", data={"some": [{"thing": "foo"}]}, want=[], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: _env = JSONPathEnvironment() _env.function_extensions["keys"] = function_extensions.Keys() return _env @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_isinstance_function(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want jg-rp-python-jsonpath-830094f/tests/test_keys_selector.py000066400000000000000000000025321512714264000236050ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/keys_selector.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_selector(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_selector_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_keys_selector_fails_in_strict_mode(case: Case) -> None: env = 
JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_lex.json000066400000000000000000002146001512714264000220440ustar00rootroot00000000000000{ "description" : "Test cases for test_lex unit tests.", "tests": [ { "description": "just root", "path": "$", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$" } ] }, { "description": "just pseudo-root", "path": "^", "want": [ { "kind": "TOKEN_PSEUDO_ROOT", "value": "^", "index": 0, "path": "^" } ] }, { "description": "root dot property", "path": "$.some.thing", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.some.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.some.thing" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$.some.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 6, "path": "$.some.thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 7, "path": "$.some.thing" } ] }, { "description": "pseudo root dot property", "path": "^.some.thing", "want": [ { "kind": "TOKEN_PSEUDO_ROOT", "value": "^", "index": 0, "path": "^.some.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "^.some.thing" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "^.some.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 6, "path": "^.some.thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 7, "path": "^.some.thing" } ] }, { "description": "root bracket property", "path": "$[some][thing]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[some][thing]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[some][thing]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$[some][thing]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 6, "path": "$[some][thing]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 7, "path": "$[some][thing]" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 8, "path": "$[some][thing]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 13, "path": "$[some][thing]" } ] }, { "description": "root double quoted property", "path": "$[\"some\"]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[\"some\"]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[\"some\"]" }, { "kind": "TOKEN_DOUBLE_QUOTE_STRING", "value": "some", "index": 3, "path": "$[\"some\"]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 8, "path": "$[\"some\"]" } ] }, { "description": "root single quoted property", "path": "$['some']", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$['some']" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$['some']" }, { "kind": "TOKEN_SINGLE_QUOTE_STRING", "value": "some", "index": 3, "path": "$['some']" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 8, "path": "$['some']" } ] }, { "description": "root dot bracket property", "path": "$.[some][thing]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[some][thing]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[some][thing]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[some][thing]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 3, "path": "$.[some][thing]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 7, "path": "$.[some][thing]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 8, "path": "$.[some][thing]" }, { "kind": "TOKEN_NAME", 
"value": "thing", "index": 9, "path": "$.[some][thing]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 14, "path": "$.[some][thing]" } ] }, { "description": "root bracket index", "path": "$[1]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[1]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[1]" }, { "kind": "TOKEN_INT", "value": "1", "index": 2, "path": "$[1]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 3, "path": "$[1]" } ] }, { "description": "root dot bracket index", "path": "$.[1]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[1]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[1]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[1]" }, { "kind": "TOKEN_INT", "value": "1", "index": 3, "path": "$.[1]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 4, "path": "$.[1]" } ] }, { "description": "empty slice", "path": "[:]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[:]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 1, "path": "[:]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 2, "path": "[:]" } ] }, { "description": "empty slice empty step", "path": "[::]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[::]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 1, "path": "[::]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 2, "path": "[::]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 3, "path": "[::]" } ] }, { "description": "slice empty stop", "path": "[1:]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[1:]" }, { "kind": "TOKEN_INT", "value": "1", "index": 1, "path": "[1:]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 2, "path": "[1:]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 3, "path": "[1:]" } ] }, { "description": "slice empty start", "path": "[:-1]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[:-1]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 1, "path": "[:-1]" }, { "kind": "TOKEN_INT", "value": "-1", "index": 2, "path": "[:-1]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 4, "path": "[:-1]" } ] }, { "description": "slice start and stop", "path": "[1:7]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[1:7]" }, { "kind": "TOKEN_INT", "value": "1", "index": 1, "path": "[1:7]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 2, "path": "[1:7]" }, { "kind": "TOKEN_INT", "value": "7", "index": 3, "path": "[1:7]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 4, "path": "[1:7]" } ] }, { "description": "slice start, stop and step", "path": "[1:7:2]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[1:7:2]" }, { "kind": "TOKEN_INT", "value": "1", "index": 1, "path": "[1:7:2]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 2, "path": "[1:7:2]" }, { "kind": "TOKEN_INT", "value": "7", "index": 3, "path": "[1:7:2]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 4, "path": "[1:7:2]" }, { "kind": "TOKEN_INT", "value": "2", "index": 5, "path": "[1:7:2]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 6, "path": "[1:7:2]" } ] }, { "description": "root dot wild", "path": "$.*", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.*" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.*" }, { "kind": "TOKEN_WILD", "value": "*", "index": 2, "path": "$.*" } ] }, { "description": "root bracket 
wild", "path": "$[*]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[*]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[*]" }, { "kind": "TOKEN_WILD", "value": "*", "index": 2, "path": "$[*]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 3, "path": "$[*]" } ] }, { "description": "root dot bracket wild", "path": "$.[*]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[*]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[*]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[*]" }, { "kind": "TOKEN_WILD", "value": "*", "index": 3, "path": "$.[*]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 4, "path": "$.[*]" } ] }, { "description": "root descend", "path": "$..", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.." }, { "kind": "TOKEN_DDOT", "value": "..", "index": 1, "path": "$.." } ] }, { "description": "root descend property", "path": "$..thing", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$..thing" }, { "kind": "TOKEN_DDOT", "value": "..", "index": 1, "path": "$..thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 3, "path": "$..thing" } ] }, { "description": "root descend dot property", "path": "$...thing", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$...thing" }, { "kind": "TOKEN_DDOT", "value": "..", "index": 1, "path": "$...thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 3, "path": "$...thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 4, "path": "$...thing" } ] }, { "description": "root selector list of indices", "path": "$[1,4,5]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[1,4,5]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[1,4,5]" }, { "kind": "TOKEN_INT", "value": "1", "index": 2, "path": "$[1,4,5]" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 3, "path": "$[1,4,5]" }, { "kind": "TOKEN_INT", "value": "4", "index": 4, "path": "$[1,4,5]" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 5, "path": "$[1,4,5]" }, { "kind": "TOKEN_INT", "value": "5", "index": 6, "path": "$[1,4,5]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 7, "path": "$[1,4,5]" } ] }, { "description": "root selector list with a slice", "path": "$[1,4:9]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[1,4:9]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[1,4:9]" }, { "kind": "TOKEN_INT", "value": "1", "index": 2, "path": "$[1,4:9]" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 3, "path": "$[1,4:9]" }, { "kind": "TOKEN_INT", "value": "4", "index": 4, "path": "$[1,4:9]" }, { "kind": "TOKEN_COLON", "value": ":", "index": 5, "path": "$[1,4:9]" }, { "kind": "TOKEN_INT", "value": "9", "index": 6, "path": "$[1,4:9]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 7, "path": "$[1,4:9]" } ] }, { "description": "root selector list of properties", "path": "$[some,thing]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$[some,thing]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$[some,thing]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$[some,thing]" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 6, "path": "$[some,thing]" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 7, "path": "$[some,thing]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 12, "path": "$[some,thing]" } ] }, { "description": "root dot filter 
on self dot property", "path": "$.[?(@.some)]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 3, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 4, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 5, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 6, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 7, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 11, "path": "$.[?(@.some)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 12, "path": "$.[?(@.some)]" } ] }, { "description": "root dot filter on root dot property", "path": "$.[?($.some)]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[?($.some)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[?($.some)]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[?($.some)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 3, "path": "$.[?($.some)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 4, "path": "$.[?($.some)]" }, { "kind": "TOKEN_ROOT", "value": "$", "index": 5, "path": "$.[?($.some)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 6, "path": "$.[?($.some)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 7, "path": "$.[?($.some)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 11, "path": "$.[?($.some)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 12, "path": "$.[?($.some)]" } ] }, { "description": "root dot filter on self index", "path": "$.[?(@[1])]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 2, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 3, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 4, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 5, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 6, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_INT", "value": "1", "index": 7, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 8, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 9, "path": "$.[?(@[1])]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 10, "path": "$.[?(@[1])]" } ] }, { "description": "filter self dot property equality with float", "path": "[?(@.some == 1.1)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1.1)]" }, { "kind": 
"TOKEN_FLOAT", "value": "1.1", "index": 13, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 16, "path": "[?(@.some == 1.1)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 17, "path": "[?(@.some == 1.1)]" } ] }, { "description": "filter self dot property equality float in scientific notation", "path": "[?(@.some == 1.1e10)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_FLOAT", "value": "1.1e10", "index": 13, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 19, "path": "[?(@.some == 1.1e10)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 20, "path": "[?(@.some == 1.1e10)]" } ] }, { "description": "filter self index equality with float", "path": "[?(@[1] == 1.1)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 4, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 5, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 6, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 7, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 8, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 10, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_FLOAT", "value": "1.1", "index": 11, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 14, "path": "[?(@[1] == 1.1)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 15, "path": "[?(@[1] == 1.1)]" } ] }, { "description": "filter self dot property equality with int", "path": "[?(@.some == 1)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 13, "path": "[?(@.some == 1)]" }, { "kind": 
"TOKEN_RPAREN", "value": ")", "index": 14, "path": "[?(@.some == 1)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 15, "path": "[?(@.some == 1)]" } ] }, { "description": "filter self dot property equality with int in scientific notation", "path": "[?(@.some == 1e10)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_INT", "value": "1e10", "index": 13, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 17, "path": "[?(@.some == 1e10)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 18, "path": "[?(@.some == 1e10)]" } ] }, { "description": "filter self dot property regex", "path": "[?(@.some =~ /foo|bar/i)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_RE", "value": "=~", "index": 10, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_RE_PATTERN", "value": "foo|bar", "index": 14, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_RE_FLAGS", "value": "i", "index": 22, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 23, "path": "[?(@.some =~ /foo|bar/i)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 24, "path": "[?(@.some =~ /foo|bar/i)]" } ] }, { "description": "union of two paths", "path": "$.some | $.thing", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.some | $.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.some | $.thing" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$.some | $.thing" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 6, "path": "$.some | $.thing" }, { "kind": "TOKEN_UNION", "value": "|", "index": 7, "path": "$.some | $.thing" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 8, "path": "$.some | $.thing" }, { "kind": "TOKEN_ROOT", "value": "$", "index": 9, "path": "$.some | $.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 10, "path": "$.some | $.thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 11, "path": "$.some | $.thing" } ] }, { "description": "union of three paths", "path": "$.some | $.thing | $.other", "want": [ { "kind": 
"TOKEN_ROOT", "value": "$", "index": 0, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 6, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_UNION", "value": "|", "index": 7, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 8, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_ROOT", "value": "$", "index": 9, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_DOT", "value": ".", "index": 10, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 11, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 16, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_UNION", "value": "|", "index": 17, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 18, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_ROOT", "value": "$", "index": 19, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_DOT", "value": ".", "index": 20, "path": "$.some | $.thing | $.other" }, { "kind": "TOKEN_NAME", "value": "other", "index": 21, "path": "$.some | $.thing | $.other" } ] }, { "description": "intersection two paths", "path": "$.some & $.thing", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.some & $.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.some & $.thing" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$.some & $.thing" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 6, "path": "$.some & $.thing" }, { "kind": "TOKEN_INTERSECTION", "value": "&", "index": 7, "path": "$.some & $.thing" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 8, "path": "$.some & $.thing" }, { "kind": "TOKEN_ROOT", "value": "$", "index": 9, "path": "$.some & $.thing" }, { "kind": "TOKEN_DOT", "value": ".", "index": 10, "path": "$.some & $.thing" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 11, "path": "$.some & $.thing" } ] }, { "description": "filter expression with logical and", "path": "[?(@.some > 1 and @.some < 5)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_GT", "value": ">", "index": 10, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 11, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 12, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 13, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_AND", "value": "and", "index": 14, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": 
"[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 18, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 19, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 20, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 24, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_LT", "value": "<", "index": 25, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 26, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_INT", "value": "5", "index": 27, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 28, "path": "[?(@.some > 1 and @.some < 5)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 29, "path": "[?(@.some > 1 and @.some < 5)]" } ] }, { "description": "filter expression with logical or", "path": "[?(@.some == 1 or @.some == 5)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 13, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 14, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_OR", "value": "or", "index": 15, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 18, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 19, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 20, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 24, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 25, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 27, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_INT", "value": "5", "index": 28, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 29, "path": "[?(@.some == 1 or @.some == 5)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 30, "path": "[?(@.some == 1 or @.some == 5)]" } ] }, { "description": "filter expression with logical ||", "path": "[?(@.some == 1 || @.some == 5)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 
1 || @.some == 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 13, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 14, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_OR", "value": "||", "index": 15, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 18, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 19, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 20, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 24, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 25, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 27, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_INT", "value": "5", "index": 28, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 29, "path": "[?(@.some == 1 || @.some == 5)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 30, "path": "[?(@.some == 1 || @.some == 5)]" } ] }, { "description": "filter self dot property in list literal", "path": "[?(@.thing in [1, '1'])]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 5, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 10, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_IN", "value": "in", "index": 11, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 13, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 14, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_INT", "value": "1", "index": 15, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 16, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_SINGLE_QUOTE_STRING", "value": "1", "index": 19, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 21, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 22, "path": "[?(@.thing in [1, '1'])]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 23, "path": "[?(@.thing in [1, 
'1'])]" } ] }, { "description": "filter expression with logical not", "path": "[?(@.some == 1 or not @.some < 5)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_INT", "value": "1", "index": 13, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 14, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_OR", "value": "or", "index": 15, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_NOT", "value": "not", "index": 18, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 21, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 22, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 23, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 24, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 28, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_LT", "value": "<", "index": 29, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 30, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_INT", "value": "5", "index": 31, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 32, "path": "[?(@.some == 1 or not @.some < 5)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 33, "path": "[?(@.some == 1 or not @.some < 5)]" } ] }, { "description": "filter expression with logical not using '!'", "path": "[?(@.some == 1 or !@.some < 5)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 3, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 4, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 5, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 10, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 12, "path": "[?(@.some == 1 or !@.some < 5)]" }, { 
"kind": "TOKEN_INT", "value": "1", "index": 13, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 14, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_OR", "value": "or", "index": 15, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_NOT", "value": "!", "index": 18, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 19, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 20, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 21, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 25, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_LT", "value": "<", "index": 26, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 27, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_INT", "value": "5", "index": 28, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 29, "path": "[?(@.some == 1 or !@.some < 5)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 30, "path": "[?(@.some == 1 or !@.some < 5)]" } ] }, { "description": "filter true and false", "path": "[?(true == false)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(true == false)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(true == false)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(true == false)]" }, { "kind": "TOKEN_TRUE", "value": "true", "index": 3, "path": "[?(true == false)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 7, "path": "[?(true == false)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 8, "path": "[?(true == false)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 10, "path": "[?(true == false)]" }, { "kind": "TOKEN_FALSE", "value": "false", "index": 11, "path": "[?(true == false)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 16, "path": "[?(true == false)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 17, "path": "[?(true == false)]" } ] }, { "description": "filter true and false", "path": "[?(nil == none && nil == null)]", "want": [ { "kind": "TOKEN_LBRACKET", "value": "[", "index": 0, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 1, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 2, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_NIL", "value": "nil", "index": 3, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 6, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 7, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_NIL", "value": "none", "index": 10, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 14, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_AND", "value": "&&", "index": 15, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 17, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_NIL", "value": "nil", "index": 18, "path": 
"[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 21, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_EQ", "value": "==", "index": 22, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 24, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_NIL", "value": "null", "index": 25, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 29, "path": "[?(nil == none && nil == null)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 30, "path": "[?(nil == none && nil == null)]" } ] }, { "description": "list of quoted properties", "path": "$['some', 'thing']", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$['some', 'thing']" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 1, "path": "$['some', 'thing']" }, { "kind": "TOKEN_SINGLE_QUOTE_STRING", "value": "some", "index": 3, "path": "$['some', 'thing']" }, { "kind": "TOKEN_COMMA", "value": ",", "index": 8, "path": "$['some', 'thing']" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 9, "path": "$['some', 'thing']" }, { "kind": "TOKEN_SINGLE_QUOTE_STRING", "value": "thing", "index": 11, "path": "$['some', 'thing']" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 17, "path": "$['some', 'thing']" } ] }, { "description": "call a function", "path": "$.some[?(length(@.thing) < 2)]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_NAME", "value": "some", "index": 2, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 6, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_FILTER", "value": "?", "index": 7, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 8, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_FUNCTION", "value": "length", "index": 9, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_LPAREN", "value": "(", "index": 15, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_SELF", "value": "@", "index": 16, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 17, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 18, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 23, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 24, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_LT", "value": "<", "index": 25, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_WHITESPACE", "value": " ", "index": 26, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_INT", "value": "2", "index": 27, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_RPAREN", "value": ")", "index": 28, "path": "$.some[?(length(@.thing) < 2)]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 29, "path": "$.some[?(length(@.thing) < 2)]" } ] }, { "description": "keys selector", "path": "$.thing.~", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.thing.~" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.thing.~" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 2, "path": "$.thing.~" }, { "kind": "TOKEN_DOT", "value": ".", "index": 7, "path": "$.thing.~" }, { 
"kind": "TOKEN_KEYS", "value": "~", "index": 8, "path": "$.thing.~" } ] }, { "description": "keys in list selector", "path": "$.thing[~]", "want": [ { "kind": "TOKEN_ROOT", "value": "$", "index": 0, "path": "$.thing[~]" }, { "kind": "TOKEN_DOT", "value": ".", "index": 1, "path": "$.thing[~]" }, { "kind": "TOKEN_NAME", "value": "thing", "index": 2, "path": "$.thing[~]" }, { "kind": "TOKEN_LBRACKET", "value": "[", "index": 7, "path": "$.thing[~]" }, { "kind": "TOKEN_KEYS", "value": "~", "index": 8, "path": "$.thing[~]" }, { "kind": "TOKEN_RBRACKET", "value": "]", "index": 9, "path": "$.thing[~]" } ] }, { "description": "implicit root selector, name selector starts with `and`", "path": "anderson", "want": [ { "kind": "TOKEN_NAME", "value": "anderson", "index": 0, "path": "anderson" } ] }, { "description": "implicit root selector, name selector starts with `or`", "path": "order", "want": [ { "kind": "TOKEN_NAME", "value": "order", "index": 0, "path": "order" } ] }, { "description": "implicit root selector, name selector starts with `true`", "path": "trueblue", "want": [ { "kind": "TOKEN_NAME", "value": "trueblue", "index": 0, "path": "trueblue" } ] }, { "description": "implicit root selector, name selector starts with `false`", "path": "falsehood", "want": [ { "kind": "TOKEN_NAME", "value": "falsehood", "index": 0, "path": "falsehood" } ] }, { "description": "implicit root selector, name selector starts with `not`", "path": "nottingham", "want": [ { "kind": "TOKEN_NAME", "value": "nottingham", "index": 0, "path": "nottingham" } ] }, { "description": "implicit root selector, name selector starts with `null`", "path": "nullable", "want": [ { "kind": "TOKEN_NAME", "value": "nullable", "index": 0, "path": "nullable" } ] }, { "description": "implicit root selector, name selector starts with `none`", "path": "nonexpert", "want": [ { "kind": "TOKEN_NAME", "value": "nonexpert", "index": 0, "path": "nonexpert" } ] }, { "description": "implicit root selector, name selector starts with `undefined`", "path": "undefinedness", "want": [ { "kind": "TOKEN_NAME", "value": "undefinedness", "index": 0, "path": "undefinedness" } ] }, { "description": "implicit root selector, name selector starts with `missing`", "path": "missingly", "want": [ { "kind": "TOKEN_NAME", "value": "missingly", "index": 0, "path": "missingly" } ] } ] }jg-rp-python-jsonpath-830094f/tests/test_lex.py000066400000000000000000000014671512714264000215300ustar00rootroot00000000000000import json import operator from typing import Any from typing import Dict import pytest from jsonpath import DEFAULT_ENV from jsonpath.exceptions import JSONPathSyntaxError from jsonpath.token import Token with open("tests/test_lex.json", encoding="UTF-8") as fd: """Loads the test case data. 
Each test case is: description: str path: str want: List[Token] """ CASES = json.load(fd)["tests"] @pytest.mark.parametrize("case", CASES, ids=operator.itemgetter("description")) def test_default_lexer(case: Dict[str, Any]) -> None: tokens = list(DEFAULT_ENV.lexer.tokenize(case["path"])) want = [Token(**token) for token in case["want"]] assert tokens == want def test_illegal_token() -> None: with pytest.raises(JSONPathSyntaxError): list(DEFAULT_ENV.lexer.tokenize("%")) jg-rp-python-jsonpath-830094f/tests/test_match_api.py000066400000000000000000000026361512714264000226640ustar00rootroot00000000000000import jsonpath def test_match_string_repr() -> None: matches = list(jsonpath.finditer("$.*", ["thing"])) assert len(matches) == 1 assert str(matches[0]) == "'thing' @ $[0]" def test_truncate_match_string_repr() -> None: matches = list( jsonpath.finditer("$.*", ["something long that needs to be truncated"]) ) assert len(matches) == 1 assert str(matches[0]) == "'something long that needs to...' @ $[0]" def test_parent_match() -> None: matches = list(jsonpath.finditer("$.some.thing", {"some": {"thing": "else"}})) assert len(matches) == 1 assert matches[0].obj == "else" assert matches[0].parent is not None assert matches[0].parent.obj == {"thing": "else"} assert matches[0].parent.path == "$['some']" def test_match_parts() -> None: matches = list(jsonpath.finditer("$.some.thing", {"some": {"thing": "else"}})) assert len(matches) == 1 assert matches[0].obj == "else" assert matches[0].parts == ("some", "thing") def test_child_matches() -> None: matches = list(jsonpath.finditer("$.things.*", {"things": ["foo", "bar"]})) assert len(matches) == 2 # noqa: PLR2004 assert matches[0].obj == "foo" assert matches[1].obj == "bar" assert matches[0].parent is not None children = matches[0].parent.children assert len(children) == 2 # noqa: PLR2004 assert children[0].obj == "foo" assert children[1].obj == "bar" jg-rp-python-jsonpath-830094f/tests/test_membership_operators.py000066400000000000000000000025651512714264000251710ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/membership_operators.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_membership_operators(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_membership_operators_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_membership_operators_fail_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_nts.py000066400000000000000000000024571512714264000215440ustar00rootroot00000000000000"""Test Python JSONPath against the Normalized Path 
Test Suite.""" import json import operator from dataclasses import dataclass from typing import Any from typing import List import pytest import jsonpath @dataclass class NormalizedCase: name: str query: str document: Any paths: List[str] def normalized_cases() -> List[NormalizedCase]: with open("tests/nts/normalized_paths.json", encoding="utf8") as fd: data = json.load(fd) return [NormalizedCase(**case) for case in data["tests"]] @pytest.mark.parametrize("case", normalized_cases(), ids=operator.attrgetter("name")) def test_nts_normalized_paths(case: NormalizedCase) -> None: nodes = jsonpath.NodeList(jsonpath.finditer(case.query, case.document)) paths = nodes.paths() assert paths == case.paths @dataclass class CanonicalCase: name: str query: str canonical: str def canonical_cases() -> List[CanonicalCase]: with open("tests/nts/canonical_paths.json", encoding="utf8") as fd: data = json.load(fd) return [CanonicalCase(**case) for case in data["tests"]] @pytest.mark.parametrize("case", canonical_cases(), ids=operator.attrgetter("name")) def test_nts_canonical_paths(case: CanonicalCase) -> None: query = jsonpath.compile(case.query) assert str(query) == case.canonical jg-rp-python-jsonpath-830094f/tests/test_parse.py000066400000000000000000000145621512714264000220520ustar00rootroot00000000000000import dataclasses import operator import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str want: str TEST_CASES = [ Case(description="empty", path="", want="$"), Case(description="just root", path="$", want="$"), Case(description="implicit root dot property", path=".thing", want="$['thing']"), Case(description="root dot property", path="$.thing", want="$['thing']"), Case( description="root double quoted property", path='$["thing"]', want="$['thing']" ), Case( description="root single quoted property", path="$['thing']", want="$['thing']" ), Case( description="root quoted property with non-ident chars", path="$['anything{!%']", want="$['anything{!%']", ), Case(description="root bracket index", path="$[1]", want="$[1]"), Case(description="root slice", path="$[1:-1]", want="$[1:-1:1]"), Case(description="root slice with step", path="$[1:-1:2]", want="$[1:-1:2]"), Case(description="root slice with empty start", path="$[:-1]", want="$[:-1:1]"), Case(description="root slice with empty stop", path="$[1:]", want="$[1::1]"), Case(description="root dot wild", path="$.*", want="$[*]"), Case(description="root bracket wild", path="$[*]", want="$[*]"), Case(description="root selector list", path="$[1,2]", want="$[1, 2]"), Case( description="root selector list with slice", path="$[1,5:-1:1]", want="$[1, 5:-1:1]", ), Case( description="root selector list with quoted properties", path="$[\"some\",'thing']", want="$['some', 'thing']", ), Case( description="filter self dot property", path="[?(@.thing)]", want="$[?@['thing']]", ), Case( description="filter root dot property", path="$.some[?($.thing)]", want="$['some'][?$['thing']]", ), Case( description="filter with equality test", path="$.some[?(@.thing == 7)]", want="$['some'][?@['thing'] == 7]", ), Case( description="filter with >=", path="$.some[?(@.thing >= 7)]", want="$['some'][?@['thing'] >= 7]", ), Case( description="filter with >=", path="$.some[?(@.thing >= 7)]", want="$['some'][?@['thing'] >= 7]", ), Case( description="filter with !=", path="$.some[?(@.thing != 7)]", want="$['some'][?@['thing'] != 7]", ), Case( description="filter with <>", path="$.some[?(@.thing != 7)]", want="$['some'][?@['thing'] != 
7]", ), Case( description="filter with regex", path="$.some[?(@.thing =~ /(foo|bar)/i)]", want="$['some'][?@['thing'] =~ /(foo|bar)/i]", ), Case( description="filter with list membership test", path="$.some[?(@.thing in ['foo', 'bar', 42])]", want="$['some'][?@['thing'] in ['foo', 'bar', 42]]", ), Case( description="filter with boolean literals", path="$.some[?(true == false)]", want="$['some'][?true == false]", ), Case( description="filter with nil literal", path="$.some[?(@.thing == nil)]", want="$['some'][?@['thing'] == nil]", ), Case( description="null is the same as nil", path="$.some[?(@.thing == null)]", want="$['some'][?@['thing'] == nil]", ), Case( description="none is the same as nil", path="$.some[?(@.thing == none)]", want="$['some'][?@['thing'] == nil]", ), Case( description="filter with test for undefined", path="$.some[?(@.thing == undefined)]", want="$['some'][?@['thing'] == undefined]", ), Case( description="missing is the same as undefined", path="$.some[?(@.thing == missing)]", want="$['some'][?@['thing'] == undefined]", ), Case( description="filter with string literal", path="$.some[?(@.thing == 'foo')]", want="$['some'][?@['thing'] == 'foo']", ), Case( description="filter with integer literal", path="$.some[?(@.thing == 1)]", want="$['some'][?@['thing'] == 1]", ), Case( description="filter with float literal", path="$.some[?(@.thing == 1.1)]", want="$['some'][?@['thing'] == 1.1]", ), Case( description="filter with logical not", path="$.some[?(@.thing > 1 and not $.other)]", want="$['some'][?@['thing'] > 1 && !$['other']]", ), Case( description="filter with grouped expression", path="$.some[?(@.thing > 1 and ($.foo or $.bar))]", want="$['some'][?@['thing'] > 1 && ($['foo'] || $['bar'])]", ), Case( description="keys selector", path="$.some.~", want="$['some'][~]", ), Case( description="current key identifier", path="$[?# > 2]", want="$[?# > 2]", ), Case( description="comparison to single quoted string literal with escape", path="$[?@.foo == 'ba\\'r']", want="$[?@['foo'] == 'ba\\'r']", ), Case( description="comparison to double quoted string literal with escape", path='$[?@.foo == "ba\\"r"]', want="$[?@['foo'] == 'ba\"r']", ), Case( description="not binds more tightly than or", path="$[?!@.a || !@.b]", want="$[?!@['a'] || !@['b']]", ), Case( description="not binds more tightly than and", path="$[?!@.a && !@.b]", want="$[?!@['a'] && !@['b']]", ), Case( description="control precedence with parens", path="$[?!(@.a && !@.b)]", want="$[?!(@['a'] && !@['b'])]", ), Case( description="issue 70", path=r"$[?@ =~ /\d/]", want="$[?@ =~ /\\d/]", ), Case( description="escaped slash in regex literal", path=r"$[?@ =~ /\\d/]", want="$[?@ =~ /\\\\d/]", ), Case( description="match function", path=r"$[?match(@, '\\d')]", want="$[?match(@, '\\\\d')]", ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_default_parser(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert str(path) == case.want jg-rp-python-jsonpath-830094f/tests/test_parse_compound_path.py000066400000000000000000000021251512714264000247620ustar00rootroot00000000000000import dataclasses import operator import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str want: str TEST_CASES = [ Case( description="union of two paths", path="$.some | $.thing", want="$['some'] | $['thing']", ), Case( description="union of three 
paths", path="$.some | $.thing | [0]", want="$['some'] | $['thing'] | $[0]", ), Case( description="intersection of two paths", path="$.some.* & $.thing.*", want="$['some'][*] & $['thing'][*]", ), Case( description="intersection then union", path="$.some.* & $.thing.* | $.other", want="$['some'][*] & $['thing'][*] | $['other']", ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_parse_compound_path(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert str(path) == case.want jg-rp-python-jsonpath-830094f/tests/test_pseudo_root_identifier.py000066400000000000000000000025761512714264000255060ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/pseudo_root_identifier.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_pseudo_root_identifier(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_pseudo_root_identifier_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_pseudo_root_identifier_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_query_intersection.py000066400000000000000000000026171512714264000246710ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/query_intersection.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_intersection_operator(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_intersection_operator_async( env: JSONPathEnvironment, case: Case ) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_intersection_operator_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): 
env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_query_projection.py000066400000000000000000000121741512714264000243360ustar00rootroot00000000000000"""Test cases for the fluent API projection.""" from typing import Any from typing import List import jsonpath from jsonpath import Projection def test_select_from_primitive() -> None: expr = "$[0].a" data = [{"a": 1, "b": 1}, {"a": 2, "b": 2}, {"b": 3, "a": 3}] projection = ("nosuchthing",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [] def test_top_level_array() -> None: expr = "$.*" data = [{"a": 1, "b": 1}, {"a": 2, "b": 2}, {"b": 3, "a": 3}] projection = ("a",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"a": 1}, {"a": 2}, {"a": 3}] def test_top_level_array_flat_projection() -> None: expr = "$.*" data = [{"a": 1, "b": 1}, {"a": 2, "b": 2}, {"b": 3, "a": 3}] projection = ("a",) it = jsonpath.query(expr, data).select(*projection, projection=Projection.FLAT) assert list(it) == [[1], [2], [3]] def test_top_level_array_partial_existence() -> None: expr = "$.*" data = [{"a": 1, "b": 1}, {"b": 2}, {"b": 3, "a": 3}] projection = ("a",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"a": 1}, {"a": 3}] def test_top_level_array_projection_does_not_existence() -> None: expr = "$.*" data = [{"a": 1, "b": 1}, {"b": 2}, {"b": 3, "a": 3}] projection = ("x",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [] def test_empty_top_level_array() -> None: expr = "$.*" data: List[Any] = [] projection = ("a",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [] def test_top_level_array_select_many() -> None: expr = "$.*" data = [{"a": 1, "b": 1, "c": 1}, {"a": 2, "b": 2, "c": 2}, {"b": 3, "a": 3}] projection = ("a", "c") it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"a": 1, "c": 1}, {"a": 2, "c": 2}, {"a": 3}] def test_singular_query() -> None: expr = "$.a" data = {"a": {"foo": 42, "bar": 7}, "b": 1} projection = ("foo",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"foo": 42}] def test_select_array_element() -> None: expr = "$.a" data = {"a": {"foo": [42, 7], "bar": 7}, "b": 1} projection = ("foo[0]",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"foo": [42]}] def test_select_array_slice() -> None: expr = "$.a" data = {"a": {"foo": [1, 2, 42, 7, 3], "bar": 7}, "b": 1} projection = ("foo[2:4]",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"foo": [42, 7]}] def test_select_nested_objects() -> None: expr = "$.a" data = {"a": {"foo": {"bar": 42}, "bar": 7}, "b": 1} projection = ("foo.bar",) it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"foo": {"bar": 42}}] def test_select_nested_objects_root_projection() -> None: expr = "$.a" data = {"a": {"foo": {"bar": 42}, "bar": 7}, "b": 1} projection = ("foo.bar",) it = jsonpath.query(expr, data).select(*projection, projection=Projection.ROOT) assert list(it) == [{"a": {"foo": {"bar": 42}}}] def test_select_nested_objects_flat_projection() -> None: expr = "$.a" data = {"a": {"foo": {"bar": 42}, "bar": 7}, "b": 1} projection = ("foo.bar",) it = jsonpath.query(expr, data).select(*projection, projection=Projection.FLAT) assert list(it) == [[42]] def test_sparse_array_selection() -> None: expr = "$..products[?@.social]" data = { "categories": [ { "name": "footwear", "products": [ { "title": "Trainers", "description": "Fashionable trainers.", "price": 
89.99, }, { "title": "Barefoot Trainers", "description": "Running trainers.", "price": 130.00, }, ], }, { "name": "headwear", "products": [ { "title": "Cap", "description": "Baseball cap", "price": 15.00, }, { "title": "Beanie", "description": "Winter running hat.", "price": 9.00, "social": {"likes": 12, "shares": 7}, }, ], }, ], "price_cap": 10, } it = jsonpath.query(expr, data).select( "title", "social.shares", projection=Projection.ROOT, ) assert list(it) == [ {"categories": [{"products": [{"title": "Beanie", "social": {"shares": 7}}]}]} ] def test_pre_compiled_select_many() -> None: expr = "$.*" data = [{"a": 1, "b": 1, "c": 1}, {"a": 2, "b": 2, "c": 2}, {"b": 3, "a": 3}] projection = (jsonpath.compile("a"), "c") it = jsonpath.query(expr, data).select(*projection) assert list(it) == [{"a": 1, "c": 1}, {"a": 2, "c": 2}, {"a": 3}] jg-rp-python-jsonpath-830094f/tests/test_query_union.py000066400000000000000000000025551512714264000233140ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/query_union.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_union_operator(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_union_operator_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_query_union_operator_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_regex_cache.py000066400000000000000000000036331512714264000231720ustar00rootroot00000000000000try: import regex as re REGEX_AVAILABLE = True except ImportError: import re # type: ignore REGEX_AVAILABLE = False try: import iregexp_check # noqa: F401 IREGEXP_AVAILABLE = True except ImportError: IREGEXP_AVAILABLE = False import pytest from jsonpath import JSONPathError from jsonpath.function_extensions import Search def test_patterns_are_cached() -> None: search_func = Search(cache_capacity=2) assert len(search_func.cache) == 0 assert search_func("abcdef", "bc.") assert len(search_func.cache) == 1 def test_malformed_patterns_are_cached() -> None: search_func = Search(cache_capacity=2) assert len(search_func.cache) == 0 assert search_func("abcdef", "bc[") is False assert len(search_func.cache) == 1 assert search_func.cache["bc["] is None @pytest.mark.skipif(IREGEXP_AVAILABLE is False, reason="requires iregexp_check") def test_invalid_iregexp_patterns_are_cached() -> None: search_func = Search(cache_capacity=2) assert len(search_func.cache) == 0 assert search_func("ab123cdef", "\\d+") is False assert len(search_func.cache) == 1 assert search_func.cache["\\d+"] is None def test_debug_regex_patterns() -> None: 
search_func = Search(cache_capacity=2, debug=True) assert len(search_func.cache) == 0 with pytest.raises((JSONPathError, re.error)): search_func("abcdef", "bc[") def test_cache_capacity() -> None: search_func = Search(cache_capacity=2) assert len(search_func.cache) == 0 assert search_func("1abcdef", "ab[a-z]") assert len(search_func.cache) == 1 assert search_func("2abcdef", "bc[a-z]") assert len(search_func.cache) == 2 # noqa: PLR2004 assert search_func("3abcdef", "cd[a-z]") assert len(search_func.cache) == 2 # noqa: PLR2004 assert "cd[a-z]" in search_func.cache assert "bc[a-z]" in search_func.cache assert "ab[a-z]" not in search_func.cache jg-rp-python-jsonpath-830094f/tests/test_regex_operator.py000066400000000000000000000025361512714264000237630ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/regex_operator.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_regex_operator(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_regex_operator_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_regex_operator_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_relative_json_pointer.py000066400000000000000000000023141512714264000253340ustar00rootroot00000000000000"""Relative JSON Pointer test cases.""" import pytest from jsonpath import JSONPointer from jsonpath import RelativeJSONPointer from jsonpath import RelativeJSONPointerIndexError from jsonpath import RelativeJSONPointerSyntaxError def test_syntax_error() -> None: with pytest.raises(RelativeJSONPointerSyntaxError): RelativeJSONPointer("foo") def test_origin_leading_zero() -> None: with pytest.raises(RelativeJSONPointerSyntaxError): RelativeJSONPointer("01") def test_origin_beyond_pointer() -> None: pointer = JSONPointer("/foo/bar/0") rel = RelativeJSONPointer("9/foo") with pytest.raises(RelativeJSONPointerIndexError): rel.to(pointer) def test_equality() -> None: rel = RelativeJSONPointer("1/foo") assert rel == RelativeJSONPointer("1/foo") def test_zero_index_offset() -> None: with pytest.raises(RelativeJSONPointerSyntaxError): RelativeJSONPointer("1-0") with pytest.raises(RelativeJSONPointerSyntaxError): RelativeJSONPointer("1+0") def test_negative_index_offset() -> None: pointer = JSONPointer("/foo/1") rel = RelativeJSONPointer("0-2") with pytest.raises(RelativeJSONPointerIndexError): rel.to(pointer) jg-rp-python-jsonpath-830094f/tests/test_relative_json_pointer_ietf.py000066400000000000000000000057511512714264000263530ustar00rootroot00000000000000"""Test cases from draft-hha-relative-json-pointer-00. 
The test cases defined here are taken from draft-hha-relative-json-pointer-00. The appropriate Revised BSD License is included below. Copyright (c) 2023 IETF Trust and the persons identified as authors of the code. All rights reserved.Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of Internet Society, IETF or IETF Trust, nor the names of specific contributors, may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ import dataclasses import pytest import jsonpath @dataclasses.dataclass class Case: pointer: str rel: str want: object DOCUMENT = {"foo": ["bar", "baz", "biz"], "highly": {"nested": {"objects": True}}} TEST_CASES = [ Case(pointer="/foo/1", rel="0", want="baz"), Case(pointer="/foo/1", rel="1/0", want="bar"), Case(pointer="/foo/1", rel="0-1", want="bar"), Case(pointer="/foo/1", rel="2/highly/nested/objects", want=True), Case(pointer="/foo/1", rel="0#", want=1), Case(pointer="/foo/1", rel="0+1#", want=2), Case(pointer="/foo/1", rel="1#", want="foo"), Case(pointer="/highly/nested", rel="0/objects", want=True), Case(pointer="/highly/nested", rel="1/nested/objects", want=True), Case(pointer="/highly/nested", rel="2/foo/0", want="bar"), Case(pointer="/highly/nested", rel="0#", want="nested"), Case(pointer="/highly/nested", rel="1#", want="highly"), ] @pytest.mark.parametrize("case", TEST_CASES) def test_relative_pointer_ietf_examples(case: Case) -> None: pointer = jsonpath.JSONPointer(case.pointer) rel = jsonpath.RelativeJSONPointer(case.rel) rel_pointer = rel.to(pointer) assert rel_pointer.resolve(DOCUMENT) == case.want assert pointer.to(case.rel) == rel_pointer assert str(rel) == case.rel jg-rp-python-jsonpath-830094f/tests/test_singular_path_selector.py000066400000000000000000000025761512714264000255020ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/singular_path_selector.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def 
test_singular_path_selector(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_singular_path_selector_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_singular_path_selector_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_startswith_function.py000066400000000000000000000024101512714264000250460ustar00rootroot00000000000000import dataclasses import operator from typing import Any from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] TEST_CASES = [ Case( description="current value start with string", path="$[?startswith(@, 'ab')]", data={"x": "abc", "y": "abx", "z": "bcd", "-": "ab"}, want=["abc", "abx", "ab"], ), Case( description="current key start with string", path="$[?startswith(#, 'ab')]", data={"abc": 1, "abx": 2, "bcd": 3, "ab": 4}, want=[1, 2, 4], ), Case( description="value is not a string", path="$[?startswith(@, 'ab')]", data={"abc": 1, "abx": 2, "bcd": 3, "ab": 4}, want=[], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_startswith_function(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want jg-rp-python-jsonpath-830094f/tests/test_strictness.py000066400000000000000000000055751512714264000231410ustar00rootroot00000000000000import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathNameError @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) def test_leading_whitespace(env: JSONPathEnvironment) -> None: query = " $.a" data = {"a": 1} assert env.findall(query, data) == [1] def test_trailing_whitespace(env: JSONPathEnvironment) -> None: query = "$.a " data = {"a": 1} assert env.findall(query, data) == [1] def test_index_as_object_name(env: JSONPathEnvironment) -> None: query = "$.a[0]" data = {"a": {"0": 1}} assert env.findall(query, data) == [1] def test_alternative_and(env: JSONPathEnvironment) -> None: query = "$[?@.a and @.b]" data = [{"a": True, "b": False}] assert env.findall(query, data) == [{"a": True, "b": False}] def test_alternative_or(env: JSONPathEnvironment) -> None: query = "$[?@.a or @.c]" data = [{"a": True, "b": False}, {"c": 99}] assert env.findall(query, data) == [{"a": True, "b": False}, {"c": 99}] def test_alternative_null(env: JSONPathEnvironment) -> None: query = "$[?@.a==Null]" data = [{"a": None, "d": "e"}, {"a": "c", "d": "f"}] assert env.findall(query, data) == [{"a": None, "d": "e"}] def test_none(env: JSONPathEnvironment) -> None: query = "$[?@.a==None]" data = [{"a": None, "d": "e"}, {"a": "c", "d": "f"}] assert
env.findall(query, data) == [{"a": None, "d": "e"}] def test_implicit_root_identifier( env: JSONPathEnvironment, ) -> None: query = "a['p']" data = { "a": {"j": [1, 2, 3], "p": {"q": [4, 5, 6]}}, "b": ["j", "p", "q"], } assert env.findall(query, data) == [{"q": [4, 5, 6]}] def test_singular_path_selector_without_root_identifier( env: JSONPathEnvironment, ) -> None: query = "$.a[b[1]]" data = { "a": {"j": [1, 2, 3], "p": {"q": [4, 5, 6]}}, "b": ["j", "p", "q"], "c d": {"x": {"y": 1}}, } assert env.findall(query, data) == [{"q": [4, 5, 6]}] def test_isinstance_is_disabled_in_strict_mode() -> None: env = JSONPathEnvironment(strict=True) query = "$.some[?is(@.thing, 'string')]" with pytest.raises(JSONPathNameError): env.compile(query) query = "$.some[?isinstance(@.thing, 'string')]" with pytest.raises(JSONPathNameError): env.compile(query) def test_typeof_is_disabled_in_strict_mode() -> None: env = JSONPathEnvironment(strict=True) query = "$.some[?type(@.thing) == 'string']" with pytest.raises(JSONPathNameError): env.compile(query) query = "$.some[?typeof(@.thing) == 'string']" with pytest.raises(JSONPathNameError): env.compile(query) def test_startswith_is_disabled_in_strict_mode() -> None: env = JSONPathEnvironment(strict=True) query = "$[?startswith(@, 'ab')]" with pytest.raises(JSONPathNameError): env.compile(query) jg-rp-python-jsonpath-830094f/tests/test_typeof_function.py000066400000000000000000000056721512714264000241550ustar00rootroot00000000000000import asyncio import dataclasses import operator from typing import Any from typing import List from typing import Mapping from typing import Sequence from typing import Union import pytest from jsonpath import JSONPathEnvironment @dataclasses.dataclass class Case: description: str path: str data: Union[Sequence[Any], Mapping[str, Any]] want: Union[Sequence[Any], Mapping[str, Any]] SOME_OBJECT = object() TEST_CASES = [ Case( description="type of a string", path="$.some[?type(@.thing) == 'string']", data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="not a string", path="$.some[?type(@.thing) == 'string']", data={"some": [{"thing": 1}]}, want=[], ), Case( description="type of undefined", path="$.some[?type(@.other) == 'undefined']", # things without `other` data={"some": [{"thing": "foo"}]}, want=[{"thing": "foo"}], ), Case( description="type of None", path="$.some[?type(@.thing) == 'null']", data={"some": [{"thing": None}]}, want=[{"thing": None}], ), Case( description="type of array-like", path="$.some[?type(@.thing) == 'array']", data={"some": [{"thing": [1, 2, 3]}]}, want=[{"thing": [1, 2, 3]}], ), Case( description="type of mapping", path="$.some[?type(@.thing) == 'object']", data={"some": [{"thing": {"other": 1}}]}, want=[{"thing": {"other": 1}}], ), Case( description="type of bool", path="$.some[?type(@.thing) == 'boolean']", data={"some": [{"thing": True}]}, want=[{"thing": True}], ), Case( description="type of int", path="$.some[?type(@.thing) == 'number']", data={"some": [{"thing": 1}]}, want=[{"thing": 1}], ), Case( description="type of float", path="$.some[?type(@.thing) == 'number']", data={"some": [{"thing": 1.1}]}, want=[{"thing": 1.1}], ), Case( description="none of the above", path="$.some[?type(@.thing) == 'object']", data={"some": [{"thing": SOME_OBJECT}]}, want=[{"thing": SOME_OBJECT}], ), ] @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment() @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_typeof_function(env: 
JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) assert path.findall(case.data) == case.want @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_typeof_function_async(env: JSONPathEnvironment, case: Case) -> None: path = env.compile(case.path) async def coro() -> List[object]: return await path.findall_async(case.data) assert asyncio.run(coro()) == case.want # TODO: test single_number_type is False jg-rp-python-jsonpath-830094f/tests/test_undefined.py000066400000000000000000000025501512714264000226730ustar00rootroot00000000000000import asyncio import json import operator import pytest from jsonpath import JSONPathEnvironment from jsonpath import JSONPathSyntaxError from jsonpath import NodeList from ._cts_case import Case @pytest.fixture() def env() -> JSONPathEnvironment: return JSONPathEnvironment(strict=False) with open("tests/undefined.json", encoding="utf8") as fd: data = [Case(**case) for case in json.load(fd)["tests"]] @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_undefined_keyword(env: JSONPathEnvironment, case: Case) -> None: assert case.document is not None nodes = NodeList(env.finditer(case.selector, case.document)) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_undefined_keyword_async(env: JSONPathEnvironment, case: Case) -> None: async def coro() -> NodeList: assert case.document is not None it = await env.finditer_async(case.selector, case.document) return NodeList([node async for node in it]) nodes = asyncio.run(coro()) case.assert_nodes(nodes) @pytest.mark.parametrize("case", data, ids=operator.attrgetter("name")) def test_comparison_to_undefined_fails_in_strict_mode(case: Case) -> None: env = JSONPathEnvironment(strict=True) with pytest.raises(JSONPathSyntaxError): env.compile(case.selector) jg-rp-python-jsonpath-830094f/tests/test_walk_filter_expression_tree.py000066400000000000000000000034061512714264000265340ustar00rootroot00000000000000"""Test that we can traverse filter expression trees.""" import dataclasses import operator from typing import List import pytest import jsonpath from jsonpath.filter import BaseExpression from jsonpath.filter import walk from jsonpath.selectors import Filter as FilterSelector @dataclasses.dataclass class Case: description: str path: str want: bool TEST_CASES = [ Case( description="boolean self path", path="$some.thing[?@.foo]", want=True, ), Case( description="infix left self path", path="$some.thing[?@.foo > $.bar]", want=True, ), Case( description="infix left self path", path="$some.thing[?$.bar == @.foo]", want=True, ), Case( description="nested filter self path", path="$some.thing[?$.bar[?@.foo > 1]]", want=True, ), Case( description="self path as filter function argument", path="$some.thing[?match(@.foo, '^bar.+')]", want=True, ), Case( description="boolean root path", path="$some.thing[?$.foo]", want=False, ), ] def is_volatile(expr: BaseExpression) -> bool: return any(expr.volatile for expr in walk(expr)) @pytest.mark.parametrize("case", TEST_CASES, ids=operator.attrgetter("description")) def test_is_volatile(case: Case) -> None: path = jsonpath.compile(case.path) assert isinstance(path, jsonpath.JSONPath) filter_selectors: List[FilterSelector] = [] for segment in path.segments: for selector in segment.selectors: if isinstance(selector, FilterSelector): filter_selectors.append(selector) assert len(filter_selectors) == 1 assert is_volatile(filter_selectors[0].expression) is 
case.want jg-rp-python-jsonpath-830094f/tests/undefined.json000066400000000000000000000030631512714264000221550ustar00rootroot00000000000000{ "tests": [ { "name": "explicit comparison to undefined", "selector": "$[?@.a == undefined]", "document": [ { "a": "b", "d": "e" }, { "b": "c", "d": "f" } ], "result": [ { "b": "c", "d": "f" } ], "result_paths": ["$[1]"], "tags": ["extra"] }, { "name": "explicit comparison to missing", "selector": "$[?@.a == missing]", "document": [ { "a": "b", "d": "e" }, { "b": "c", "d": "f" } ], "result": [ { "b": "c", "d": "f" } ], "result_paths": ["$[1]"], "tags": ["extra"] }, { "name": "explicit undefined is on the left", "selector": "$[?undefined == @.a]", "document": [ { "a": "b", "d": "e" }, { "b": "c", "d": "f" } ], "result": [ { "b": "c", "d": "f" } ], "result_paths": ["$[1]"], "tags": ["extra"] }, { "name": "not equal to undefined", "selector": "$[?@.a != undefined]", "document": [ { "a": "b", "d": "e" }, { "b": "c", "d": "f" } ], "result": [ { "a": "b", "d": "e" } ], "result_paths": ["$[0]"], "tags": ["extra"] } ] }
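The fixture above drives tests/test_undefined.py. A minimal sketch follows (not a file from this repository); it replays the first fixture case using only API already exercised by these tests, with the selector, document, and expected result taken verbatim from undefined.json.

# Minimal sketch, not part of the repository: reproduces the
# "explicit comparison to undefined" case from undefined.json.
import pytest
from jsonpath import JSONPathEnvironment
from jsonpath import JSONPathSyntaxError

document = [{"a": "b", "d": "e"}, {"b": "c", "d": "f"}]
selector = "$[?@.a == undefined]"

# In non-strict mode, comparing to the non-standard `undefined` keyword
# selects members where `@.a` does not exist.
env = JSONPathEnvironment(strict=False)
assert env.findall(selector, document) == [{"b": "c", "d": "f"}]

# In strict (RFC 9535) mode the same selector is rejected at compile time,
# mirroring test_comparison_to_undefined_fails_in_strict_mode above.
strict_env = JSONPathEnvironment(strict=True)
with pytest.raises(JSONPathSyntaxError):
    strict_env.compile(selector)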