pax_global_header00006660000000000000000000000064141577415530014526gustar00rootroot0000000000000052 comment=6ed2ef48ed38679bcdafe7cae250a2ef4b315e7b rdflib-6.1.1/000077500000000000000000000000001415774155300127755ustar00rootroot00000000000000rdflib-6.1.1/.dockerignore000066400000000000000000000000341415774155300154460ustar00rootroot00000000000000.tox .venv .mypy_cache .git rdflib-6.1.1/.drone.yml000066400000000000000000000047151415774155300147140ustar00rootroot00000000000000--- kind: pipeline name: python-3-7 type: docker platform: os: linux arch: amd64 steps: - name: test image: python:3.7 environment: COVERALLS_SERVICE_NAME: RDFLib-Drone COVERALLS_REPO_TOKEN: from_secret: coveralls_token commands: - export COVERALLS_SERVICE_NUMBER="$DRONE_BUILD_NUMBER" - export COVERALLS_SERVICE_JOB_ID="$DRONE_STAGE_NAME" - export COVERALLS_SERVICE_JOB_NUMBER="$DRONE_BUILD_NUMBER" - export COVERALLS_FLAG_NAME="$DRONE_STAGE_KIND" - export COVERALLS_GIT_REPO="$DRONE_REPO_NAME" - export COVERALLS_GIT_BRANCH="$DRONE_SOURCE_BRANCH" - export CI_BRANCH="$DRONE_SOURCE_BRANCH" - apt-get update && apt-get install -y openjdk-11-jdk-headless - pip install --default-timeout 60 -r requirements.txt - pip install --default-timeout 60 -r requirements.dev.txt - pip install --default-timeout 60 coveralls && export HAS_COVERALLS=1 - python setup.py install - black --config black.toml --check ./rdflib || true - flake8 --exit-zero rdflib - mypy --show-error-context --show-error-codes rdflib - ./with-fuseki.sh pytest -ra --cov - coveralls --- kind: pipeline name: python-3-8 type: docker platform: os: linux arch: amd64 steps: - name: test image: python:3.8 commands: - apt-get update && apt-get install -y openjdk-11-jdk-headless - pip install --default-timeout 60 -r requirements.txt - pip install --default-timeout 60 -r requirements.dev.txt - python setup.py install - black --config black.toml --check ./rdflib || true - flake8 --exit-zero rdflib - ./with-fuseki.sh pytest -ra --- kind: pipeline name: 
python-3-9 type: docker platform: os: linux arch: amd64 steps: - name: test image: python:3.9 commands: - apt-get update && apt-get install -y openjdk-11-jdk-headless - pip install --default-timeout 60 -r requirements.txt - pip install --default-timeout 60 -r requirements.dev.txt - python setup.py install - black --config black.toml --check ./rdflib || true - flake8 --exit-zero rdflib - ./with-fuseki.sh pytest -ra --- kind: pipeline name: python-3-10 type: docker platform: os: linux arch: amd64 steps: - name: test image: python:3.10 commands: - apt-get update && apt-get install -y openjdk-11-jdk-headless - pip install --default-timeout 60 -r requirements.txt - pip install --default-timeout 60 -r requirements.dev.txt - python setup.py install - black --config black.toml --check ./rdflib | true - flake8 --exit-zero rdflib - ./with-fuseki.sh pytest -ra rdflib-6.1.1/.editorconfig000066400000000000000000000012521415774155300154520ustar00rootroot00000000000000# EditorConfig is awesome: # http://EditorConfig.org # top-most EditorConfig file root = true # Unix-style newlines with a newline ending every file [*] end_of_line = lf insert_final_newline = true trim_trailing_whitespace = true # Leave line endings as-is in Markdown and ReStructuredText files [*.{md, rst}] charset = utf-8 trim_trailing_whitespace = false # Matches multiple files with brace expansion notation # Set default charset [*.{js, py, pyi, toml, yml, yaml}] charset = utf-8 [*.{yaml, yml}] indent_style = space indent_size = 2 # 4 space indentation [*.py] indent_style = space indent_size = 4 max_line_length = 88 # tab indentation [Makefile] indent_style = tab rdflib-6.1.1/.flake8000066400000000000000000000004601415774155300141500ustar00rootroot00000000000000# https://flake8.pycqa.org/en/latest/user/configuration.html [flake8] extend-ignore = # E501: line too long # Disabled so that black can control line length. 
E501, # Ignored since this is soon not going to be considered an error, see https://www.flake8rules.com/rules/W503.html W503, rdflib-6.1.1/.github/000077500000000000000000000000001415774155300143355ustar00rootroot00000000000000rdflib-6.1.1/.github/dependabot.yml000066400000000000000000000003121415774155300171610ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: weekly open-pull-requests-limit: 10 ignore: - dependency-name: sphinx versions: - 3.4.3 - 3.5.2 rdflib-6.1.1/.github/workflows/000077500000000000000000000000001415774155300163725ustar00rootroot00000000000000rdflib-6.1.1/.github/workflows/validate.yaml000066400000000000000000000046751415774155300210630ustar00rootroot00000000000000name: Validate on: push: branches: ["master"] pull_request: workflow_dispatch: env: FORCE_COLOR: 1 XDG_CACHE_HOME: ${{ github.workspace }}/cache jobs: validate: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: ["3.7", "3.8", "3.9"] os: [ubuntu-latest, macos-latest, windows-latest] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - uses: actions/setup-java@v2 with: distribution: "temurin" java-version: "17" - name: Get pip cache dir id: pip-cache shell: bash run: | python -m ensurepip --upgrade echo "::set-output name=dir::$(pip cache dir)" - name: Cache pip uses: actions/cache@v2 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ matrix.os }}-pip-${{ matrix.python-version }}-v1-${{ hashFiles('**/setup.py', '**/requirements*.txt') }} restore-keys: | ${{ matrix.os }}-pip-${{ matrix.python-version }}-v1- - name: Cache xdg uses: actions/cache@v2 with: path: ${{ env.XDG_CACHE_HOME }} key: ${{ matrix.os }}-xdg-v1-${{ hashFiles('**/with-fuseki.sh') }} restore-keys: | ${{ matrix.os }}-xdg-v1- - name: Install dependencies shell: bash run: | # bash .travis.fuseki_install_optional.sh pip 
install --default-timeout 60 -r requirements.txt if [ "${{ matrix.os }}" == "ubuntu-latest" ] then sudo apt-get install -y libdb-dev elif [ "${{ matrix.os }}" == "macos-latest" ] then brew install berkeley-db@4 export BERKELEYDB_DIR=$(brew --prefix berkeley-db@4) fi pip install --default-timeout 60 -r requirements.dev.txt pip install networkx python setup.py install - name: Validate shell: bash run: | black --config black.toml --check ./rdflib || true flake8 --exit-zero rdflib mypy --show-error-context --show-error-codes rdflib if [ "${{ matrix.os }}" == "windows-latest" ] then pytest -ra --cov else ./with-fuseki.sh pytest -ra --cov fi rdflib-6.1.1/.gitignore000066400000000000000000000044221415774155300147670ustar00rootroot00000000000000RDFLib.sublime-project /docs/_build/ RDFLib.sublime-workspace coverage/ /.hgtags /.hgignore build/ /docs/draft/ *~ test_reports/*latest.ttl # PyCharm .idea/ prepare_changelog.sh #### vimdiff <(curl --silent -L https://github.com/github/gitignore/raw/master/Python.gitignore) .gitignore # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. 
*.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ rdflib-6.1.1/CHANGELOG.md000066400000000000000000002516261415774155300146220ustar00rootroot000000000000002021-12-20 RELEASE 6.1.1 ======================== Better testing and tidier code. 
This is a semi-major release that: * add support for Python 3.10 * updates the test suite to pytest (from nose) * tidies up a lot of continuous integration * gets more tests tested, not skipped * implements lots of mypy tests * updates several parsers and serializers * supports the new HexTuples format! * many bug fixes This release contains many, many hours of updates from Iwan Aucamp, so thank you Iwan! PRs merged since last release: * Update the guidelines for writing tests [PR #1517](https://github.com/RDFLib/rdflib/pull/1517) * Updated tox config to run mypy in default environment [PR #1450](https://github.com/RDFLib/rdflib/pull/1450) * Add type annotations to constructor parameters in Literal [PR #1498](https://github.com/RDFLib/rdflib/pull/1498) * Increase fuseki start timeout from 15 to 30 seconds [PR #1516](https://github.com/RDFLib/rdflib/pull/1516) * Forbid truthy values for lang when initializing Literal [PR #1494](https://github.com/RDFLib/rdflib/pull/1494) * Add Py 3.10 to testing envs [PR #1473](https://github.com/RDFLib/rdflib/pull/1473) * Add mypy to GitHub actions validate workflow [PR #1512](https://github.com/RDFLib/rdflib/pull/1512) * Improve error messages from with-fuseki.sh [PR #1510](https://github.com/RDFLib/rdflib/pull/1510) * Fix pipeline triggers [PR #1511](https://github.com/RDFLib/rdflib/pull/1511) * Change python version used for mypy to 3.7 [PR #1514](https://github.com/RDFLib/rdflib/pull/1514) * Quench nt test userwarn [PR #1500](https://github.com/RDFLib/rdflib/pull/1500) * Raise a more specific Exception when lang isn't valid [PR #1497](https://github.com/RDFLib/rdflib/pull/1497) * Fix for issue893 [PR #1504](https://github.com/RDFLib/rdflib/pull/1504) * Fix for issue 893 [PR #1501](https://github.com/RDFLib/rdflib/pull/1501) * Re-make of nicholascar's “Concise Bounded Description” PR #968 ... 
[PR #1502](https://github.com/RDFLib/rdflib/pull/1502) * Remove deprecated Statement class [PR #1496](https://github.com/RDFLib/rdflib/pull/1496) * Fix BNode.skolemize() returning a URIRef instead of an RDFLibGenid. [PR #1493](https://github.com/RDFLib/rdflib/pull/1493) * demo 980 resolution [PR #1495](https://github.com/RDFLib/rdflib/pull/1495) * Hextuples Serializer [PR #1489](https://github.com/RDFLib/rdflib/pull/1489) * Add bindings for rdflib namespaces. Import DCAM. [PR #1491](https://github.com/RDFLib/rdflib/pull/1491) * fix for issue 1484 raised and solved by Graham Klyne: [PR #1490](https://github.com/RDFLib/rdflib/pull/1490) * SDO HTTPS and DN creator script [PR #1485](https://github.com/RDFLib/rdflib/pull/1485) * Fix typing of create_input_source [PR #1487](https://github.com/RDFLib/rdflib/pull/1487) * guess_format() cater for JSON-LD files ending .json-ld [PR #1486](https://github.com/RDFLib/rdflib/pull/1486) * Add GitHub actions workflow for validation [PR #1461](https://github.com/RDFLib/rdflib/pull/1461) * Improved script for running with fuseki [PR #1476](https://github.com/RDFLib/rdflib/pull/1476) * RFC: Add PythonInputSource to create py-based graphs [PR #1463](https://github.com/RDFLib/rdflib/pull/1463) * Adapt for pytest and add back import of os in rdflib/parser.py [PR #1480](https://github.com/RDFLib/rdflib/pull/1480) * Make the test pass on windows [PR #1478](https://github.com/RDFLib/rdflib/pull/1478) * Add type hints [PR #1449](https://github.com/RDFLib/rdflib/pull/1449) * Fix shield for CI status [PR #1474](https://github.com/RDFLib/rdflib/pull/1474) * Fix test files with bare code [PR #1481](https://github.com/RDFLib/rdflib/pull/1481) * Remove some remaining nosetest import [PR #1482](https://github.com/RDFLib/rdflib/pull/1482) * Fix JSON-LD data import adds trailing slashes to IRIs (#1443) [PR #1456](https://github.com/RDFLib/rdflib/pull/1456) * Iwana 20211114 t1305 pytestx [PR #1460](https://github.com/RDFLib/rdflib/pull/1460) * Migrate 
from nosetest to pytest [PR #1452](https://github.com/RDFLib/rdflib/pull/1452) * Add import of os [PR #1464](https://github.com/RDFLib/rdflib/pull/1464) * replace pkg_resources with importlib.metadata [PR #1445](https://github.com/RDFLib/rdflib/pull/1445) * A new Turtle serializer [PR #1425](https://github.com/RDFLib/rdflib/pull/1425) * Fix typos discovered by codespell [PR #1446](https://github.com/RDFLib/rdflib/pull/1446) * Use assertTrue instead of assert_ for python 3.11 compatibility. [PR #1448](https://github.com/RDFLib/rdflib/pull/1448) * Undefined name: tmppath --> self.tmppath [PR #1438](https://github.com/RDFLib/rdflib/pull/1438) * Fix Graph.parse URL handling on windows [PR #1441](https://github.com/RDFLib/rdflib/pull/1441) * Make Store.namespaces an empty generator [PR #1432](https://github.com/RDFLib/rdflib/pull/1432) * Export DCMITYPE [PR #1433](https://github.com/RDFLib/rdflib/pull/1433) 2021-12-20 RELEASE 6.1.0 ======================== A slightly messed-up release of what is now 6.1.1. Do not use! 2021-10-10 RELEASE 6.0.2 ======================== Minor release to add OWL.rational & OWL.real which are needed to allow the OWL-RL package to use only rdflib namespaces, not it's own versions. * Add owl:rational and owl:real to match standard. 
[PR #1428](https://github.com/RDFLib/rdflib/pull/1428) A few other small things have been added, see the following merged PRs list: * rename arg LOVE to ns in rdfpipe [PR #1426](https://github.com/RDFLib/rdflib/pull/1426) * Remove Tox reference to Python 3.6 [PR #1422](https://github.com/RDFLib/rdflib/pull/1422) * Add Brick DefinedNamespace [PR #1419](https://github.com/RDFLib/rdflib/pull/1419) * Use setName on TokenConverter to set the name property [PR #1409](https://github.com/RDFLib/rdflib/pull/1409) * Add test for adding JSON-LD to guess_format() [PR #1408](https://github.com/RDFLib/rdflib/pull/1408) * Fix mypy type errors and add mypy to .drone.yml [PR #1407](https://github.com/RDFLib/rdflib/pull/1407) 2021-09-17 RELEASE 6.0.1 ======================== Minor release to fix a few small errors, in particular with JSON-LD parsing & serializing integration from rdflib-jsonld. Also, a few other niceties, such as allowing graph `add()`, `remove()` etc. to be chainable. * Add test for adding JSON-LD to guess_format() [PR #1408](https://github.com/RDFLib/rdflib/pull/1408) * Add JSON-LD to guess_format() [PR #1403](https://github.com/RDFLib/rdflib/pull/1403) * add dateTimeStamp, fundamental & constraining facets, 7-prop data model [PR #1399](https://github.com/RDFLib/rdflib/pull/1399) * fix: remove log message on import [PR #1398](https://github.com/RDFLib/rdflib/pull/1398) * Make graph and other methods chainable [PR #1394](https://github.com/RDFLib/rdflib/pull/1394) * fix: use correct name for json-ld [PR #1388](https://github.com/RDFLib/rdflib/pull/1388) * Allowing Container Membership Properties in RDF namespace (#873) [PR #1386](https://github.com/RDFLib/rdflib/pull/1386) * Update intro_to_sparql.rst [PR #1386](https://github.com/RDFLib/rdflib/pull/1384) * Iterate over dataset return quads [PR #1382](https://github.com/RDFLib/rdflib/pull/1382) 2021-07-20 RELEASE 6.0.0 ======================== 6.0.0 is a major stable release that drops support for Python 2 and 
Python 3 < 3.7. Type hinting is now present in much of the toolkit as a result. It includes the formerly independent JSON-LD parser/serializer, improvements to Namespaces that allow for IDE namespace prompting, simplified use of `g.serialize()` (turtle default, no need to `decode()`) and many other updates to documentation, store backends and so on. Performance of the in-memory store has also improved since Python 3.6 dictionary improvements. There are numerous supplementary improvements to the toolkit too, such as: * inclusion of Docker files for easier CI/CD * black config files for standardised code formatting * improved testing with mock SPARQL stores, rather than a reliance on DBPedia etc _**All PRs merged since 5.0.0:**_ * Fixes 1190 - pin major version of pyparsing [PR #1366](https://github.com/RDFLib/rdflib/pull/1366) * Add __init__ for shared jsonld module [PR #1365](https://github.com/RDFLib/rdflib/pull/1365) * Update README with chat info [PR #1363](https://github.com/RDFLib/rdflib/pull/1363) * add xsd dayTimeDuration and yearMonthDuration [PR #1364](https://github.com/RDFLib/rdflib/pull/1364) * Updated film.py [PR #1359](https://github.com/RDFLib/rdflib/pull/1359) * Migration from ClosedNamespace to DeclaredNamespace [PR #1074](https://github.com/RDFLib/rdflib/pull/1074) * Add @expectedFailure unit tests for #1294 and type annotations for compare.py [PR #1346](https://github.com/RDFLib/rdflib/pull/1346) * JSON-LD Integration [PR #1354](https://github.com/RDFLib/rdflib/pull/1354) * ENH: Make ClosedNamespace extend Namespace [PR #1213](https://github.com/RDFLib/rdflib/pull/1213) * Add unit test for #919 and more type hints for sparqlconnector and sparqlstore [PR #1348](https://github.com/RDFLib/rdflib/pull/1348) * fix #876 Updated term.py to add xsd:normalizedString and xsd:token support for Literals [PR #1102](https://github.com/RDFLib/rdflib/pull/1102) * Dev stack update [PR #1355](https://github.com/RDFLib/rdflib/pull/1355) * Add make coverage 
instructions to README [PR #1353](https://github.com/RDFLib/rdflib/pull/1353) * Improve running tests locally [PR #1352](https://github.com/RDFLib/rdflib/pull/1352) * support day, month and year function for date [PR #1154](https://github.com/RDFLib/rdflib/pull/1154) * Prevent `from_n3` from unescaping `\xhh` [PR #1343](https://github.com/RDFLib/rdflib/pull/1343) * Complete clean up of docs for 6.0.0 [PR #1296](https://github.com/RDFLib/rdflib/pull/1296) * pathname2url removal [PR #1288](https://github.com/RDFLib/rdflib/pull/1288) * Replace Sleepycat with BerkeleyDB [PR #1347](https://github.com/RDFLib/rdflib/pull/1347) * Replace use of DBPedia with the new SimpleHTTPMock [PR #1345](https://github.com/RDFLib/rdflib/pull/1345) * Update graph operator overloading for subclasses [PR #1349](https://github.com/RDFLib/rdflib/pull/1349) * Speedup Literal.__hash__ and Literal.__eq__ by accessing directly _da… [PR #1321](https://github.com/RDFLib/rdflib/pull/1321) * Implemented function translateAlgebra. This functions takes a SPARQL … [PR #1322](https://github.com/RDFLib/rdflib/pull/1322) * attempt at adding coveralls support to drone runs [PR #1337](https://github.com/RDFLib/rdflib/pull/1337) * Fix SPARQL update parsing to handle arbitrary amounts of triples in inserts [PR #1340](https://github.com/RDFLib/rdflib/pull/1340) * Add pathlib.PurePath support for Graph.serialize and Graph.parse [PR #1309](https://github.com/RDFLib/rdflib/pull/1309) * dataset examples file [PR #1289](https://github.com/RDFLib/rdflib/pull/1289) * Add handling for 308 (Permanent Redirect) [PR #1342](https://github.com/RDFLib/rdflib/pull/1342) * Speedup of __add_triple_context [PR #1320](https://github.com/RDFLib/rdflib/pull/1320) * Fix prov ns [PR #1318](https://github.com/RDFLib/rdflib/pull/1318) * Speedup __ctx_to_str. [PR #1319](https://github.com/RDFLib/rdflib/pull/1319) * Speedup decodeUnicodeEscape by avoiding useless string replace. 
[PR #1324](https://github.com/RDFLib/rdflib/pull/1324) * Fix errors reported by mypy [PR #1330](https://github.com/RDFLib/rdflib/pull/1330) * Require setuptools, rdflib/plugins/sparql/__init__.py and rdflib/plugin.py import pkg_resources [PR #1339](https://github.com/RDFLib/rdflib/pull/1339) * Fix tox config [PR #1313](https://github.com/RDFLib/rdflib/pull/1313) * Fix formatting of xsd:decimal [PR #1335](https://github.com/RDFLib/rdflib/pull/1335) * Add tests for issue #1299 [PR #1328](https://github.com/RDFLib/rdflib/pull/1328) * Add special handling for gYear and gYearMonth [PR #1315](https://github.com/RDFLib/rdflib/pull/1315) * Replace incomplete example in intro_to_sparql.rst [PR #1331](https://github.com/RDFLib/rdflib/pull/1331) * Added unit test for issue #977. [PR #1112](https://github.com/RDFLib/rdflib/pull/1112) * Don't sort variables in TXTResultSerializer [PR #1310](https://github.com/RDFLib/rdflib/pull/1310) * handle encoding of base64Binary Literals [PR #1258](https://github.com/RDFLib/rdflib/pull/1258) * Add tests for Graph.transitive_{subjects,objects} [PR #1307](https://github.com/RDFLib/rdflib/pull/1307) * Changed to support passing fully qualified queries through the graph … [PR #1253](https://github.com/RDFLib/rdflib/pull/1253) * Upgrade to GitHub-native Dependabot [PR #1298](https://github.com/RDFLib/rdflib/pull/1298) * Fix transitive_objects/subjects docstrings and signatures [PR #1305](https://github.com/RDFLib/rdflib/pull/1305) * Fix typo in ClosedNamespace doc string [PR #1293](https://github.com/RDFLib/rdflib/pull/1293) * Allow parentheses in uri [PR #1280](https://github.com/RDFLib/rdflib/pull/1280) * Add notes about how to install from git [PR #1286](https://github.com/RDFLib/rdflib/pull/1286) * Feature/forward version to 6.0.0-alpha [PR #1285](https://github.com/RDFLib/rdflib/pull/1285) * speedup notation3/turtle parser [PR #1272](https://github.com/RDFLib/rdflib/pull/1272) * Correct behaviour of compute_qname for URNs [PR 
#1274](https://github.com/RDFLib/rdflib/pull/1274) * Speedup __add_triple_context. [PR #1271](https://github.com/RDFLib/rdflib/pull/1271) * Feature/coverage configuration [PR #1267](https://github.com/RDFLib/rdflib/pull/1267) * optimize sparql.Bindings [PR #1192](https://github.com/RDFLib/rdflib/pull/1192) * issue_771_add_key_error_if_spaces [PR #1070](https://github.com/RDFLib/rdflib/pull/1070) * Typo fix [PR #1254](https://github.com/RDFLib/rdflib/pull/1254) * Adding Namespace.__contains__() [PR #1237](https://github.com/RDFLib/rdflib/pull/1237) * Add a Drone config file. [PR #1247](https://github.com/RDFLib/rdflib/pull/1247) * Add sentence on names not valid as Python IDs. [PR #1234](https://github.com/RDFLib/rdflib/pull/1234) * Add trig mimetype [PR #1238](https://github.com/RDFLib/rdflib/pull/1238) * Move flake8 config [PR #1239](https://github.com/RDFLib/rdflib/pull/1239) * Update SPARQL tests since the DBpedia was updated [PR #1240](https://github.com/RDFLib/rdflib/pull/1240) * fix foaf ClosedNamespace [PR #1220](https://github.com/RDFLib/rdflib/pull/1220) * add GeoSPARQL ClosedNamespace [PR #1221](https://github.com/RDFLib/rdflib/pull/1221) * docs: fix simple typo, -> yield [PR #1223](https://github.com/RDFLib/rdflib/pull/1223) * do not use current time in sparql TIMEZONE [PR #1193](https://github.com/RDFLib/rdflib/pull/1193) * Reset graph on exit from context [PR #1206](https://github.com/RDFLib/rdflib/pull/1206) * Fix usage of default-graph for POST and introduce POST_FORM [PR #1185](https://github.com/RDFLib/rdflib/pull/1185) * Changes to graph.serialize() [PR #1183](https://github.com/RDFLib/rdflib/pull/1183) * rd2dot Escape HTML in node label and URI text [PR #1209](https://github.com/RDFLib/rdflib/pull/1209) * tests: retry on network error (CI) [PR #1203](https://github.com/RDFLib/rdflib/pull/1203) * Add documentation and type hints for rdflib.query.Result and rdflib.graph.Graph [PR #1211](https://github.com/RDFLib/rdflib/pull/1211) * fix typo [PR 
#1218](https://github.com/RDFLib/rdflib/pull/1218) * Add architecture ppc64le to travis build [PR #1212](https://github.com/RDFLib/rdflib/pull/1212) * small cleanups [PR #1191](https://github.com/RDFLib/rdflib/pull/1191) * Remove the usage of assert in the SPARQLConnector [PR #1186](https://github.com/RDFLib/rdflib/pull/1186) * Remove requests [PR #1175](https://github.com/RDFLib/rdflib/pull/1175) * Support parsing paths specified with pathlib [PR #1180](https://github.com/RDFLib/rdflib/pull/1180) * URI Validation Performance Improvements [PR #1177](https://github.com/RDFLib/rdflib/pull/1177) * Fix serialize with multiple disks on windows [PR #1172](https://github.com/RDFLib/rdflib/pull/1172) * Fix for issue #629 - Arithmetic Operations of DateTime in SPARQL [PR #1061](https://github.com/RDFLib/rdflib/pull/1061) * Fixes #1043. [PR #1054](https://github.com/RDFLib/rdflib/pull/1054) * N3 parser: do not create formulas if the Turtle mode is activated [PR #1142](https://github.com/RDFLib/rdflib/pull/1142) * Move to using graph.parse() rather than deprecated graph.load() [PR #1167](https://github.com/RDFLib/rdflib/pull/1167) * Small improvement to serialize docs [PR #1162](https://github.com/RDFLib/rdflib/pull/1162) * Issue 1160 missing url fragment [PR #1163](https://github.com/RDFLib/rdflib/pull/1163) * remove import side-effects [PR #1156](https://github.com/RDFLib/rdflib/pull/1156) * Docs update [PR #1161](https://github.com/RDFLib/rdflib/pull/1161) * replace cgi by html, fixes issue #1110 [PR #1152](https://github.com/RDFLib/rdflib/pull/1152) * Deprecate some more Graph API surface [PR #1151](https://github.com/RDFLib/rdflib/pull/1151) * Add deprecation warning on graph.load() [PR #1150](https://github.com/RDFLib/rdflib/pull/1150) * Remove all remnants of Python2 compatibility [PR #1149](https://github.com/RDFLib/rdflib/pull/1149) * make csv2rdf work in py3 [PR #1117](https://github.com/RDFLib/rdflib/pull/1117) * Add a __dir__ attribute to a closed namespace [PR 
#1134](https://github.com/RDFLib/rdflib/pull/1134) * improved Graph().parse() [PR #1140](https://github.com/RDFLib/rdflib/pull/1140) * Discussion around new dict-based store implementation [PR #1133](https://github.com/RDFLib/rdflib/pull/1133) * fix 913 [PR #1139](https://github.com/RDFLib/rdflib/pull/1139) * Make parsers CharacterStream aware [PR #1145](https://github.com/RDFLib/rdflib/pull/1145) * More Black formatting changes [PR #1146](https://github.com/RDFLib/rdflib/pull/1146) * Fix comment [PR #1130](https://github.com/RDFLib/rdflib/pull/1130) * Updating namespace.py to solve issue #801 [PR #1044](https://github.com/RDFLib/rdflib/pull/1044) * Fix namespaces for SOSA and SSN. Fix #1126. [PR #1128](https://github.com/RDFLib/rdflib/pull/1128) * Create pull request template [PR #1114](https://github.com/RDFLib/rdflib/pull/1114) * BNode context dicts for NT and N-Quads parsers [PR #1108](https://github.com/RDFLib/rdflib/pull/1108) * Allow distinct blank node contexts from one NTriples parser to the next (#980) [PR #1107](https://github.com/RDFLib/rdflib/pull/1107) * Autodetect parse() format [PR #1046](https://github.com/RDFLib/rdflib/pull/1046) * fix #910: Updated evaluate.py so that union includes results of both branches, even when identical. 
[PR #1057](https://github.com/RDFLib/rdflib/pull/1057) * Removal of six & styling [PR #1051](https://github.com/RDFLib/rdflib/pull/1051) * Add SERVICE clause to documentation [PR #1041](https://github.com/RDFLib/rdflib/pull/1041) * add test with ubuntu 20.04 [PR #1038](https://github.com/RDFLib/rdflib/pull/1038) * Improved logo [PR #1037](https://github.com/RDFLib/rdflib/pull/1037) * Add requests to the tests_requirements [PR #1036](https://github.com/RDFLib/rdflib/pull/1036) * Set update endpoint similar to query endpoint for sparqlstore if only one is given [PR #1033](https://github.com/RDFLib/rdflib/pull/1033) * fix shebang typo [PR #1034](https://github.com/RDFLib/rdflib/pull/1034) * Add the content type 'application/sparql-update' when preparing a SPARQL update request [PR #1022](https://github.com/RDFLib/rdflib/pull/1022) * Fix typo in README.md [PR #1030](https://github.com/RDFLib/rdflib/pull/1030) * add Python 3.8 [PR #1023](https://github.com/RDFLib/rdflib/pull/1023) * Fix n3 parser exponent syntax of floats with leading dot. [PR #1012](https://github.com/RDFLib/rdflib/pull/1012) * DOC: Use sphinxcontrib-apidoc and various cleanups [PR #1010](https://github.com/RDFLib/rdflib/pull/1010) * FIX: Change is comparison to == for tuple [PR #1009](https://github.com/RDFLib/rdflib/pull/1009) * Update copyright year in docs conf.py [PR #1006](https://github.com/RDFLib/rdflib/pull/1006) 2020-04-18 RELEASE 5.0.0 ======================== 5.0.0 is a major stable release and is the last release to support Python 2 & 3.4. 5.0.0 is mostly backwards- compatible with 4.2.2 and is intended for long-term, bug fix only support. 5.0.0 comes two weeks after the 5.0.0RC1 and includes a small number of additional bug fixes. Note that rdflib-jsonld has released a version 0.5.0 to be compatible with rdflib 5.0.0. _**All PRs merged since 5.0.0RC1:**_ ### General Bugs Fixed: * Fix n3 parser exponent syntax of floats with leading dot. 
[PR #1012](https://github.com/RDFLib/rdflib/pull/1012) * FIX: Change is comparison to == for tuple [PR #1009](https://github.com/RDFLib/rdflib/pull/1009) * fix #913 : Added _parseBoolean function to enforce correct Lexical-to-value mapping [PR #995](https://github.com/RDFLib/rdflib/pull/995) ### Enhanced Features: * Issue 1003 [PR #1005](https://github.com/RDFLib/rdflib/pull/1005) ### SPARQL Fixes: * CONSTRUCT resolve with initBindings fixes #1001 [PR #1002](https://github.com/RDFLib/rdflib/pull/1002) ### Documentation Fixes: * DOC: Use sphinxcontrib-apidoc and various cleanups [PR #1010](https://github.com/RDFLib/rdflib/pull/1010) * Update copyright year in docs conf.py [PR #1006](https://github.com/RDFLib/rdflib/pull/1006) * slightly improved styling, small index text changes [PR #1004](https://github.com/RDFLib/rdflib/pull/1004) 2020-04-04 RELEASE 5.0.0RC1 =========================== After more than three years, RDFLib 5.0.0rc1 is finally released. This is a rollup of all of the bugfixes merged, and features introduced to RDFLib since RDFLib 4.2.2 was released in Jan 2017. While all effort was taken to minimize breaking changes in this release, there are some. Please see the upgrade4to5 document in the docs directory for more information on some specific differences from 4.2.2 to 5.0.0. _**All issues closed and PRs merged since 4.2.2:**_ ### General Bugs Fixed: * Pr 451 redux [PR #978](https://github.com/RDFLib/rdflib/pull/978) * NTriples fails to parse URIs with only a scheme [ISSUE #920](https://github.com/RDFLib/rdflib/issues/920), [PR #974](https://github.com/RDFLib/rdflib/pull/974) * Cannot clone on windows - Remove colons from test result files. 
[ISSUE #901](https://github.com/RDFLib/rdflib/issues/901), [PR #971](https://github.com/RDFLib/rdflib/pull/971) * Add requirement for requests to setup.py [PR #969](https://github.com/RDFLib/rdflib/pull/969) * fixed URIRef including native unicode characters [PR #961](https://github.com/RDFLib/rdflib/pull/961) * DCTERMS.format not working [ISSUE #932](https://github.com/RDFLib/rdflib/issues/932) * infixowl.manchesterSyntax do not encode strings [PR #906](https://github.com/RDFLib/rdflib/pull/906) * Fix blank node label to not contain '_:' during parsing [PR #886](https://github.com/RDFLib/rdflib/pull/886) * rename new SPARQLWrapper to SPARQLConnector [PR #872](https://github.com/RDFLib/rdflib/pull/872) * Fix #859. Unquote and Uriquote Literal Datatype. [PR #860](https://github.com/RDFLib/rdflib/pull/860) * Parsing nquads [ISSUE #786](https://github.com/RDFLib/rdflib/issues/786) * ntriples spec allows for upper-cased lang tag, fixes #782 [PR #784](https://github.com/RDFLib/rdflib/pull/784), [ISSUE #782](https://github.com/RDFLib/rdflib/issues/782) * Adds escaped single quote to literal parser [PR #736](https://github.com/RDFLib/rdflib/pull/736) * N3 parse error on single quote within single quotes [ISSUE #732](https://github.com/RDFLib/rdflib/issues/732) * Fixed #725 [PR #730](https://github.com/RDFLib/rdflib/pull/730) * test for issue #725: canonicalization collapses BNodes [PR #726](https://github.com/RDFLib/rdflib/pull/726) * RGDA1 graph canonicalization sometimes still collapses distinct BNodes [ISSUE #725](https://github.com/RDFLib/rdflib/issues/725) * Accept header should use a q parameter [PR #720](https://github.com/RDFLib/rdflib/pull/720) * Added test for Issue #682 and fixed. 
[PR #718](https://github.com/RDFLib/rdflib/pull/718) * Incompatibility with Python3: unichr [ISSUE #687](https://github.com/RDFLib/rdflib/issues/687) * namespace.py include colon in ALLOWED_NAME_CHARS [PR #663](https://github.com/RDFLib/rdflib/pull/663) * namespace.py fix compute_qname missing namespaces [PR #649](https://github.com/RDFLib/rdflib/pull/649) * RDFa parsing Error! `__init__()` got an unexpected keyword argument 'encoding' [ISSUE #639](https://github.com/RDFLib/rdflib/issues/639) * Bugfix: `term.Literal.__add__` [PR #451](https://github.com/RDFLib/rdflib/pull/451) * fixup of #443 [PR #445](https://github.com/RDFLib/rdflib/pull/445) * Microdata to rdf second edition bak [PR #444](https://github.com/RDFLib/rdflib/pull/444) ### Enhanced Features: * Register additional serializer plugins for SPARQL mime types. [PR #987](https://github.com/RDFLib/rdflib/pull/987) * Pr 388 redux [PR #979](https://github.com/RDFLib/rdflib/pull/979) * Allows RDF terms introduced by JSON-LD 1.1 [PR #970](https://github.com/RDFLib/rdflib/pull/970) * make SPARQLConnector work with DBpedia [PR #941](https://github.com/RDFLib/rdflib/pull/941) * ClosedNamespace returns right exception for way of access [PR #866](https://github.com/RDFLib/rdflib/pull/866) * Not adding all namespaces for n3 serializer [PR #832](https://github.com/RDFLib/rdflib/pull/832) * Adds basic support of xsd:duration [PR #808](https://github.com/RDFLib/rdflib/pull/808) * Add possibility to set authority and basepath to skolemize graph [PR #807](https://github.com/RDFLib/rdflib/pull/807) * Change notation3 list realization to non-recursive function. [PR #805](https://github.com/RDFLib/rdflib/pull/805) * Suppress warning for not using custom encoding. 
[PR #800](https://github.com/RDFLib/rdflib/pull/800) * Add support to parsing large xml inputs [ISSUE #749](https://github.com/RDFLib/rdflib/issues/749) [PR #750](https://github.com/RDFLib/rdflib/pull/750) * improve hash efficiency by directly using str/unicode hash [PR #746](https://github.com/RDFLib/rdflib/pull/746) * Added the csvw prefix to the RDFa initial context. [PR #594](https://github.com/RDFLib/rdflib/pull/594) * syncing changes from pyMicrodata [PR #587](https://github.com/RDFLib/rdflib/pull/587) * Microdata parser: updated the parser to the latest version of the microdata->rdf note (published in December 2014) [PR #443](https://github.com/RDFLib/rdflib/pull/443) * Literal.toPython() support for xsd:hexBinary [PR #388](https://github.com/RDFLib/rdflib/pull/388) ### SPARQL Fixes: * Total order patch patch [PR #862](https://github.com/RDFLib/rdflib/pull/862) * use <<= instead of deprecated << [PR #861](https://github.com/RDFLib/rdflib/pull/861) * Fix #847 [PR #856](https://github.com/RDFLib/rdflib/pull/856) * RDF Literal `"1"^^xsd:boolean` should _not_ coerce to True [ISSUE #847](https://github.com/RDFLib/rdflib/issues/847) * Makes NOW() return an UTC date [PR #844](https://github.com/RDFLib/rdflib/pull/844) * NOW() SPARQL should return an xsd:dateTime with a timezone [ISSUE #843](https://github.com/RDFLib/rdflib/issues/843) * fix property paths bug: issue #715 [PR #822](https://github.com/RDFLib/rdflib/pull/822), [ISSUE #715](https://github.com/RDFLib/rdflib/issues/715) * MulPath: correct behaviour of n3() [PR #820](https://github.com/RDFLib/rdflib/pull/820) * Literal total ordering [PR #793](https://github.com/RDFLib/rdflib/pull/793) * Remove SPARQLWrapper dependency [PR #744](https://github.com/RDFLib/rdflib/pull/744) * made UNION faster by not preventing duplicates [PR #741](https://github.com/RDFLib/rdflib/pull/741) * added a hook to add custom functions to SPARQL [PR #723](https://github.com/RDFLib/rdflib/pull/723) * Issue714 [PR 
#717](https://github.com/RDFLib/rdflib/pull/717) * Use <<= instead of deprecated << in SPARQL parser [PR #417](https://github.com/RDFLib/rdflib/pull/417) * Custom FILTER function for SPARQL engine [ISSUE #274](https://github.com/RDFLib/rdflib/issues/274) ### Code Quality and Cleanups: * a slightly opinionated autopep8 run [PR #870](https://github.com/RDFLib/rdflib/pull/870) * remove rdfa and microdata parsers from core RDFLib [PR #828](https://github.com/RDFLib/rdflib/pull/828) * ClosedNamespace KeyError -> AttributeError [PR #827](https://github.com/RDFLib/rdflib/pull/827) * typo in rdflib/plugins/sparql/update.py [ISSUE #760](https://github.com/RDFLib/rdflib/issues/760) * Fix logging in interactive mode [PR #731](https://github.com/RDFLib/rdflib/pull/731) * make namespace module flake8-compliant, change exceptions in that mod… [PR #711](https://github.com/RDFLib/rdflib/pull/711) * delete ez_setup.py? [ISSUE #669](https://github.com/RDFLib/rdflib/issues/669) * code duplication issue between rdflib and pymicrodata [ISSUE #582](https://github.com/RDFLib/rdflib/issues/582) * Transition from 2to3 to use of six.py to be merged in 5.0.0-dev [PR #519](https://github.com/RDFLib/rdflib/pull/519) * sparqlstore drop deprecated methods and args [PR #516](https://github.com/RDFLib/rdflib/pull/516) * python3 code seems shockingly inefficient [ISSUE #440](https://github.com/RDFLib/rdflib/issues/440) * removed md5_term_hash, fixes #240 [PR #439](https://github.com/RDFLib/rdflib/pull/439), [ISSUE #240](https://github.com/RDFLib/rdflib/issues/240) ### Testing: * 3.7 for travis [PR #864](https://github.com/RDFLib/rdflib/pull/864) * Added trig unit tests to highlight some current parsing/serializing issues [PR #431](https://github.com/RDFLib/rdflib/pull/431) ### Documentation Fixes: * Fix a doc string in the query module [PR #976](https://github.com/RDFLib/rdflib/pull/976) * setup.py: Make the license field use an SPDX identifier [PR #789](https://github.com/RDFLib/rdflib/pull/789) * 
Update README.md [PR #764](https://github.com/RDFLib/rdflib/pull/764) * Update namespaces_and_bindings.rst [PR #757](https://github.com/RDFLib/rdflib/pull/757) * DOC: README.md: rdflib-jsonld, https uris [PR #712](https://github.com/RDFLib/rdflib/pull/712) * make doctest support py2/py3 [ISSUE #707](https://github.com/RDFLib/rdflib/issues/707) * `pip install rdflib` (as per README.md) gets OSError on Mint 18.1 [ISSUE #704](https://github.com/RDFLib/rdflib/issues/704) 2017-01-29 RELEASE 4.2.2 ======================== This is a bug-fix release, and the last release in the 4.X.X series. Bug fixes: ---------- * SPARQL bugs fixed: * Fix for filters in sub-queries [#693](https://github.com/RDFLib/rdflib/pull/693) * Fixed bind, initBindings and filter problems [#294](https://github.com/RDFLib/rdflib/issues/294) [#555](https://github.com/RDFLib/rdflib/pull/555) [#580](https://github.com/RDFLib/rdflib/issues/580) [#586](https://github.com/RDFLib/rdflib/issues/586) [#601](https://github.com/RDFLib/rdflib/pull/601) [#615](https://github.com/RDFLib/rdflib/issues/615) [#617](https://github.com/RDFLib/rdflib/issues/617) [#619](https://github.com/RDFLib/rdflib/issues/619) [#630](https://github.com/RDFLib/rdflib/issues/630) [#653](https://github.com/RDFLib/rdflib/issues/653) [#686](https://github.com/RDFLib/rdflib/issues/686) [#688](https://github.com/RDFLib/rdflib/pull/688) [#692](https://github.com/RDFLib/rdflib/pull/692) * Fixed unexpected None value in SPARQL-update [#633](https://github.com/RDFLib/rdflib/issues/633) [#634](https://github.com/RDFLib/rdflib/pull/634) * Fix sparql, group by and count of null values with `optional` [#631](https://github.com/RDFLib/rdflib/issues/631) * Fixed sparql sub-query and aggregation bugs [#607](https://github.com/RDFLib/rdflib/issues/607) [#610](https://github.com/RDFLib/rdflib/pull/610) [#628](https://github.com/RDFLib/rdflib/issues/628) [#694](https://github.com/RDFLib/rdflib/pull/694) * Fixed parsing Complex BGPs as triples 
[#622](https://github.com/RDFLib/rdflib/pull/622) [#623](https://github.com/RDFLib/rdflib/issues/623) * Fixed DISTINCT being ignored inside aggregate functions [#404](https://github.com/RDFLib/rdflib/issues/404) [#611](https://github.com/RDFLib/rdflib/pull/611) [#678](https://github.com/RDFLib/rdflib/pull/678) * Fix unicode encoding errors in sparql processor [#446](https://github.com/RDFLib/rdflib/issues/446) [#599](https://github.com/RDFLib/rdflib/pull/599) * Fixed SPARQL select nothing no longer returning a `None` row [#554](https://github.com/RDFLib/rdflib/issues/554) [#592](https://github.com/RDFLib/rdflib/pull/592) * Fixed aggregate operators COUNT and SAMPLE to ignore unbound / NULL values [#564](https://github.com/RDFLib/rdflib/pull/564) [#563](https://github.com/RDFLib/rdflib/issues/563) [#567](https://github.com/RDFLib/rdflib/pull/567) [#568](https://github.com/RDFLib/rdflib/pull/568) * Fix sparql relative uris [#523](https://github.com/RDFLib/rdflib/issues/523) [#524](https://github.com/RDFLib/rdflib/pull/524) * SPARQL can now compare xsd:date type as well, fixes #532 [#532](https://github.com/RDFLib/rdflib/issues/532) [#533](https://github.com/RDFLib/rdflib/pull/533) * fix sparql path order on python3: "TypeError: unorderable types: SequencePath() < SequencePath()"" [#492](https://github.com/RDFLib/rdflib/issues/492) [#525](https://github.com/RDFLib/rdflib/pull/525) * SPARQL parser now robust to spurious semicolon [#381](https://github.com/RDFLib/rdflib/issues/381) [#528](https://github.com/RDFLib/rdflib/pull/528) * Let paths be comparable against all nodes even in py3 (preparedQuery error) [#545](https://github.com/RDFLib/rdflib/issues/545) [#552](https://github.com/RDFLib/rdflib/pull/552) * Made behavior of `initN` in `update` and `query` more consistent [#579](https://github.com/RDFLib/rdflib/issues/579) [#600](https://github.com/RDFLib/rdflib/pull/600) * SparqlStore: * SparqlStore now closes underlying urllib response body 
[#638](https://github.com/RDFLib/rdflib/pull/638) [#683](https://github.com/RDFLib/rdflib/pull/683) * SparqlStore injectPrefixes only modifies query if prefixes present and if adds a newline in between [#521](https://github.com/RDFLib/rdflib/issues/521) [#522](https://github.com/RDFLib/rdflib/pull/522) * Fixes and tests for AuditableStore [#537](https://github.com/RDFLib/rdflib/pull/537) [#557](https://github.com/RDFLib/rdflib/pull/557) * Trig bugs fixed: * trig export of multiple graphs assigns wrong prefixes to prefixedNames [#679](https://github.com/RDFLib/rdflib/issues/679) * Trig serialiser writing empty named graph name for default graph [#433](https://github.com/RDFLib/rdflib/issues/433) * Trig parser can creating multiple contexts for the default graph [#432](https://github.com/RDFLib/rdflib/issues/432) * Trig serialisation handling prefixes incorrectly [#428](https://github.com/RDFLib/rdflib/issues/428) [#699](https://github.com/RDFLib/rdflib/pull/699) * Fixed Nquads parser handling of triples in default graph [#535](https://github.com/RDFLib/rdflib/issues/535) [#536](https://github.com/RDFLib/rdflib/pull/536) * Fixed TypeError in Turtle serializer (unorderable types: DocumentFragment() > DocumentFragment()) [#613](https://github.com/RDFLib/rdflib/issues/613) [#648](https://github.com/RDFLib/rdflib/issues/648) [#666](https://github.com/RDFLib/rdflib/pull/666) [#676](https://github.com/RDFLib/rdflib/issues/676) * Fixed serialization and parsing of inf/nan [#655](https://github.com/RDFLib/rdflib/pull/655) [#658](https://github.com/RDFLib/rdflib/pull/658) * Fixed RDFa parser from failing on time elements with child nodes [#576](https://github.com/RDFLib/rdflib/issues/576) [#577](https://github.com/RDFLib/rdflib/pull/577) * Fix double reduction of \\ escapes in from_n3 [#546](https://github.com/RDFLib/rdflib/issues/546) [#548](https://github.com/RDFLib/rdflib/pull/548) * Fixed handling of xsd:base64Binary [#646](https://github.com/RDFLib/rdflib/issues/646) 
[#674](https://github.com/RDFLib/rdflib/pull/674) * Fixed Collection.__setitem__ broken [#604](https://github.com/RDFLib/rdflib/issues/604) [#605](https://github.com/RDFLib/rdflib/pull/605) * Fix ImportError when __main__ already loaded [#616](https://github.com/RDFLib/rdflib/pull/616) * Fixed broken top_level.txt file in distribution [#571](https://github.com/RDFLib/rdflib/issues/571) [#572](https://github.com/RDFLib/rdflib/pull/572) [#573](https://github.com/RDFLib/rdflib/pull/573) Enhancements: ------------- * Added support for Python 3.5+ [#526](https://github.com/RDFLib/rdflib/pull/526) * More aliases for common formats (nt, turtle) [#701](https://github.com/RDFLib/rdflib/pull/701) * Improved RDF1.1 ntriples support [#695](https://github.com/RDFLib/rdflib/issues/695) [#700](https://github.com/RDFLib/rdflib/pull/700) * Dependencies updated and improved compatibility with pyparsing, html5lib, SPARQLWrapper and elementtree [#550](https://github.com/RDFLib/rdflib/pull/550) [#589](https://github.com/RDFLib/rdflib/issues/589) [#606](https://github.com/RDFLib/rdflib/issues/606) [#641](https://github.com/RDFLib/rdflib/pull/641) [#642](https://github.com/RDFLib/rdflib/issues/642) [#650](https://github.com/RDFLib/rdflib/pull/650) [#671](https://github.com/RDFLib/rdflib/issues/671) [#675](https://github.com/RDFLib/rdflib/pull/675) [#684](https://github.com/RDFLib/rdflib/pull/684) [#696](https://github.com/RDFLib/rdflib/pull/696) * Improved prefix for SPARQL namespace in XML serialization [#493](https://github.com/RDFLib/rdflib/issues/493) [#588](https://github.com/RDFLib/rdflib/pull/588) * Performance improvements: * SPARQL Aggregation functions don't build up memory for each row [#678](https://github.com/RDFLib/rdflib/pull/678) * Collections now support += (__iadd__), fixes slow creation of large lists [#609](https://github.com/RDFLib/rdflib/issues/609) [#612](https://github.com/RDFLib/rdflib/pull/612) [#691](https://github.com/RDFLib/rdflib/pull/691) * SPARQL 
Optimisation to expand BGPs in a smarter way [#547](https://github.com/RDFLib/rdflib/pull/547) * SPARQLStore improvements * improved SPARQLStore BNode customizability [#511](https://github.com/RDFLib/rdflib/issues/511) [#512](https://github.com/RDFLib/rdflib/pull/512) [#513](https://github.com/RDFLib/rdflib/pull/513) [#603](https://github.com/RDFLib/rdflib/pull/603) * Adding the option of using POST for long queries in SPARQLStore [#672](https://github.com/RDFLib/rdflib/issues/672) [#673](https://github.com/RDFLib/rdflib/pull/673) * Exposed the timeout of SPARQLWrapper [#531](https://github.com/RDFLib/rdflib/pull/531) * SPARQL prepared query now carries the original (unparsed) parameters [#565](https://github.com/RDFLib/rdflib/pull/565) * added .n3 methods for path objects [#553](https://github.com/RDFLib/rdflib/pull/553) * Added support for xsd:gYear and xsd:gYearMonth [#635](https://github.com/RDFLib/rdflib/issues/635) [#636](https://github.com/RDFLib/rdflib/pull/636) * Allow duplicates in rdf:List [#223](https://github.com/RDFLib/rdflib/issues/223) [#690](https://github.com/RDFLib/rdflib/pull/690) * Improved slicing of Resource objects [#529](https://github.com/RDFLib/rdflib/pull/529) Cleanups: --------- * cleanup: SPARQL Prologue and Query new style classes [#566](https://github.com/RDFLib/rdflib/pull/566) * Reduce amount of warnings, especially closing opened file pointers [#518](https://github.com/RDFLib/rdflib/pull/518) [#651](https://github.com/RDFLib/rdflib/issues/651) * Improved ntriples parsing exceptions to actually tell you what's wrong [#640](https://github.com/RDFLib/rdflib/pull/640) [#643](https://github.com/RDFLib/rdflib/pull/643) * remove ancient and broken 2.3 support code. 
[#680](https://github.com/RDFLib/rdflib/issues/680) [#681](https://github.com/RDFLib/rdflib/pull/681) * Logger output improved [#662](https://github.com/RDFLib/rdflib/pull/662) * properly cite RGDA1 [#624](https://github.com/RDFLib/rdflib/pull/624) * Avoid class reference to imported function [#574](https://github.com/RDFLib/rdflib/issues/574) [#578](https://github.com/RDFLib/rdflib/pull/578) * Use find_packages for package discovery. [#590](https://github.com/RDFLib/rdflib/pull/590) * Prepared ClosedNamespace (and _RDFNamespace) to inherit from Namespace (5.0.0) [#551](https://github.com/RDFLib/rdflib/pull/551) [#595](https://github.com/RDFLib/rdflib/pull/595) * Avoid verbose build logging [#534](https://github.com/RDFLib/rdflib/pull/534) * (ultra petty) Remove an unused import [#593](https://github.com/RDFLib/rdflib/pull/593) Testing improvements: --------------------- * updating deprecated testing syntax [#697](https://github.com/RDFLib/rdflib/pull/697) * make test 375 more portable (use sys.executable rather than python) [#664](https://github.com/RDFLib/rdflib/issues/664) [#668](https://github.com/RDFLib/rdflib/pull/668) * Removed outdated, skipped test for #130 that depended on content from the internet [#256](https://github.com/RDFLib/rdflib/issues/256) * enable all warnings during travis nosetests [#517](https://github.com/RDFLib/rdflib/pull/517) * travis updates [#659](https://github.com/RDFLib/rdflib/issues/659) * travis also builds release branches [#598](https://github.com/RDFLib/rdflib/pull/598) Doc improvements: ----------------- * Update list of builtin serialisers in docstring [#621](https://github.com/RDFLib/rdflib/pull/621) * Update reference to "Emulating container types" [#575](https://github.com/RDFLib/rdflib/issues/575) [#581](https://github.com/RDFLib/rdflib/pull/581) [#583](https://github.com/RDFLib/rdflib/pull/583) [#584](https://github.com/RDFLib/rdflib/pull/584) * docs: clarify the use of an identifier when persisting a triplestore 
[#654](https://github.com/RDFLib/rdflib/pull/654) * DOC: fix simple typo, -> unnamed [#562](https://github.com/RDFLib/rdflib/pull/562) 2015-08-12 RELEASE 4.2.1 ======================== This is a bug-fix release. Minor enhancements: ------------------- * Added a Networkx connector [#471](https://github.com/RDFLib/rdflib/pull/471), [#507](https://github.com/RDFLib/rdflib/pull/507) * Added a graph_tool connector [#473](https://github.com/RDFLib/rdflib/pull/473) * Added a `graphs` method to the Dataset object [#504](https://github.com/RDFLib/rdflib/pull/504), [#495](https://github.com/RDFLib/rdflib/issues/495) * Batch commits for `SPARQLUpdateStore` [#486](https://github.com/RDFLib/rdflib/pull/486) Bug fixes: ---------- * Fixed bnode collision bug [#506](https://github.com/RDFLib/rdflib/pull/506), [#496](https://github.com/RDFLib/rdflib/pull/496), [#494](https://github.com/RDFLib/rdflib/issues/494) * fix `util.from_n3()` parsing Literals with datatypes and Namespace support [#503](https://github.com/RDFLib/rdflib/pull/503), [#502](https://github.com/RDFLib/rdflib/issues/502) * make `Identifier.__hash__` stable wrt. 
multi processes [#501](https://github.com/RDFLib/rdflib/pull/501), [#500](https://github.com/RDFLib/rdflib/issues/500) * fix handling `URLInputSource` without content-type [#499](https://github.com/RDFLib/rdflib/pull/499), [#498](https://github.com/RDFLib/rdflib/pull/498) * no relative import in `algebra` when run as a script [#497](https://github.com/RDFLib/rdflib/pull/497) * Duplicate option in armstrong `theme.conf` removed [#491](https://github.com/RDFLib/rdflib/issues/491) * `Variable.__repr__` returns a python representation string, not n3 [#488](https://github.com/RDFLib/rdflib/pull/488) * fixed broken example [#482](https://github.com/RDFLib/rdflib/pull/482) * trig output fixes [#480](https://github.com/RDFLib/rdflib/pull/480) * set PYTHONPATH to make rdfpipe tests use the right rdflib version [#477](https://github.com/RDFLib/rdflib/pull/477) * fix RDF/XML problem with unqualified use of `rdf:about` [#470](https://github.com/RDFLib/rdflib/pull/470), [#468](https://github.com/RDFLib/rdflib/issues/468) * `AuditableStore` improvements [#469](https://github.com/RDFLib/rdflib/pull/469), [#463](https://github.com/RDFLib/rdflib/pull/463) * added asserts for `graph.set([s,p,o])` so `s` and `p` aren't `None` [#467](https://github.com/RDFLib/rdflib/pull/467) * `threading.RLock` instances are context managers [#465](https://github.com/RDFLib/rdflib/pull/465) * SPARQLStore does not transform Literal('') into Literal('None') anymore [#459](https://github.com/RDFLib/rdflib/pull/459), [#457](https://github.com/RDFLib/rdflib/issues/457) * slight performance increase for graph.all_nodes() [#458](https://github.com/RDFLib/rdflib/pull/458) Testing improvements: --------------------- * travis: migrate to docker container infrastructure [#508](https://github.com/RDFLib/rdflib/pull/508) * test for narrow python builds (chars > 0xFFFF) (related to [#453](https://github.com/RDFLib/rdflib/pull/453), [#454](https://github.com/RDFLib/rdflib/pull/454) ) 
[#456](https://github.com/RDFLib/rdflib/issues/456), [#509](https://github.com/RDFLib/rdflib/pull/509) * dropped testing py3.2 [#448](https://github.com/RDFLib/rdflib/issues/448) * Running a local fuseki server on travis and making it failsafe [#476](https://github.com/RDFLib/rdflib/pull/476), [#475](https://github.com/RDFLib/rdflib/issues/475), [#474](https://github.com/RDFLib/rdflib/pull/474), [#466](https://github.com/RDFLib/rdflib/pull/466), [#460](https://github.com/RDFLib/rdflib/issues/460) * exclude `def main():` functions from test coverage analysis [#472](https://github.com/RDFLib/rdflib/pull/472) 2015-02-19 RELEASE 4.2.0 ======================== This is a new minor version of RDFLib including a handful of new features: * Supporting N-Triples 1.1 syntax using UTF-8 encoding [#447](https://github.com/RDFLib/rdflib/pull/447), [#449](https://github.com/RDFLib/rdflib/pull/449), [#400](https://github.com/RDFLib/rdflib/issues/400) * Graph comparison now really works using RGDA1 (RDF Graph Digest Algorithm 1) [#441](https://github.com/RDFLib/rdflib/pull/441) [#385](https://github.com/RDFLib/rdflib/issues/385) * More graceful degradation than simple crashing for unicode chars > 0xFFFF on narrow python builds. Parsing such characters will now work, but issue a UnicodeWarning. If you run `python -W all` you will already see a warning on `import rdflib` will show a warning (ImportWarning). 
[#453](https://github.com/RDFLib/rdflib/pull/453), [#454](https://github.com/RDFLib/rdflib/pull/454) * URLInputSource now supports json-ld [#425](https://github.com/RDFLib/rdflib/pull/425) * SPARQLStore is now graph aware [#401](https://github.com/RDFLib/rdflib/pull/401), [#402](https://github.com/RDFLib/rdflib/pull/402) * SPARQLStore now uses SPARQLWrapper for updates [#397](https://github.com/RDFLib/rdflib/pull/397) * Certain logging output is immediately shown in interactive mode [#414](https://github.com/RDFLib/rdflib/pull/414) * Python 3.4 fully supported [#418](https://github.com/RDFLib/rdflib/pull/418) Minor enhancements & bugs fixed: -------------------------------- * Fixed double invocation of 2to3 [#437](https://github.com/RDFLib/rdflib/pull/437) * PyRDFa parser missing brackets [#434](https://github.com/RDFLib/rdflib/pull/434) * Correctly handle \uXXXX and \UXXXXXXXX escapes in n3 files [#426](https://github.com/RDFLib/rdflib/pull/426) * Logging cleanups and keeping it on stderr [#420](https://github.com/RDFLib/rdflib/pull/420) [#414](https://github.com/RDFLib/rdflib/pull/414) [#413](https://github.com/RDFLib/rdflib/issues/413) * n3: allow @base URI to have a trailing '#' [#407](https://github.com/RDFLib/rdflib/pull/407) [#379](https://github.com/RDFLib/rdflib/issues/379) * microdata: add file:// to base if it's a filename so rdflib can parse its own output [#406](https://github.com/RDFLib/rdflib/pull/406) [#403](https://github.com/RDFLib/rdflib/issues/403) * TSV Results parse skips empty bindings in result [#390](https://github.com/RDFLib/rdflib/pull/390) * fixed accidental test run due to name [#389](https://github.com/RDFLib/rdflib/pull/389) * Bad boolean list serialization to Turtle & fixed ambiguity between Literal(False) and None [#387](https://github.com/RDFLib/rdflib/pull/387) [#382](https://github.com/RDFLib/rdflib/pull/382) * Current version number & PyPI link in README.md [#383](https://github.com/RDFLib/rdflib/pull/383) 2014-04-15 RELEASE 
4.1.2 ======================== This is a bug-fix release. * Fixed unicode/str bug in py3 for rdfpipe [#375](https://github.com/RDFLib/rdflib/issues/375) 2014-03-03 RELEASE 4.1.1 ======================== This is a bug-fix release. This will be the last RDFLib release to support python 2.5. * The RDF/XML Parser was made stricter, now raises exceptions for illegal repeated node-elements. [#363](https://github.com/RDFLib/rdflib/issues/363) * The SPARQLUpdateStore now supports non-ascii unicode in update statements [#356](https://github.com/RDFLib/rdflib/issues/356) * Fixed a bug in the NTriple/NQuad parser wrt. to unicode escape sequences [#352](https://github.com/RDFLib/rdflib/issues/352) * HTML5Lib is no longer pinned to 0.95 [#355](https://github.com/RDFLib/rdflib/issues/360) * RDF/XML Serializer now uses parseType=Literal for well-formed XML literals * A bug in the manchester OWL syntax was fixed [#355](https://github.com/RDFLib/rdflib/issues/355) 2013-12-31 RELEASE 4.1 ====================== This is a new minor version RDFLib, which includes a handful of new features: * A TriG parser was added (we already had a serializer) - it is up-to-date wrt. to the newest spec from: http://www.w3.org/TR/trig/ * The Turtle parser was made up to date wrt. to the latest Turtle spec. * Many more tests have been added - RDFLib now has over 2000 (passing!) tests. This is mainly thanks to the NT, Turtle, TriG, NQuads and SPARQL test-suites from W3C. This also included many fixes to the nt and nquad parsers. * ```ConjunctiveGraph``` and ```Dataset``` now support directly adding/removing quads with ```add/addN/remove``` methods. * ```rdfpipe``` command now supports datasets, and reading/writing context sensitive formats. * Optional graph-tracking was added to the Store interface, allowing empty graphs to be tracked for Datasets. 
The DataSet class also saw a general clean-up, see: [#309](https://github.com/RDFLib/rdflib/pull/309) * After long deprecation, ```BackwardCompatibleGraph``` was removed. Minor enhancements/bugs fixed: ------------------------------ * Many code samples in the documentation were fixed thanks to @PuckCh * The new ```IOMemory``` store was optimised a bit * ```SPARQL(Update)Store``` has been made more generic. * MD5 sums were never reinitialized in ```rdflib.compare``` * Correct default value for empty prefix in N3 [#312](https://github.com/RDFLib/rdflib/issues/312) * Fixed tests when running in a non UTF-8 locale [#344](https://github.com/RDFLib/rdflib/issues/344) * Prefix in the original turtle have an impact on SPARQL query resolution [#313](https://github.com/RDFLib/rdflib/issues/313) * Duplicate BNode IDs from N3 Parser [#305](https://github.com/RDFLib/rdflib/issues/305) * Use QNames for TriG graph names [#330](https://github.com/RDFLib/rdflib/issues/330) * \uXXXX escapes in Turtle/N3 were fixed [#335](https://github.com/RDFLib/rdflib/issues/335) * A way to limit the number of triples retrieved from the ```SPARQLStore``` was added [#346](https://github.com/RDFLib/rdflib/pull/346) * Dots in localnames in Turtle [#345](https://github.com/RDFLib/rdflib/issues/345) [#336](https://github.com/RDFLib/rdflib/issues/336) * ```BNode``` as Graph's public ID [#300](https://github.com/RDFLib/rdflib/issues/300) * Introduced ordering of ```QuotedGraphs``` [#291](https://github.com/RDFLib/rdflib/issues/291) 2013-05-22 RELEASE 4.0.1 ======================== Following RDFLib tradition, some bugs snuck into the 4.0 release. This is a bug-fixing release: * the new URI validation caused lots of problems, but is necessary to avoid ''RDF injection'' vulnerabilities. In the spirit of ''be liberal in what you accept, but conservative in what you produce", we moved validation to serialisation time. 
* the ```rdflib.tools``` package was missing from the ```setup.py``` script, and was therefore not included in the PYPI tarballs. * RDF parser choked on empty namespace URI [#288](https://github.com/RDFLib/rdflib/issues/288) * Parsing from ```sys.stdin``` was broken [#285](https://github.com/RDFLib/rdflib/issues/285) * The new IO store had problems with concurrent modifications if several graphs used the same store [#286](https://github.com/RDFLib/rdflib/issues/286) * Moved HTML5Lib dependency to the recently released 1.0b1 which support python3 2013-05-16 RELEASE 4.0 ====================== This release includes several major changes: * The new SPARQL 1.1 engine (rdflib-sparql) has been included in the core distribution. SPARQL 1.1 queries and updates should work out of the box. * SPARQL paths are exposed as operators on ```URIRefs```, these can then be be used with graph.triples and friends: ```py # List names of friends of Bob: g.triples(( bob, FOAF.knows/FOAF.name , None )) # All super-classes: g.triples(( cls, RDFS.subClassOf * '+', None )) ``` * a new ```graph.update``` method will apply SPARQL update statements * Several RDF 1.1 features are available: * A new ```DataSet``` class * ```XMLLiteral``` and ```HTMLLiterals``` * ```BNode``` (de)skolemization is supported through ```BNode.skolemize```, ```URIRef.de_skolemize```, ```Graph.skolemize``` and ```Graph.de_skolemize``` * Handled of Literal equality was split into lexical comparison (for normal ```==``` operator) and value space (using new ```Node.eq``` methods). This introduces some slight backwards incompatible changes, but was necessary, as the old version had inconsistent hash and equality methods that could lead the literals not working correctly in dicts/sets. The new way is more in line with how SPARQL 1.1 works. 
For the full details, see: https://github.com/RDFLib/rdflib/wiki/Literal-reworking * Iterating over ```QueryResults``` will generate ```ResultRow``` objects, these allow access to variable bindings as attributes or as a dict. I.e. ```py for row in graph.query('select ... ') : print row.age, row["name"] ``` * "Slicing" of Graphs and Resources as syntactic sugar: ([#271](https://github.com/RDFLib/rdflib/issues/271)) ```py graph[bob : FOAF.knows/FOAF.name] -> generator over the names of Bobs friends ``` * The ```SPARQLStore``` and ```SPARQLUpdateStore``` are now included in the RDFLib core * The documentation has been given a major overhaul, and examples for most features have been added. Minor Changes: -------------- * String operations on URIRefs return new URIRefs: ([#258](https://github.com/RDFLib/rdflib/issues/258)) ```py >>> URIRef('http://example.org/')+'test rdflib.term.URIRef('http://example.org/test') ``` * Parser/Serializer plugins are also found by mime-type, not just by plugin name: ([#277](https://github.com/RDFLib/rdflib/issues/277)) * ```Namespace``` is no longer a subclass of ```URIRef``` * URIRefs and Literal language tags are validated on construction, avoiding some "RDF-injection" issues ([#266](https://github.com/RDFLib/rdflib/issues/266)) * A new memory store needs much less memory when loading large graphs ([#268](https://github.com/RDFLib/rdflib/issues/268)) * Turtle/N3 serializer now supports the base keyword correctly ([#248](https://github.com/RDFLib/rdflib/issues/248)) * py2exe support was fixed ([#257](https://github.com/RDFLib/rdflib/issues/257)) * Several bugs in the TriG serializer were fixed * Several bugs in the NQuads parser were fixed 2013-03-01 RELEASE 3.4 ====================== This release introduced new parsers for structured data in HTML. In particular formats: hturtle, rdfa, mdata and an auto-detecting html format were added. Thanks to Ivan Herman for this! 
This release includes a lot of admin maintenance - correct dependencies
* The terms of a triple are now asserted to be RDFLib Node's in graph.add This should avoid getting strings and other things in the store. ([#200](https://github.com/RDFLib/rdflib/issues/200)) * Added a specific TurtleParser that does not require the store to be non-formula aware. ([#214](https://github.com/RDFLib/rdflib/issues/214)) * A trig-serializer was added, see: http://www4.wiwiss.fu-berlin.de/bizer/trig/ * BNode generation was made thread-safe ([#209](https://github.com/RDFLib/rdflib/issues/209)) (also fixed better by dzinxed) * Illegal BNode IDs removed from NT output: ([#212](https://github.com/RDFLib/rdflib/issues/212)) * and more minor bug fixes that had no issues 2012-04-24 RELEASE 3.2.1 ======================== This is mainly a maintenance release. Changes: * New setuptools entry points for query processors and results * Literals constructed from other literals copy datatype/lang ([#188](https://github.com/RDFLib/rdflib/issues/188)) * Relative URIs are resolved incorrectly after redirects ([#130](https://github.com/RDFLib/rdflib/issues/130)) * Illegal prefixes in turtle output ([#161](https://github.com/RDFLib/rdflib/issues/161)) * Sleepcat store unstable prefixes ([#201](https://github.com/RDFLib/rdflib/issues/201)) * Consistent toPyton() for all node objects ([#174](https://github.com/RDFLib/rdflib/issues/174)) * Better random BNode ID in multi-thread environments ([#185](https://github.com/RDFLib/rdflib/issues/185)) 2012-01-19 RELEASE 3.2.0 ======================== Major changes: * Thanks to Thomas Kluyver, rdflib now works under python3, the setup.py script automatically runs 2to3. * Unit tests were updated and cleaned up. Now all tests should pass. * Documentation was updated and cleaned up. 
* A new resource oriented API was added: http://code.google.com/p/rdflib/issues/detail?id=166 Fixed many minor issues: * http://code.google.com/p/rdflib/issues/detail?id=177 http://code.google.com/p/rdflib/issues/detail?id=129 Restored compatibility with Python 2.4 * http://code.google.com/p/rdflib/issues/detail?id=158 Reworking of Query result handling * http://code.google.com/p/rdflib/issues/detail?id=193 generating xml:base attribute in RDF/XML output * http://code.google.com/p/rdflib/issues/detail?id=180 serialize(format="pretty-xml") fails on cyclic links 2011-03-17 RELEASE 3.1.0 ======================== Fixed a range of minor issues: * http://code.google.com/p/rdflib/issues/detail?id=128 Literal.__str__ does not behave like unicode * http://code.google.com/p/rdflib/issues/detail?id=141 (RDFa Parser) Does not handle application/xhtml+xml * http://code.google.com/p/rdflib/issues/detail?id=142 RDFa TC #117: Fragment identifiers stripped from BASE * http://code.google.com/p/rdflib/issues/detail?id=146 Malformed literals produced when rdfa contains newlines * http://code.google.com/p/rdflib/issues/detail?id=152 Namespaces beginning with _ are invalid * http://code.google.com/p/rdflib/issues/detail?id=156 Turtle Files with a UTF-8 BOM fail to parse * http://code.google.com/p/rdflib/issues/detail?id=154 ClosedNamespace.__str__ returns URIRef not str * http://code.google.com/p/rdflib/issues/detail?id=150 IOMemory does not override open * http://code.google.com/p/rdflib/issues/detail?id=153 Timestamps with microseconds *and* "Z" timezone are not parsed * http://code.google.com/p/rdflib/issues/detail?id=118 DateTime literals with offsets fail to convert to Python * http://code.google.com/p/rdflib/issues/detail?id=157 Timestamps with timezone information are not parsed * http://code.google.com/p/rdflib/issues/detail?id=151 problem with unicode literals in rdflib.compare.graph_diff * http://code.google.com/p/rdflib/issues/detail?id=149 BerkeleyDB Store broken with 
create=False * http://code.google.com/p/rdflib/issues/detail?id=134 Would be useful if Graph.query could propagate kwargs to a plugin processor * http://code.google.com/p/rdflib/issues/detail?id=133 Graph.connected exception when passed empty graph * http://code.google.com/p/rdflib/issues/detail?id=129 Not compatible with Python 2.4 * http://code.google.com/p/rdflib/issues/detail?id=119 Support Python's set operations on Graph * http://code.google.com/p/rdflib/issues/detail?id=130 NT output encoding to utf-8 broken as it goes through _xmlcharrefreplace * http://code.google.com/p/rdflib/issues/detail?id=121#c1 Store SPARQL Support 2010-05-13 RELEASE 3.0.0 ======================== Working test suite with all tests passing. Removed dependency on setuptools. (Issue #43) Updated Package and Module Names to follow conventions outlined in http://www.python.org/dev/peps/pep-0008/ Removed SPARQL bits and non core plugins. They are mostly moving to http://code.google.com/p/rdfextras/ at least until they are stable. Fixed datatype for Literal(True). Fixed Literal to enforce constraint of having either a language or datatype but not both. Fixed Literal's repr. Fixed to Graph Add/Sub/Mul opterators. Upgraded RDFa parser to pyRdfa. Upgraded N3 parser to the one from CWM. Fixed unicode encoding issue involving N3Parser. N3 serializer improvements. Fixed HTTP content-negotiation Fixed Store.namespaces method (which caused a few issues depending on Store implementation being used.) Fixed interoperability issue with plugin module. Fixed use of Deprecated functionality. 2009-03-30 RELEASE 2.4.1 ======================== Fixed Literal comparison case involving Literal's with datatypes of XSD.base64Binary. Fixed case where XSD.date was matching before XSD.dateTime for datetime instances. Fixed jython interoperability issue (issue #53). Fixed Literal repr to handle apostrophes correctly (issue #28). Fixed Literal's repr to be consistent with its ```__init__``` (issue #33). 
2007-04-04 RELEASE 2.4.0 ======================== Improved Literal comparison / equality Sparql cleanup. getLiteralValue now returns the Literal object instead of the result of toPython(). Now that Literals override a good coverage of comparison operators, they should be passed around as first class objects in the SPARQL evaluation engine. Added support for session bnodes re: sparql Fixed prolog reduce/reduce conflict. Added Py_None IncRefs where they were being passed into Python method invocations (per drewp's patch) Fixed sparql queries involving empty namespace prefix. Fixed the selected variables sparql issue Fixed support in SPARQL queries. Fixed involving multiple unions and queries are nested more than one level (bug in _getAllVariables causing failure when parent.top is None) Fixed test_sparql_equals.py. Fixed sparql json result comma errors issue. Fixed test_sparql_json_results.py (SELECT * variables out of order) Added a 4Suite-based SPARQL XML Writer implementation. If 4Suite is not installed, the fallback python saxutils is used instead applied patch from http://rdflib.net/issues/2007/02/23/bugs_in_rdflib.sparql.queryresult/issue The restriction on GRAPH patterns with variables has been relieved a bit to allow such usage when the variable is provided as an initial binding Fix for OPTIONAL patterns. P1 OPT P2, where P1 and P2 shared variables which were bound to BNodes were not unifying on these BNode variable efficiently / correctly. The fix was to add bindings for 'stored' BNodes so they aren't confused for wildcards Added support to n3 parser for retaining namespace bindings. Fixed several RDFaParser bugs. Added serializer specific argument support. Fixed a few PrettyXMLSerializer issues and added a max_depth option. Fixed some TurtleSerializer issues. Fixed some N3Serializer issues. 
Added support easy_install added link to long_descriptin for easy_install -U rdflib==dev to work; added download_url back added continuous-releases-using-subversion bit Added rdflib_tools package Added rdfpipe Added initial EARLPluging Improved test running... using nose... added tests Exposed generated test cases for nose to find. added bit to configure 'setup.py nosetests' to run doc tests added nose test bits Added md5_term_hash method to terms. Added commit_pending_transaction argument to Graph's close method. Added DeprecationWarning to rdflib.constants Added a NamespaceDict class for those who want to avoid the Namespace as subclass of URIRef issues Added bind function Fixed type of Namespace re: URIRef vs. unicode Improved ValueError message Changed value method's any argument to default to True Changed ```__repr__``` to always reflect that it's an rdf.Literal -- as this is the case even though we now have it acting like the corresponding type in some casses A DISTINCT was added to the SELECT clause to ensure duplicate triples are not returned (an RDF graph is a set of triples) - which can happen for certain join expressions. Support for ConditionalAndExpressionList and RelationalExpressionList (|| and && operators in FILTER) Fixed context column comparison. The hash integer was being compared with 'F' causing a warning:Warning: Truncated incorrect DOUBLE value: 'F' applied patch in http://rdflib.net/issues/2006/12/13/typos_in_abstractsqlstore.py/issue fix for http://rdflib.net/issues/2006/12/07/problems_with_graph.seq()_when_sequences_contain_more_than_9_items./issue General code cleanup (removing redundant imports, changing relative imports to absolute imports etc) Removed usage of deprecated bits. Added a number of test cases. Added DeprecationWarning for save method refactoring of GraphPattern ReadOnlyGraphAggregate uses Graph constructor properly to setup (optionally) a common store Fixed bug with . (fullstop) in localname parts. 
Changed Graph's value method to return None instead of raising an AssertionError. Fixed conversion of (exiplicit) MySQL ports to integers. Fixed MySQL store so it properly calculates ```__len__``` of individual Graphs Aligned with how BerkeleyDB is generating events (remove events are expressed in terms of interned strings) Added code to catch unpickling related exceptions Added BerkeleyDB store implementation. Merged TextIndex from michel-events branch. 2006-10-15 RELEASE 2.3.3 ======================== Added TriXParser, N3Serializer and TurtleSerializer. Added events to store interface: StoreCreated, TripleAdded and TripleRemoved. Added Journal Reader and Writer. Removed BerkeleyDB level journaling. Added support for triple quoted Literal's. Fixed some corner cases with Literal comparison. Fixed PatternResolution for patterns that return contexts only. Fixed NodePickler not to choke on unhashable objects. Fixed Namespace's ```__getattr__``` hack to ignore names starting with __ Added SPARQL != operator. Fixed query result ```__len__``` (more efficient). Fixed and improved RDFa parser. redland patches from http://rdflib.net/pipermail/dev/2006-September/000069.html various patches for the testsuite - http://rdflib.net/pipermail/dev/2006-September/000069.html 2006-08-01 RELEASE 2.3.2 ======================== Added SPARQL query support. Added XSD to/from Python datatype support to Literals. Fixed ConjunctiveGraph so that it is a proper subclass of Graph. Added Deprecation Warning when BackwardCompatGraph gets used. Added RDFa parser. Added Collection Class for working with RDF Collections. Added method to Graph for testing connectedness Fixed bug in N3 parser where identical BNodes were not being combined. Fixed literal quoting in N3 serializer. Fixed RDF/XML serializer to skip over N3 bits. Changed Literal and URIRef instantiation to catch UnicodeDecodeErrors - which were being thrown when the default decoding method (ascii) was hitting certain characters. 
Changed Graph's bind method to also override the binding in the case of an existing generated bindings. Added FOPLRelationalModel - a set of utility classes that implement a minimal Relational Model of FOPL implemented as a SQL database (uses identifier/value interning and integer half-md5-hashes for space and index efficiency). Changed MySQL store to use FOPLRelationalModel plus fixes and improvements. Added more test cases. Cleaned up source code to follow pep8 / pep257. 2006-02-27 RELEASE 2.3.1 ======================== Added save method to BackwardCompatibleGraph so that example.py etc work again. Applied patch from Drew Perttula to add local_time_zone argument to util's date_time method. Fixed a relativize bug in the rdf/xml serializer. Fixed NameError: global name 'URIRef' is not defined error in BerkeleyDB.py by adding missing import. Applied patch for Seq to sort list by integer, added by Drew Hess. Added a preserve_bnode_ids option to rdf/xml parser. Applied assorted patches for tests (see http://tracker.asemantics.com/rdflib/ticket/8 ) Applied redland.diff (see http://tracker.asemantics.com/rdflib/ticket/9 ) Applied changes specified http://tracker.asemantics.com/rdflib/ticket/7 Added a set method to Graph. Fixed RDF/XML serializer so that it does not choke on n3 bits (rather it'll just ignore them) 2005-12-23 RELEASE 2.3.0 ======================== See http://rdflib.net/2.3.0/ for most up-to-date release notes Added N3 support to Graph and Store. Added Sean's n3p parser, and ntriples parser. BerkeleyDB implementation has been revamped in the process of expanding it to support the new requirements n3 requirements. It also now persists a journal -- more to come. detabified source files. Literal and parsers now distinguish between datatype of None and datatype of "". Store-agnostic 'fallback' implementation of REGEX matching (inefficient but provides the capability to stores that don't support it natively). 
Implemented as a 'wrapper' around any Store which replaces REGEX terms with None (before dispatching to the store) and whittles out results that don't match the given REGEX term expression(s). Store-agnostic 'fallback' implementation of transactional rollbacks (also inefficient but provides the capability to stores that don't support it natively). Implemented as a wrapper that tracks a 'thread-safe' list of reversal operations (for every add, track the remove call that reverts the store, and vice versa). Upon store.rollback(), execute the reverse operations. However, this doesn't guarantee durability, since if the system fails before the rollbacks are all executed, the store will remain in an invalid state, but it provides Atomicity in the best case scenario. 2005-10-10 RELEASE 2.2.3 ======================== Fixed BerkeleyDB backend to commit after an add and remove. This should help just a bit with those unclean shutdowns ;) Fixed use of logging so that it does not mess with the root logger. Thank you, Arve, for pointing this one out. Fixed Graph's value method to have default for subject in addition to predicate and object. Fixed Fourthought backend to be consistent with interface. It now supports an empty constructor and an open method that takes a configuration string. 2005-09-10 RELEASE 2.2.2 ======================== Applied patch from inkel to add encoding argument to all serialization related methods. Fixed XMLSerializer bug regarding default namespace bindings. Fixed namespace binding bug involving binding a second default namespace. Applied patch from Gunnar AAstrand Grimnes to add context support to ```__iadd__``` on Graph. (Am considering the lack of context support a bug. Any users currently using ```__iadd__```, let me know if this breaks any of your code.) Added Fourthought backend contributed by Chimezie Ogbuji. Fixed a RDF/XML parser bug relating to XMLLiteral and escaping. 
Fixed setup.py so that install does not try to uninstall (rename_old) before installing; there's now an uninstall command if one needs to uninstall. 2005-08-25 RELEASE 2.2.1 ======================== Fixed issue regarding Python2.3 compatibility. Fixed minor issue with URIRef's absolute method. 2005-08-12 RELEASE 2.1.4 ======================== Added optional base argument to URIRef. Fixed bug where load and parse had inconsistent behavior. Added a FileInputSource. Added skeleton sparql parser and test framework. Included pyparsing (pyparsing.sourceforge.net) for sparql parsing. Added attribute support to namespaces. 2005-06-28 RELEASE 2.1.3 ======================== Added Ivan's sparql-p implementation. Literal is now picklable. Added optional base argument to serialize methods about which to relativize. Applied patch to remove some dependencies on Python 2.4 features. Fixed BNode's n3 serialization bug (recently introduced). Fixed a collections related bug. 2005-05-13 RELEASE 2.1.2 ======================== Added patch from Sidnei da Silva that adds a sqlobject based backend. Fixed bug in PrettyXMLSerializer (rdf prefix decl was missing sometimes) Fixed bug in RDF/XML parser where empty collections where causing exceptions. 2005-05-01 RELEASE 2.1.1 ======================== Fixed a number of bugs relating to 2.0 backward compatibility. Fixed split_uri to handle URIs with _ in them properly. Fixed bug in RDF/XML handler's absolutize that would cause some URIRefs to end in ## Added check_context to Graph. Added patch the improves IOMemory implementation. 2005-04-12 RELEASE 2.1.0 ======================== Merged TripleStore and InformationStore into Graph. Added plugin support (or at least cleaned up, made consistent the plugin support that existed). Added value and seq methods to Graph. Renamed prefix_mapping to bind. Added namespaces method that is a generator over all prefix, namespace bindings. Added notion of NamespaceManager. 
Added couple new backends, IOMemory and ZODB. 2005-03-19 RELEASE 2.0.6 ======================== Added pretty-xml serializer (inlines BNodes where possible, typed nodes, Collections). Fixed bug in NTParser and n3 methods where not all characters where being escaped. Changed label and comment methods to return default passed in when there is no label or comment. Moved methods to Store Class. Store no longer inherits from Schema. Fixed bug involving a case with rdf:about='#' Changed InMemoryBackend to update third index in the same style it does the first two. 2005-01-08 RELEASE 2.0.5 ======================== Added publicID argument to Store's load method. Added RDF and RDFS to top level rdflib package. 2004-10-14 RELEASE 2.0.4 ======================== Removed unfinished functionality. Fixed bug where another prefix other than rdf was getting defined for the rdf namespace (causing an assertion to fail). Fixed bug in serializer where nodeIDs were not valid NCNames. 2004-04-21 RELEASE 2.0.3 ======================== Added missing "from __future__ import generators" statement to InformationStore. Simplified RDF/XML serializer fixing a few bugs involving BNodes. Added a reset method to RDF/XML parser. Changed 'if foo' to "if foo is not None" in a few places in the RDF/XML parser. Fully qualified imports in rdflib.syntax {parser, serializer}. Context now goes through InformationStore (was bypassing it going directly to backend). 2004-03-22 RELEASE 2.0.2 ======================== Improved performance of Identifier equality tests. Added missing "from __future__ import generators" statements needed to run on Python2.2. Added alternative to shlib.move() if it isn't present. Fixed bug that occurred when specifying a backend to InformationStore's constructor. Fixed bug recently introduced into InformationStore's remove method. 2004-03-15 RELEASE 2.0.1 ======================== Fixed a bug in the SleepyCatBackend multi threaded concurrency support. 
(Tested fairly extensively under the following conditions: multi threaded, multi process, and both). > NOTE: fix involved change to database format -- so 2.0.1 will not be > able to open databases created with 2.0.0 Removed the use of the Concurrent wrapper around InMemoryBackend and modified InMemoryBackend to handle concurrent requests. (Motivated by Concurrent's poor performance on bigger TripleStores.) Improved the speed of len(store) by making backends responsible for implementing ```__len__```. Context objects now have a identifier property. 2004-03-10 RELEASE 2.0.0 ======================== Fixed a few bugs in the SleepyCatBackend multi process concurrency support. Removed rdflib.Resource Changed remove to now take a triple pattern and removed remove_triples method. Added ```__iadd__``` method to Store in support of store += another_store. 2004-01-04 RELEASE 1.3.2 ======================== Added a serialization dispatcher. Added format arg to save method. Store now remembers prefix/namespace bindings. Backends are now more pluggable ... 2003-10-14 RELEASE 1.3.1 ======================== Fixed bug in serializer where triples where only getting serialized the first time. Added type checking for contexts. Fixed bug that caused comparisons with a Literal to fail when the right hand side was not a string. Added DB_INIT_CDB flag to SCBacked for supporting multiple reader/single writer access Changed rdf:RDF to be optional to conform with latest spec. Fixed handling of XMLLiterals 2003-04-40 RELEASE 1.3.0 ======================== Removed bag_id support and added it to OLD_TERMS. Added a double hash for keys in SCBacked. Fixed _HTTPClient so that it no longer removes metadata about a context right after it adds it. Added a KDTreeStore and RedlandStore backends. Added a StoreTester. 2003-02-28 RELEASE 1.2.4 ======================== Fixed bug in SCBackend where language and datatype information where being ignored. Fixed bug in transitive_subjects. 
Updated some of the test cases that where not up to date. async_load now adds more http header and error information to the InformationStore. 2003-02-11 RELEASE 1.2.3 ======================== Fixed bug in load methods where relative URLs where not being absolutized correctly on Windows. Fixed serializer so that it throws an exception when trying to serialize a graph with a predicate that can not be split. 2003-02-07 RELEASE 1.2.2 ======================== Added an exists method to the BackwardCompatibility mixin. Added versions of remove, remove_triples and triples methods to the BackwardCompatility mixin for TripleStores that take an s, p, o as opposed to an (s, p, o). 2003-02-03 RELEASE 1.2.1 ======================== Added support for parsing XMLLiterals. Added support for proper charmod checking (only works in Python2.3). Fixed remaining rdfcore test cases that where not passing. Fixed windows bug in AbstractInformationStore's run method. 2003-01-02 RELEASE 1.2.0 ======================== Added systemID, line #, and column # to error messages. BNode prefix is now composed of ascii_letters instead of letters. Added a bsddb backed InformationStore. Added an asynchronous load method, methods for scheduling context updates, and a run method. 2002-12-16 RELEASE 1.1.5 ======================== Introduction of InformationStore, a TripleStore with the addition of context support. Resource ```__getitem__``` now returns object (no longer returns a Resource for the object). Fixed bug in parser that was introduced in last release regaurding unqualified names. 2002-12-10 RELEASE 1.1.4 ======================== Interface realigned with last stable release. Serializer now uses more of the abbreviated forms where possible. Parser optimized and cleaned up. Added third index to InMemoryStore. The load and parse methods now take a single argument. Added a StringInputSource for to support parsing from strings. 
Renamed rdflib.BTreeTripleStore.TripleStore to rdflib.BTreeTripleStore.BTreeTripleStore. Minor reorganization of mix-in classes. 2002-12-03 RELEASE 1.1.3 ======================== BNodes now created with a more unique identifier so BNodes from different sessions do not collide. Added initial support for XML Literals (for now they are parsed into Literals). Resource is no longer a special kind of URIRef. Resource no longer looks at range to determine default return type for ```__getitem__```. Instead there is now a get(predicate, default) method. 2002-11-21 RELEASE 1.1.2 ======================== Fixed Literal's ```__eq__``` method so that Literal('foo')=='foo' etc. Fixed Resource's ```__setitem__``` method so that it does not raise a dictionary changed size while iterating exception. 2002-11-09 RELEASE 1.1.1 ======================== Resource is now a special kind of URIRef Resource's ```__getitem__``` now looks at rdfs:range to determine return type in default case. 2002-11-05 RELEASE 1.1.0 ======================== # A new development branch Cleaned up interface and promoted it to SIR: Simple Interface for RDF. Updated parser to use SAX2 interfaces instead of using expat directly. Added BTreeTripleStore, a ZODB BTree TripleStore backend. And a default pre-mixed TripleStore that uses it. Synced with latest (Editor's draft) RDF/XML spec. Added datatype support. Cleaned up interfaces for load/parse: removed generate_path from loadsave andrenamed parse_URI to parse. 2002-10-08 RELEASE 0.9.6 ======================== # The end of a development branch BNode can now be created with specified value. Literal now has a language attribute. Parser now creates Literals with language attribute set appropriately as determined by xml:lang attributes. TODO: Serializer-Literals-language attribute TODO: Change ```__eq__``` so that Literal("foo")=="foo" etc TripleStores now support "in" operator. 
For example: if (s, p, o) in store: print "Found ", s, p, o Added APIs/object for working at level of a Resource. NOTE: This functionality is still experimental Consecutive Collections now parse correctly. 2002-08-06 RELEASE 0.9.5 ======================== Added support for rdf:parseType="Collection" Added items generator for getting items in a Collection Renamed rdflib.triple_store to rdflib.TripleStore to better follow python style conventions. Added an Identifier Class Moved each node into its own Python module. Added rdflib.util with a first and uniq function. Added a little more to example.py Removed generate_uri since we have BNodes now. 2002-07-29 RELEASE 0.9.4 ======================== Added support for proposed rdf:nodeID to both the parser and serializer. Reimplemented serializer which now nests things where possible. Added partial support for XML Literal parseTypes. 2002-07-16 RELEASE 0.9.3 ======================== Fixed bug where bNodes where being created for nested property elements when they where not supposed to be. Added lax mode that will convert rdf/xml files that contain bare IDs etc. Also, lax mode will only report parse errors instead of raising exceptions. Added missing check for valid attribute names in the case of production 5.18 of latest WD spec. 2002-07-05 RELEASE 0.9.2 ======================== Added missing constants for SUBPROPERTYOF, ISDEFINEDBY. Added test case for running all of the rdf/xml test cases. Reimplemented rdf/xml parser to conform to latest WD. 2002-06-10 RELEASE 0.9.1 ======================== There is now a remove and a remove_triples (no more overloaded remove). Layer 2 has been merged with layer 1 since there is no longer a need for them to be separate layers. The generate_uri method has moved to LoadSave since triple stores do not have a notion of a uri. [Also, with proper bNode support on its way the need for a generate_uri might not be as high.] Fixed bug in node's n3 function: URI -> URIRef. 
Replaced string based exceptions with class based exceptions. Added PyUnit TestCase for parser.py Added N-Triples parser. Added ```__len__``` and ```__eq__``` methods to store interface. 2002-06-04 RELEASE 0.9.0 ======================== Initial release after being split from redfootlib. rdflib-6.1.1/CONTRIBUTORS000066400000000000000000000015521415774155300146600ustar00rootroot00000000000000# See https://github.com/RDFLib/rdflib/graphs/contributors Aaron Swartz Andrew Eland Alex Nelson Andrew Kuchling Ashley Sommer Arve Knudsen Benjamin Cogrel Boris Pelakh Chimezie Ogbuji Chris Markiewicz Chris Mungall Dan Scott Daniel Krech David H Jones Don Bowman Donny Winston Drew Perttula Edmond Chuc Elias Torres Florian Ludwig Gabe Fierro Gerhard Weis Graham Higgins Graham Klyne Gunnar AAstrand Grimnes Harold Solbrig Ivan Herman Iwan Aucamp Jamie McCusker Jeroen van der Ham Jerven Bolleman Joern Hees Juan José González Kendall Clark Leandro López Lucio Torre Mark Watts Michel Pelletier Natanael Arndt Nacho Barrientos Arias Nicholas J. Car Niklas Lindström Pierre-Antoine Champin Phil Dawes Phillip Pearson Ron Alford Remi Chateauneu Sidnei da Silva Simon McVittie Stefan Niederhauser Stig B. Dørmænen Tom Gillespie Thomas Kluyver Urs Holzer William Waites rdflib-6.1.1/LICENSE000066400000000000000000000027641415774155300140130ustar00rootroot00000000000000BSD 3-Clause License Copyright (c) 2002-2021, RDFLib Team All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. 
Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. rdflib-6.1.1/MANIFEST.in000066400000000000000000000003751415774155300145400ustar00rootroot00000000000000include CHANGELOG.md include LICENSE include README.md include CONTRIBUTORS include ez_setup.py include skiptests.list recursive-include rdflib *.py recursive-include examples *.py graft test graft docs prune docs/_build global-exclude *.pyc *$py.class rdflib-6.1.1/Makefile000066400000000000000000000007441415774155300144420ustar00rootroot00000000000000tests: docker-compose -f docker-compose.tests.yml up test-runner docker-compose -f docker-compose.tests.yml down .PHONY: build build: docker-compose -f docker-compose.tests.yml build coverage: docker-compose -f docker-compose.tests.yml up test-runner-coverage docker-compose -f docker-compose.tests.yml down reformat: black --config ./black.toml . check-format: black --config ./black.toml --check . 
check-types: docker-compose -f docker-compose.tests.yml up check-types rdflib-6.1.1/README.md000066400000000000000000000174551415774155300142700ustar00rootroot00000000000000![](docs/_static/RDFlib.png) RDFLib ====== [![Build Status](https://drone.rdflib.ashs.dev/api/badges/RDFLib/rdflib/status.svg?ref=refs/heads/master)](https://drone.rdflib.ashs.dev/RDFLib/rdflib/branches) [![Coveralls branch](https://img.shields.io/coveralls/RDFLib/rdflib/master.svg)](https://coveralls.io/r/RDFLib/rdflib?branch=master) [![GitHub stars](https://img.shields.io/github/stars/RDFLib/rdflib.svg)](https://github.com/RDFLib/rdflib/stargazers) [![PyPI](https://img.shields.io/pypi/v/rdflib.svg)](https://pypi.python.org/pypi/rdflib) [![PyPI](https://img.shields.io/pypi/pyversions/rdflib.svg)](https://pypi.python.org/pypi/rdflib) RDFLib is a pure Python package for working with [RDF](http://www.w3.org/RDF/). RDFLib contains most things you need to work with RDF, including: * parsers and serializers for RDF/XML, N3, NTriples, N-Quads, Turtle, TriX, Trig and JSON-LD * a Graph interface which can be backed by any one of a number of Store implementations * store implementations for in-memory, persistent on disk (Berkeley DB) and remote SPARQL endpoints * a SPARQL 1.1 implementation - supporting SPARQL 1.1 Queries and Update statements * SPARQL function extension mechanisms ## RDFlib Family of packages The RDFlib community maintains many RDF-related Python code repositories with different purposes. For example: * [rdflib](https://github.com/RDFLib/rdflib) - the RDFLib core * [sparqlwrapper](https://github.com/RDFLib/sparqlwrapper) - a simple Python wrapper around a SPARQL service to remotely execute your queries * [pyLODE](https://github.com/RDFLib/pyLODE) - An OWL ontology documentation tool using Python and templating, based on LODE. 
Please see the list for all packages/repositories here: * ## Versions & Releases * `6.0.1-alpha` current `master` branch * `6.x.y` current release and support Python 3.7+ only. Many improvements over 5.0.0 * `5.x.y` supports Python 2.7 and 3.4+ and is [mostly backwards compatible with 4.2.2](https://rdflib.readthedocs.io/en/stable/upgrade4to5.html). See for the release schedule. ## Documentation See for our documentation built from the code. Note that there are `latest`, `stable` `5.0.0` and `4.2.2` documentation versions, matching releases. ## Installation The stable release of RDFLib may be installed with Python's package management tool *pip*: $ pip install rdflib Alternatively manually download the package from the Python Package Index (PyPI) at https://pypi.python.org/pypi/rdflib The current version of RDFLib is 6.0.0, see the ``CHANGELOG.md`` file for what's new in this release. ### Installation of the current master branch (for developers) With *pip* you can also install rdflib from the git repository with one of the following options: $ pip install git+https://github.com/rdflib/rdflib@master or $ pip install -e git+https://github.com/rdflib/rdflib@master#egg=rdflib or from your locally cloned repository you can install it with one of the following options: $ python setup.py install or $ pip install -e . ## Getting Started RDFLib aims to be a pythonic RDF API. RDFLib's main data object is a `Graph` which is a Python collection of RDF *Subject, Predicate, Object* Triples: To create graph and load it with RDF data from DBPedia then print the results: ```python from rdflib import Graph g = Graph() g.parse('http://dbpedia.org/resource/Semantic_Web') for s, p, o in g: print(s, p, o) ``` The components of the triples are URIs (resources) or Literals (values). 
URIs are grouped together by *namespace*, common namespaces are included in RDFLib: ```python from rdflib.namespace import DC, DCTERMS, DOAP, FOAF, SKOS, OWL, RDF, RDFS, VOID, XMLNS, XSD ``` You can use them like this: ```python from rdflib import Graph, URIRef, Literal from rdflib.namespace import RDFS g = Graph() semweb = URIRef('http://dbpedia.org/resource/Semantic_Web') type = g.value(semweb, RDFS.label) ``` Where `RDFS` is the RDFS Namespace, `g.value` returns an object of the triple-pattern given (or an arbitrary one if more exist). Or like this, adding a triple to a graph `g`: ```python g.add(( URIRef("http://example.com/person/nick"), FOAF.givenName, Literal("Nick", datatype=XSD.string) )) ``` The triple (in n-triples notation) ` "Nick"^^ .` is created where the property `FOAF.giveName` is the URI `` and `XSD.string` is the URI ``. You can bind namespaces to prefixes to shorten the URIs for RDF/XML, Turtle, N3, TriG, TriX & JSON-LD serializations: ```python g.bind("foaf", FOAF) g.bind("xsd", XSD) ``` This will allow the n-triples triple above to be serialised like this: ```python print(g.serialize(format="turtle")) ``` With these results: ```turtle PREFIX foaf: PREFIX xsd: foaf:givenName "Nick"^^xsd:string . ``` New Namespaces can also be defined: ```python dbpedia = Namespace('http://dbpedia.org/ontology/') abstracts = list(x for x in g.objects(semweb, dbpedia['abstract']) if x.language=='en') ``` See also [./examples](./examples) ## Features The library contains parsers and serializers for RDF/XML, N3, NTriples, N-Quads, Turtle, TriX, JSON-LD, RDFa and Microdata. The library presents a Graph interface which can be backed by any one of a number of Store implementations. This core RDFLib package includes store implementations for in-memory storage and persistent storage on top of the Berkeley DB. A SPARQL 1.1 implementation is included - supporting SPARQL 1.1 Queries and Update statements. 
RDFLib is open source and is maintained on [GitHub](https://github.com/RDFLib/rdflib/). RDFLib releases, current and previous are listed on [PyPI](https://pypi.python.org/pypi/rdflib/) Multiple other projects are contained within the RDFlib "family", see . ## Running tests ### Running the tests on the host Run the test suite with `pytest`. ```shell pytest ``` ### Running test coverage on the host with coverage report Run the test suite and generate a HTML coverage report with `pytest` and `pytest-cov`. ```shell pytest --cov ``` ### Running the tests in a Docker container Run the test suite inside a Docker container for cross-platform support. This resolves issues such as installing BerkeleyDB on Windows and avoids the host and port issues on macOS. ```shell make tests ``` Tip: If the underlying Dockerfile for the test runner changes, use `make build`. ### Running the tests in a Docker container with coverage report Run the test suite inside a Docker container with HTML coverage report. ```shell make coverage ``` ### Viewing test coverage Once tests have produced HTML output of the coverage report, view it by running: ```shell pytest --cov --cov-report term --cov-report html python -m http.server --directory=htmlcov ``` ## Contributing RDFLib survives and grows via user contributions! Please read our [contributing guide](https://rdflib.readthedocs.io/en/stable/developers.html) to get started. Please consider lodging Pull Requests here: * You can also raise issues here: * ## Support & Contacts For general "how do I..." queries, please use https://stackoverflow.com and tag your question with `rdflib`. 
Existing questions: * If you want to contact the rdflib maintainers, please do so via: * the rdflib-dev mailing list: * the chat, which is available at [gitter](https://gitter.im/RDFLib/rdflib) or via matrix [#RDFLib_rdflib:gitter.im](https://matrix.to/#/#RDFLib_rdflib:gitter.im) rdflib-6.1.1/admin/000077500000000000000000000000001415774155300140655ustar00rootroot00000000000000rdflib-6.1.1/admin/README.md000066400000000000000000000024621415774155300153500ustar00rootroot00000000000000# Admin Tools Tools to assist with RDFlib releases, like extracting all merged PRs from GitHub since last release. ## Release procedure 1. merge all PRs for the release 2. pass all tests * `python run_tests.py` 3. black everything * use the config, e.g. `black --config black.toml .` in main dir 4. build docs - check for errors/warnings there * `python setup.py build_sphinx` 5. alter version & date in rdflib/__init__.py 6. update: * CHANGELOG.md * CONTRIBUTORS * use scripts here to generate "PRs since last release" * LICENSE (the date) * setup.py (the long description) 7. update admin steps (here) 8. push to PyPI * `pip3 install twine wheel` * `python3 setup.py bdist_wheel sdist` * `twine upload ./dist/*` 9. Make GitHub release * `git tag ` * `git push --tags` * go to the tagged version, e.g. https://github.com/RDFLib/rdflib/releases/tag/6.0.0 * edit the release' notes there (likely copy from CHANGELOG) 11. Build readthedocs docco * `latest` and `stable` need to be built at least * best to make sure the previous (outgoing) release has a number-pegged version, e.g. 5.0.0 12. update the rdflib.dev website page 14. Update the GitHub master version * e.g. 
for release 6.0.2, change version to 6.0.3a and push to GitHub rdflib-6.1.1/admin/get_merged_prs.py000066400000000000000000000017741415774155300174360ustar00rootroot00000000000000"""Get all merged PRs since last release, save them to a JSON file""" import httpx import json from datetime import datetime r = httpx.get( "https://api.github.com/repos/rdflib/rdflib/pulls", params={ "state": "closed", "per_page": 100, "page": 0, # must get all pages up to date of last release }, ) prs = [] if r.status_code == 200: for pr in r.json(): if pr["merged_at"] is not None: d = datetime.strptime(pr["merged_at"], "%Y-%m-%dT%H:%M:%SZ") if isinstance(d, datetime): if d > datetime.strptime("2021-10-10", "%Y-%m-%d"): prs.append( { "url": pr["url"], "title": pr["title"], "merged_at": pr["merged_at"], } ) with open("prs.json", "w") as f: json.dump(sorted(prs, key=lambda d: d["merged_at"], reverse=True), f) else: print("ERROR") rdflib-6.1.1/admin/print_prs.py000066400000000000000000000007441415774155300164640ustar00rootroot00000000000000"""Print all PRs in saved JSON file in Markdown list for CHANGELOG""" import json with open("prs.json") as f: for pr in sorted(json.load(f), key=lambda k: k["merged_at"], reverse=True): if not pr["title"].startswith("Bump"): id = pr["url"].replace( "https://api.github.com/repos/RDFLib/rdflib/pulls/", "" ) u = f"https://github.com/RDFLib/rdflib/pull/{id}" print(f"""* {pr['title']}\n [PR #{id}]({u})""") rdflib-6.1.1/black.toml000066400000000000000000000011411415774155300147430ustar00rootroot00000000000000[tool.black] required-version = "21.9b0" line-length = "88" skip-string-normalization = true target-version = ['py37'] include = '\.pyi?$' exclude = ''' ( /( \.eggs # exclude a few common directories in the | \.git # root of the project | \.hg | \.mypy_cache | \.pytest_cache | \.tox | \.venv | \.github | _build | htmlcov | benchmarks | examples # No need to Black examples | test # Tests are a mess, don't black them | test_reports | rdflib.egg-info | 
buck-out | build | dist | venv )/ ) ''' rdflib-6.1.1/docker-compose.tests.yml000066400000000000000000000010731415774155300175740ustar00rootroot00000000000000services: test-runner: build: context: . dockerfile: test/Dockerfile volumes: - .:/rdflib working_dir: /rdflib command: ["pytest"] test-runner-coverage: build: context: . dockerfile: test/Dockerfile volumes: - .:/rdflib working_dir: /rdflib command: ["pytest", "--cov"] check-types: build: context: . dockerfile: test/Dockerfile volumes: - .:/rdflib working_dir: /rdflib command: ["python", "-m", "mypy", "--show-error-context", "--show-error-codes" ,"rdflib"] rdflib-6.1.1/docs/000077500000000000000000000000001415774155300137255ustar00rootroot00000000000000rdflib-6.1.1/docs/Makefile000066400000000000000000000056611415774155300153750ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " changes to make an overview of all changed/added/deprecated items" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: -rm -rf _build/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html @echo @echo "Build finished. 
The HTML pages are in _build/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) _build/dirhtml @echo @echo "Build finished. The HTML pages are in _build/dirhtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in _build/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) _build/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in _build/qthelp, like this:" @echo "# qcollectiongenerator _build/qthelp/rdflib.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile _build/qthelp/rdflib.qhc" latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex @echo @echo "Build finished; the LaTeX files are in _build/latex." @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ "run these through (pdf)latex." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes @echo @echo "The overview file is in _build/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in _build/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) _build/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in _build/doctest/output.txt." 
rdflib-6.1.1/docs/_static/000077500000000000000000000000001415774155300153535ustar00rootroot00000000000000rdflib-6.1.1/docs/_static/ContextHierarchy.png000066400000000000000000000563161415774155300213570ustar00rootroot00000000000000PNG  IHDR agAMAOX2tEXtSoftwareAdobe ImageReadyqe<PLTE񑑑߷̕ǵ℄ І yyyMMMß梢 $$$͘~~~{{{œ݂DDD@@@000www***}}}Īnnn ӻ)))... !!!444֧QQQ|||(((---,,,PPPOOOZZZ;;; """WWWiii222ooosssUUU[[[999'''eee:::///xxx&&&kkk___???qqq666GGGKKKjjjaaalll<<<^^^111FFFNNN]]]fffVVV%%%EEEdddTTTHHH\\\RRRCCCppp888AAA333rrr>>>LLLhhhIII===tttggg bbbzzz+++777SSSuuu###YYY555JJJcccBBB```mmmXXXvvvZO tRNSfYXLIDATx]XU  E"(bwbwwwwwvwc]k]Wuݎ@{[͝93sΜ.Y_{nU]=haf:?;W5ίcEjXzN_s|!j6A=${5?d.P6s^a Jյ[7{VOl Qw/VO>~}%ܽmRyÚ4&)/eޮ?l{v:-s'T%&"Gmٳ&%*.62:x֞ǣ>}'iпm:xTѿ o?Mg~Qf:3}kZ<8ƈBZUrO]uiZк;sآce|c "ٜY|c-Uj:q{^}\vt0DoV.'=|in_ OYOl1A֔R KV8*woQw.o*߬z.jۑ awc%hrǺrynPoҥO妞,c?73Ӻ=j dB~ +@:;*U[oZb'IW +"9sCq}46vMeݔb&\h]èOrŬ4'Eyް*a!S!gdjá۱7o=ξ>Ҩ0$~Lx-X=Mۅo2o&x9(g|7-($)aAB?Α6sF+5#%tiA?/1[؛;읎Oo\ҶaaB "QuMwK}HNUGSgԕQ|hnX7/Mošg] ͫr]kK1 oNs$k} k\ :X +=^8gPH7Z圯9uYO. ZFݫvuWH̥Tt,- ;x2\ Ov)M9QMnXȚ_IgFkK)Lì5z kZh&nsw M{ulwYcՈճ $K?Y,\u}Tu'=ZR]{p\ W_X~/-ӧԅejTgHnKT8tr0WJCeOWRي;n38ۚog=~ 5 @p(ž,\:$@Va 濴[bIAfK]DK33s%Db1@o1,7A1ROH2sx;k2_*5Ћj=gD%%ѻ~!^5D*(Pps9Un+t msѮHF)ot>_n;]Z M0gO'xG]t~ftsH:6<[YeZ(:i4V}{k" `Y`sY.H$$Q\.2ruˎK$ .:2J~, ܙ.aSIFӽOHkXhiX-{ (( RGӀ9 㗟jcΖ([$x66!R;͊ɷ,`UYD>'bīM_d[锕ڱC`V4 wV]8l(9 %bīcySnT=n1z.Qzw+ZӇc0EB8t9GMfRvP>j#3Ir>1[h;=Lc/d8#^ WѷBGվ[h}irwVBt{ NQfkI .mr;lO6䮷GaG7@4"94@:^Hj{[߫#b j `mYʈWr9T1{?̫-,,WF:aw*rX[T ͥh/9&4f,/~".CFj۵B,!6&0~eX9Y Pדe`e;Bo7,pL$X6 *kpᛸ7>|sq'1p;<)RFJSSz|l H/,eKiN\!5 \$ǖQ6 hfo>Fa=1Ejܝla&Q[#.kHr^'?c#9s#+ Q6!vK KlMuCr>PaN pT"Sz g:fY \6i@}[2$)ؒG&/ n̛ՙB) 5z Jwɽp,Y̜\/=6)78@ʗtQG]](! 
P@itw>#y3PtH*C1ԉX)^U`lE5Y̭ʡ:];-tTd(t=s 3 @+[ը_fMt 5LymRq \)M2T)J؛( ]bLK#tSRgqp̉yLo5v.!d0X; c}k<Ê4Ycj$ٺon3.!&|Ȳ|Bt}co41`]""m\mfo|Ѕ3= tQ,s/l9@,~KM:@2lbeYa\W EkK|_Tպ:nX9@J㨖: nCr͓\RKk0S~OөdH*xv +˜f,0P |-`_͒B̴IQ][̲ a+qZIMť~FH{&s )zK]BHufY)Ot &=U7Ŷ*(] LNp:w=u *LvBzAF>: ws; )O~L*trF,%0z |fw,6Z!US6_,Hǒ]t9֬{|4`낪%K^j䙲st ͗j̶m?(1IvoMn"=d'/җ@ nv Z̚y~} 6Iٕ^`*nĽ32;PFoj'p/c `47)4u %;PzCݥ`DSV"UUgYxKrBRAQͪS|NH[V TV6S\a0@H& YhT5 S  hXB7bWO]@5Jtrk6y#cm+d=6}(w)UqU{X- ]#nV*9Upv.i|Ԩ%O W< tZqL[īe.7_ _,c*0TcO ڄ qP,bu.eU&Ys^y=⩝"|t;JkE Ht 3T{Uܧ-OK{KahW.iMw2PpN{Jj|PV*2"֝&-`TJo`&ۢ3DY41TƏ$Jtsi ^җ<6s&)Mv/3v|f@=xU *Xr!ޚs$z?(X&ijrx8*M lQޠ*c)P}Em+`I?ċ^,~]HYjEL28vf.sK5m/]|T 5uwo3}1Y *WHx񬧛'Xzto_OxFsH. k/WQDX^rD"W1|y+4Tr?5W`mJ< bR>]{Ԓ}٨: Hr@x=Afjtaw~@|n':USr%Ԩ@%')$E+cm ضQ烑+#}] D(o~qʅ;IpeSd1G?!mxj8̶P@L0<݆8KۙTwl ]5!o$퀿6W]E<<r34K1H|qՖE݀ 1=F+mPYi'Y5x }0oҞ(Hn.nX`""oJ2jZz oTZyOUhZ"d?nhg=Y6Fw^J/brk{}<~BG@僡|}]+CCT;>`}Kf@E[3bD^Dn׋%{R|',jiar (&@f)+ sD1t o O:.whSC`O',lO&|wl*E<"2P'Hu^yN^_!TVS%RR?e9n F ʾ5}_ |V)|Dϊ3vhw-V+;o aD1N*JN;w+G@0jJR7ƥJMRӊxj/4U}VJg藧X wQ19>Tz\/͌R zMOD9k$jgVXLk#J>iq'Ϊf5Nkr"G `))N1M}%}z>Q0Ŋ+{-N>h hf'@1inA)ayZm%z >X5RNG Z6T8y@QN:}iڭ[_\n?FscYk۱k(ev)?XeJ#|Θl3sbj)3iZj X`M:{^AI(@Jj塚<݆8H;R/ |e& lؔxF_{֒ox2} Ww=@}sw$']]wtoB l~Y}ǺRfW9#^ùchMGGkC i򿞛XLjy.=pAfh,3| < ΟWFRSwuͫqo^+h3L9pzp_ױ9Hu_Rgq{+cSďCؿ1k o7Y#fn$@aRSfG?mfx{ٍ2赽 @,Y}w@ >F΅늑m$~ۘ7s:HsD~5a.Wsx5;Q/!n@j]dS<{b2+޺S'4Eȏ @[|I ? ᨢm}^ڒrwJ FW~˽R'=O&k^3BT% >0H)~dYsdݩk&ɃH/Sbnw6;jӲJ~C?p|貸Hsa1(w/Bj^5j/ ~W2teu,Ŏ{U x jG*Dsָa%Qyz r "Khen̘(C+͓{'9>RGyD ،?I/[Ӊr1Sj1yhAt[5ḱH1:FARq)Y&Sivlc{!2,*>N EN9#"uP1uK  J&|ٸx: Jy f+r0dTB*˭v,VTGkjNRyfZ( N@5˿E~kxqdNXuZHN' %h{ }VP^)\sjXBHq:IyIga "ߗuU)&R, )媳4i5"&\fǵߗbnA_ࢰg^8!;xmnjn Am>-اⅉsj@OBvJS1o8pI!je~W}i6cp#L~MYҴߠzfAAeQh)_Lޫi!IJ #~Np#w.)Fïz6eCnWP .S[-!_ mfaIUoD}AbQdC_$%O-9rx8\ox j#[.TXTlm>jEslFd':~ gid>&E͜:5ik70R8=ɱv~:Q ~㥝c@؟}օZ7}9BXw# ^1$9FXT]@()0B/'S3`!ЎtuR&D~hFD2h5Lj{ܷw큻o}.WGt%l;6! d- S1L?@sz07ΡM,13M}j ƨK;+" &Njmfeۓ&Ra`UO&&Y~_׬fEvdgYNR5Y1:wq6uK ŮR+'? 
jB0zǞjUya7)V6Ȫ$㔚^Rz /yy}]j2#KbNu \g $0?P"n ٵ @AcQk W i)l D>ԅUҫ|ɛ㗓yu(#-pXFz.jbaoa*TK5F 236pCуd'=rui򿋌\UWbD28 o9js1LH'xgX#%x8xtSڔ`DӀ=թ5WL1ۘ2*#̒dt'brxsXA8  ?zI<9f]~镁.]jl[%5^3ܦ3siև3Xȃ%ez'R{vcJ8OP{K-qR)Up%Ax]UMLTa~k8hy.B@s\?3ZGȻ7%~毬Rӗ{t/jl=)6)+.-e %kDRiVD3O/k61'h-1)DB kU1.%^,0f6&9䚇#c)WNq%|٫ OQK5*tC&+ bK˲mE"rnSWtm'.J[':@t>X 8ec5h k{^j<qfC%PYzs[ +Իj- Uy[Rǡ#b̄&ZSz:*]e,P(]2o ՊGqADž<4"/F/]S]?BJ,嵹)q pPT^rݲlc诧!RxljaFqS*2s\_1Ujf¢ D#++@Ȥż5n33F)Ժ #vm7I}tӥ wt^(KcU`l tYv!IdȪP)u#vOʉ6vJ`UX#/ֵ@"l2`N=!&NcO;݆;8@hC73l3j4 B^ʑU:tAM%2@2[u^qcZTm`Lkcuh̲Gα㡍FEwc&u/v.r88YW3"#'=qFZwIB2Ew)C#/m=; AL d)Ʀ1 B\Jrd q 1(,B89PBEqm݇-5h`8~*8G M|ڧ)v]j}'9AAL7XjXplw6TJҤ^s91MG+9b= 9!ڐMeĈ֤!tɐRS {y,qDBjkU#g@u42ps -;AKSj+;ȇmB ;# dӢ x pȪSOɘ//JdtaH7ii-qڭh?;D( :/`[I[ WҮ$iV<.VbܔnL#{G/cdPցEU޾Z|τGie #}kcf[w[!M 藝C ;I !TRQHS4rqAlx0R @utѶU]8 Hfԧ UU (xBnHS^~ @Hӻe |Gls5-Mdbcc#xC8wXvi8AGrٳ0@Lx*pui,'>>o| x->B駭+Pilb3 5V+#:ܺJ4*hXcu4Do;H$J/~s{Bį)J ,Djڕ&4r-Q\ [FR`J#E9/,⌴돑&@ZF~:޾*la_xET>J[U+.E|E!#fw3 +8*8'< @rWaCGӾ2MYHa1|]vc,>9dǨcUsL)'xoD;GYRV99Y^a:M ٕ׮9)ŵ&o_>KaʧN9~u7o9_]3_ 2 GN@fE\ I-Ɩ~*ȈX{2p.ΑՖA̖<uĺArR+8BH׃7(|G72eУ4ccF|-{m)Bbׯ=?_"?H,|{>n3uMK2s`6@H5_  _!BW=UTZW}eUɝO/&dCٲFl wF~hJV&wZNƤ gUubtKWPTHҷm=(|U߄)Ʒ-2BivoONB; ys T{Cj6'l+cj`psn ̔h EHϕDd?䭕 H#Oz090zYۓ 鵇3YA9NIS>["SVBJM ƀ  y<`bc1,GRsM9@W#V @ɵ>H oVSZ8xz^{Rw{m"B z4?a:n1YɠuؙL8r_o7$7@2׬^! >@[x;dWt3&Uo#iv0Rʤ|z9+~JY1A 9!_8ޖ_O&5!KLNǛ2?>yz"@n#A0Ƞ qE QAsTxkh\5^Vd%Fn潋ͦ"v c)5\l.9zM. $!{b@Jmg$ .=/y*xt8Ib¼/=;`dZ-WS}J0D /jiCWFT@pHI(l&JfS5@6m>OD2"q&f)0j<!b`o_L;ot#VE"ͣxOZ֝P$/@%z{3ڡ=+E``hgW?!cCuI(!? 
?`Ij!^vlEQMGm3E`識-cECF->T7&>@>yB^k'"o eX >%lwsPVtBN5$JKӴ0J ZP-a$ 2'ۛ hc(j2SHx K0hN #Duxo/c1 H̱r}~HC[zN3:T S9CڷmDǨn%B`:ڶ2DM9}M]" ՇԚ(fHo.ܚ$eWE)R藣g i-b'@<*o:d഼yd1PI̥gJYkv u ȾRl+;[si%a4rtYXRDW!''jq» >@2su m'd--38 Ԓ: 1[}^./@X5`ίks%Q7@5)>Rl^]MLg}K@ZwN|fVu*;6SLrFߕPDgR}^|*ۄ<| VhCnf F HW`=oM*ꪌw.a~%럻@R f@%Txh47o&ḑ2 ٧u]1E$C7ΎB;q2._u d?܄ɴmˇj]sKɍ(T|5f螐r(rƩF$D?,50=d&܀wEA;V\[^{^ReE$ڋŴgl @1Z}[c j"%;DHBD5SjxXqxV7L"Na}GTa2'$Qj'#H?*|4O$7sz?:ۯpjX:f-4=mzB8jD=Uph)n2EN$ MDi@+doarjYX&u{k/~ZL&HLͤ IRӶŷH#\W#qGa)/RqV\jv~xx/}g+ʶ™b>].|,Yյ-!m|Bm7rFwW;l6SjBj**g,%l SbtbҒ`ttIb3źOjF3龄 g4$K6QDۮ @6'}<bT;ܷ2k$쉣b.ۢX{wkm|KF^i5ji1aЭ([ӵ]o]5Y#mp6vL^k& =zK2 DɹqO|6`W%! $]*'L)]LF_bNM9N~dvmɰńt ``ċgt7NLp%מR?c]&j;Cb4SO@H0Y5't$HxBfpzM]LcԸm Bt#F:`27!K+1AT`V ]{ hGf.`C 2b!)2O kKF2`%?Ɯ.HE{+ Zzb-5ʿXZ{jq^[Nц-.m'$X_Ofh=qCZ](ߩ-)%Q n @^[- %<^9d:( : dYRni_Y L)C{zJ|yG.@$\Se}q7z`柑Υ(̉1fG۳BE \Gb _cjpaIy>4Ӕ>~`_.bd{>T<֔{ m#ƅ `OZt i;#ٯў͒X8] d֠ bw5Č=}ك˔4RIKn_ǹ @;햹:*Bx߄YJB?o&|BkHV£*Z2ۂ^9i~Eb;Ed'Ֆ\?,Y؃^ Iܒ@} Qhat^oꕎ ٲd ii͊X}Zn^!Y/Ym۶,dw{d/Ⱥ8B.$^~$&!f >buR|' ?bLk&Qs S0TTB/"MHLDdB:fu/r!s:6Vj[ovu!B=@__z\V+=1 ,O&^-e OˆG){`pB\Nl4 >ld @m.4<Țb9,{!Vr*r W%`~2v_Ƭdf^yK&ş.Nnߞ3vK1 ^w-uNs_17 |+ c4I* C~g]~MW|?uOSW uos%&u39INjvt(#^xXS'*JBr/I/pI8B"xd_oATwn* 4OokR Jg,yv M";FIQGY 8n';yOj}v[DÀa}.pt XSVoG7 B*KΖp YOEN'Bd5 1 2`5p\Lж wE>*?j+JĬDdpܝW _˜;-I! 
5&ǿXܽ,ztD^TA 0G=a`"ȸz5Q2 ?&9`4lWyx’:rHғ*0*w1ꔭk'FO %J>ˁ DJqwD]gmV|)xXsHd_Ur73YCo2UkRhy#3q=/v U 7fyAg9kُy*w'3K-C9s3N1e2k'o7,+j` ^rx-Mi Kn[\JؚT;*+zvW/s=`ov@NX:QLffyPv86ea U⋸:%Ֆ'ut=r|'\h]C ![+|Zߝ 7abߚSۗ忸ɛǣ>}+i䖆Zpi9sQ Z6z떧|~ 9BRvLsKQ' 0`*zqߋ/wN|.T|'/?[%$˼]u+nU8CvR$VCwɶ1+]՟0!4LW0m2 2 2 2 2 2 2 2 2 2 1(zDQ=V 4j+¥?ϺN:S@r[Ȧr}*TxV۟ZLNϘ+݋fHvyʐ хy*=yT]H„鲼/*KVj%NA 6>QY#<ة2)~8TW}קS$i~H\Vɣf!Q?@Qmv0G >nGg{J}A]17AȪ3g~* e?U|E9QJ?c\E?Ȁ'])5Ol(pSvrʹ ܱu ' +nX&YËko1lRC{ZgmgAv,݆ٲF0Fg )m/?mew7)Ual{>lEvgZya 덇;Ź#cK%OIJ&ddP`)ׯg/Çr2yڏ]f}i%ڝQz}tQ F&#9Q{6 ħafYcD\)D@HT}`W־wnС[a2y98 ?>eFc}R Ӽs]xR\.AC~4"o9c W@g۫bI-Y~q#K Ev_.?sqlr"rzA8s[m82&[ĩW.dr/;oGJqV}$DR S+U6s@uNHlс+dbui&A_P@:7l 2UƆs@Wa ~{Le ق2>,c2n2ۯM v'](2ҭ(]ԕ  Z AO @|& DFs ̼>CNZ6ꄙ oe՞K; t6̗9uG[z3-5L5f] 敐.,29oZ.@V! [le_y{$ -k0N~@/@&d"7d"f369 F4DtӖjT+Hu@vI2!G] z…iUed Y GpdKf?>[_ICgҬ?:Z=oM/gyY~avbŅl\vq_xa|qJVPirL ijyҐ\;((h_HtH㬠.H*j$7#l"Gt:9ga1Db5k˕3f3@v xzw_F òă\fށU4*~в~X ' ɤ^,_˭a[nFd 3>H;#3_F ɩ K~ L2".V:6{z HH}8 IFuHra7X~JNߎ| 21J\  @fos>݃(F62\ .)8}7"IfE)*?)ȹܛ)%3˄91%z9@d[nQH2d/ s(ۢ~ id6dÇ/l.,st$\Bl3dUR![o Xp"x31b;eT=1M>7rQKD'-;>lBdv;cyd+w9@S?cr{TAg D]kѐ)%2YB/| fg$, pD`v˺^ NMB-d.Rd+1B:a#wectiB`r8Ku {ڭ:Z)0\NN 0p@f"mŏg)5< I NauOcO 'w .$ܬw7prA6S'1ϭ4+YYSZWdݻh^ ζ -HAF΍lf郲)H͗[E!`}C<9).9&28{aN՜K*I#zZn~ը 7ϳ`{Rr]> Sv#G' pqM&`]+ʲs^Y.W]|H%3.}`td1 \)g+8Q+UٜJH!ĺ9)u"),T/[*9*W#|:d|t_J{-v+ͺ,mFr3d}RO}>C^TeT=32ӈ-#%2/r^NUWsjum}}kqRoUN,V\NZ;p '/^^vȺ~ )/kbj947 *#ĉJ| 99"J($lYe$"u |%#"6A/K!W Bwo䄗I»@8ǎ<W';붦;!+i ~Fߔo5os>U88Fξ[SrtjbKGDC pHYs B(xtIME$& IDATxyx]Uk9 [X{kB(1U`'R%0|ohRb/6JWKsw<bpJEUS AJཛvb*[+7X V!f:Q 5Ͽ^O :!M|DH*Xir4W)6A(ĘK+ip"d?Ko(2ѥyfMN R|u}L[ :!r*(>hDR/o[4F=pY5E y;-B Ͽr tBuU)VX`5/wxݍP 3.NX`At6E{u4-+VDg ݷ74rf]:_<H;xM@Mź%K%QSm_]* dxTM;tki I4 .?ӵD¾՛6 ?CEϞ%nRG| [$\#b5qi7/cƾ՛7~Uon4@|&\d.Xx A=1&!/5ƌK =T4 YӥEaN eE CHe{9pW&dOX8c4Y(e}m/;pB$!5SDSSW'߸M!q2I_PnځPЉئDMʅxt-/o@N1;m@(DHMΌwIj,E5} y< wmUXh BA'zʣUk_ ; h BA':<0.J_ըX:o? 
OBx@(Dό}w׬z'$D^:]gѨ}.5}E[$@NW@(DfB!i==niQz MS NtTQoo HLA't?TEɉ>G8L/ !2 ϑ̈h)k]P׷F NtȊf^@1rPЉHטѯiY\tBA'tRt2S"zN(Dwأ%ɔ/,J]BA'$E8(,tA*@(D_9Jx} ,<:gOP3d,HSF脂Nt71DV2W2K2xPЉbhe˱,/ɳ.I & thwW`݇& tSeE8V1naB( oMB"ͦ`f\oyq!$p""Fe̵KMcՀG.&?!@7>W: ? kB_b@xX}0`N9^XҜPz9!tQOT a@ąowګwlؾGL۟x+qp}4;Q'an tB -ˎuV_d?%2xlD91^$0sVk饵p䝧 :!g/_i[l1VzVˀ8MϙS1o@޹悰@0K 떼HI{gs`Bk&ki~ TMHK"5 YA@p{1 Gn+2&%ť>ؿz3aBA' s wUzkߥ^8i]UUXR^crܭE:Rcgf0{Ra]ґlt;44.<ކGΣ; Nѐ2eG L.N(脂:*8 J:J!Hx+0 7K=wJ!3S0~d&1iE9iIi癓=ml : : _ :υfbR8o4"_0T}8R7=mt@(P wk!:΂:_rU q2>,&7@spϭȩN r'$=USuM-ǨOvXp׍%O-deB{v;P !U?cS)E]Wcȝ<އC';pщ3-.4ǃw/'Mg%a|I&&=LM׌~յ׆9RJ$N1Rv*zO ~z;*:Cy<::ooRfO*+&bɬGR tcGzY-P !C3u}ŜxNu^z)j;cGy Mʅxei8!٩Dho/Ǧ, \%>,,m^yOo<74AKzlCyYN -oC) ̉uBA'cjM 7aء?+U<~44D?6x*HND(脐n-vZH5>ORf8& O?y@FOx5+~KwnP !Y7/b+3VKtɝG'y况tDBA' 3zI~뵽 T[茄NZ w[pB@OظLb-N3J'tB o*ZYq,Z=ФLpwA0{,%DƒFh#{3+[BD^=JVs F1aBɽn]Jc6=ي56><^}bIAC7s&a[`q1פį_:ۿ9aPtMN˃yݭFHy|յSR@C :!$֘,r3fڰ?ǃu/vFCs63 4: YIZLhh3S8bW;(脂N I+nQEq9۷W7s?|-]75-]B# tB呪R`̝Rms}gFoA` >#D"#N7k0seN "B2J)Iƈ+WU[a4jN J_Ff]S\J ~Q4I4B."(Ay<~4}0C =B2's#Ϭ(&J`-Ul`8@k^4pbg1#)^soTKPMB%dYz ͉NsX5@O%Zح0MBT FHhŀ(*@#oSW$#K[ӍјY/R2E@nЧ!$ LG*Ǥ"NH&C_z^Q +M m=3!@9.H `;2XHI;A7&V!O:ߣ*|]Qkb;*ʁjY 5y )$T45!]ZP6sI?D BܤI@=RyUI1ew+ DAߘ|R.@(EpV~ nooj#Ky(C0G|Y/-SvlY(a<Gq ^J%aC{-B6@m^wFM#F!dD$M;/[K1-5Mxc ם9Lk/ >!> ~Vϛ(ݟj|EXd8,SIVA >0 1E;TM.#t>f?A^LcE|ByR5P}^_2y,n=FГx]VN /+74[ S@%GЅ?V{fL\G\>% roE:=|v( z93-.x7rPУ+fn 7L -aGuKn+D^V?;#FF{v@KIEAs>| : t{WTMp(u"-Fе˯+WQ?[o66Ko'zXH-!3[# hHNl4BqY;?!Ru'٢|YadNt泡֜\aK'ő*k ES<=a}oe-,uJOω ڋ?l"Ҙ"`0]b6 8jM602'F۝3CKݧA˃ųJh`eiZc:GTNWѣtUW@ʡOuW`%Gn4l*k)6G uK?{tN7<ٞ~(Z0oj HO"uD`̮ Xl|nLb _aU^4ngœҴuE<$-;q+Z!=|2y"{Ͽqs&a"u H Ӕ)Fa6ˣDVB"SCHvX`TPet0\\Ayhp (_VH|^K淠mO%nGDg˞d*!VVK#I Ι%E^f3~WFTkFJt<3jǝ M_%z߹^'](dN8L#v/HAktv"7O|҃^fY ?G$zCJ)<<@lnz"5j%P~aI@Uav N z z.YI OW.Z-sy9i||L'H='}|; b81!w۰e 9&ٗPc +M B)hݱ='&BJA ̓9oIc$0BkV:?`,]Ěs#p~bY4 nYL5jFmGbuzܭE4AqGP@%f?<͏=+Gwu9T0"B"mqd-]C7QУ( t=| ]3ȣ1 b"gsZh rEur(}p~vU;3c!U3 bPlYș& "%uVHA0St H:3)יIc6HQ !MWkEvP& z4,{A$^GU4sriN`Ib0!5jN?Lx:A15m ؼз jL]f %V aFPqQK"ut+xns`i5 
3Oκ;?-ɽșNu!gQ#3Q{Z zzl@ 24w$H @JoP$ sR?,QM%J:>QeN V$4>,sY"yYt.6 IDATW犯:IMȚGW`]Qlu&G|It8Og9;3̀OzbSN2`n% Bnκhev"ei8 ELl](f Gy| !VV1uss2~-s zB(,@*숥UHg"aGA3M0Y KO{>''׭B*ՋSH.>sroԾ745q\ e 0[@Wd_P9ַ܈k OA!?"q h2`Brsڐ4, 麎99~m3[R{6cf?HрSp W4[V CF6 l(af&&{j,F\8'lχ=֩H)> 0騦0izO"ЯӞӂuʗIH*l+okȜx`b!Gq퀧]]r[f'2Wn[bXӻ3s;߹&R| T1 SO BdPj^L&T]AltVW U8]5Y7 }#:8B ̂+Btj/sr/2Qd(Rسۑ^Z gE&icՄg~akHE偪 blc+y0ʳNq\JpՏHFpL5 kfPlm^(V/r! o:@ }d@[A^Zz6RݑognL?yZ[C~Iq>1ܥw:̎}$E(IFÂ$ ҒP5~~Nw = %A)+z"$REj8$Eˀho ԑ' k;Eo-ԩz?w5Fi!]JeN<'RtP}ְ~լ`zYN)Ʌ9>Il;Ԍm/9H4'&-3a~9#Pev9M Ke.kYD|Y;TL|Ǣ!VBΉWo#a;XdRfNǪeA R,,Pȣl76vlC9qG.l'`1G1pnD#Mp },TU}L}tixtgdNg`RdF>ǸLWnWvƏُrs[5:JmLVRGylJ]P _  S^c`*OSЇڜ+.NE視oQVýNŊ`Ro'YFQSQ b!cB5RO2zNLT#?2; {4(=B.hʆԽ k] tNCkP.J7}6V&wzqN7}^{.\5MJ#7Á1qüѸn(x劣XrB{l"tr(}E^.*C\ѩCrq$X?U=mqx;@NWE8?s'bapv`-Y!IAFx"mT]/ +|즠 e7 ezOi}aE1~ע +WhKeO#t^<ڋ^;d+> cgWj" =Y߉ZZ#bIs– o_*?,I}?/\$b* ɲMВGwA7nƷ>s5>|b؎4Ꮫ8ۮ~~%[hf n6ic"(X:v6n4ݿ)v|&st] 0Dߛ΃ޱ3o.Øaaif0^Ua˞(OEcGn.R7fzĪ:e\=5U)g|16>$iF-`E9Zǯk1ғ~w3s-&v0Z=>Av7iNz"Кaؤ={ܻeKoR*n2YS"&m] fN)yp&)!'w^ ihީmxԶp8@zi-jiA?1A9 :zGL+:v~!$ 5(=os$]^Ê_2 ,~F1EvbjwT4xd4̦G rۧ}/?4P [CHA?1/o?~&\*i08ȝ cXz`za\AIh_š5!$s]-td7BWƍΥ@gLhKs2&;&Y㌌%͉)a$lYd]W_Z^!?AgLn]lNo.#/|z9(H Gn+lĖ~A7'.BPӃyfQ`'j:EJ~-'r0y4υW?\5p^I;s;s3 %{WOCs qHw1#v P?7;<u1,aN*pNJ~!rwFcDs BQ+6C s3 ^?TM׶S/T (3\G{!i[xKrgFd A1BHX{&AF{hͪCn_ )ԯ6ԥZt84u8H52߼*gD=6 " 6L4QfA7YqeMy7Tt?`0B^Ǫǎ~p ?70х܊f&FХվ{qPo=rM%$VstܽC[Jm* :kÂGڰZ-5Fnp^33^iɁ7uo!=$im FVNVxP419M'lޱuPN0^{2dMU=q!0HTua))QiW@PD)1l4DBH={hPyAGoz1h]Fz6H{:kpآqw߱6 ?9-H-i,(=Lw;\w JVXTUz1.^07 ~Wp''^?O1=m1dO_ d@,8`1 9S֯ah.25*\{R/&;S4BwGС.B?-Myz]2ԝۄw5XA/Nƭ׎Jv}5Gȓ|biA%tuPys &k!O).uJџPdN2gn9:~xuti"p ^Rȗ:]]Ȧ0@`ܠFTw!jaiT"q~pR$n2 oowS8񘐘C0l^$O,Uf5H(0&O{pW+0;9m9Wێ6rpA@Faʅ cI!?q[MCj+BA0:)23KOWpIoڳs@öͨ9 7 #0kB>?1HGN7OS9OiDDx׾?^kr_=H|^RЯ8z!=v\_㖭 ? 
?a+?+ # `F}SEXӝ4^%\[nw@uG!t-Nw.!pۓ&FfjpZ~<^hSWkZ8(bgW2<C n)$@BOA?H ʊ.p N+whɿ{Q.A73nCHAJP?)tHjҾ}כ#nR>Fo+Ϧ1fӍЕY9O\>|)y^&Ͱ6 }C=CYyhχnCB/Fw'irWն_t>/GQR(t⦉₨/.)Tr >X=p=>}Zc~YVnXiow#U R2Smɸ|7^3O7u@8oJMB$5)jDr#K VR~=9zk˕ 6;WNIetd9[%”]P.ضM"3ފtYc-[qC戮iF؛Ҽ~c]9Ȉflk > %)BR )oSJ̝I<@wt%pʚN,r.9ڊg8,;g$D RBs/ hO}~NoUY5:5SjEewdާ4)N_v¤bj۹iɗGЗ.$MJK2ܸ1 w±Vw՗9׍Th? )?^N8lr(}E7Lu]hƸڡgK ?ŷ9$"+o5,B K7լCykҿp2Г~IASt}E;aH<^ dx 55l~DJy {6)DQ(_*++PԚpl+1*$%vޝ?D{ 9:K,#-F8,+s u%FГ#,&EnL+=@Negw1~ݤ)Bѕvr=N7X ,񤺾 DW󪄐$0CtuMl84B\E#?sgWBF,B24k`V#E72)Licܣg9"erm{b?qxÔpE"?"~bM^ !YNK6FMug/!)Qf3M[zl9Jf!y-7h^uB 8?*Au)#r7'wy| V}.%ρ/Yo bXMYZVKiߺ:CUj||)|Mș̞!Ct}]ז9tLVTz|>ف߭\: {ݯWhXvRγ12ɚhB#I(FBv k S{定zUD1AM=G['=]Q]ȋ'_(U!' @""_ ܓg~J; Ѥ!-,*_:B\c m肷7hv״[E2YZSy B`&1Y@\c$#2R郞𮍺c)DP/ɾzsK`Nfwxz&x|حL0xiu%}=s<.4:[Odgz`}!NY3;c˞w+vZ&(33UBcS9֚AHOWѡ FAv[Iux692R k ׵PЉnBt]]0y-An8b&C#i(LFls_z(sE?e( o_$Ldh*\#J;h|KbOEA BV-ܙ_xZƏd+<>Bom4w6PKHĨ e]1L.L* k7P!6%kM1"̞3ytk@ޚ;A`-wH~&{i8bL{~)T)p\"3tc$\zk"tOmYAvOiy/ĎWnyljg>Nup1FbBwZkkJzUHhOz'9bHrGa{}f8VD!Lb(<JU5ZǦ4@_$>ױrA)GNpyQ4F8.(I #t.ԝ+6VjRJOs-uv:_@w^Όraə"{s Pĭ7SЯ;=x%RGP/ (O I7S :"xXlm(th$/z=>*!/nt݃Cz3P:uEXy5jpptT/Gb'Tr gؽic`g@$7vfeq;7ֽ9 [!q1<՚bT Na] 4`\x^V{TMW9:t_UU]s;ˢ]6zF';S(4JzrJIlY-`$GyF<7DsO2whxmUSt@NJ/i>4%Xu"|N+;Oh !]:r׎ɽw^l-ovE8Ց)J)=v锂Lvv7m=+n[ ˇG$s\ U)pS pY:YPƝ152eŧyH.A8ޖOv!>%QKK g;-S$"^ۦ[Aq8 mLq4$o(GVWc-wx͉kWcW n\ Υ#k][-3?,LnOqsȞ뀯qe--U4L(FfJ?2 m6zxfO)^;6 &Ebl ϲ0}{}AuKv: Ñ`UG0iRŃ#צxvrR<#}q8,:#4MSwVSC_;U?κ\Y.L*FqfefRj x&p _˅ȌSAB4riҎoQ?vC xzG4 fzⱲ?uנ"Pir&GF`~jְlb#VcфDG[2p߳"rj1aFY.N.Ĵ\.JØtX-W>+%чT7tbf:r}nڔ>&4Qj)YDA$Ĵ){z,Md >Ï>7OMYdQ;ɝٯOxƴ`6b3]) RS<5Ɋ/)[('9f6s?pw ix[1gbiLW>Z ׉ =]j[Pݬ-MOcoOD}{ۚa\ԅq=(@A .}5`éT=7Nbk]!!a+7MA=zG^} 4)IѰQ.ݬݎ [&|>VnXiowHNTG m 5ڞjՆ:$,p+I,1jG43/&$*ݧT e?7Oqe yGqUǑjDž7nEWaˆ,*LޅϯsxP7-^Â_;&ES{_UىEQOl=&eeh)_GR5aeGӕlsO\uҶd̝-+iaylD![k͂#MJy Ͽ~9WwsLY%8±3BǷ'g䓰.4Mc zXf)U8_"rb4 `+ _z׎\7D&zx~i$+ Q9(,S0,7+&!O&夓XLPG5bLVx;ʆ;~Wz$ |GHٖҷ_tZ=D_UԮqo<:2 ^aaS^cKUݛ1kD'iR#40YnqȀtROE?ǎo \4Ph~!,NAʓR('} VW>Uh~e*9.RcY%՗~ytpNg!T+!09mБ*l=>,n~+&{*O:=nc: / 3%׈I۶F1N MM*}!/b@`lttN/5<}(lc}h|WM 
}O^pSY@640ѤI}y3`.t9lϗ! Zw]3$16tnW^:4S!{w'E}DRU5&j<(1x&?l5qͺ17FPGp ʍ0 03]Cz~=I83U~ouշ^ϳ:NRwdIIyӊ6ߜ'/Ëk~t5:%G:FFɳcc/簡 MOgf;&'b(u'N&׏}~Uv:wbG; K63Lڹ@7<ǥ#Yzl6,’=" :i=ދz뱛WJ#;}B׮.+{C9=lTuVU+1^:vrwȵy#x)Gȍ_UעigO<:m/ 壊9?lEW^һt2FNK?~68 雾u\;.hp<=ݜ{#%S&oϯԍgT ]c{ow úK}hPxLYxJ^'^wz LX/~U߱]I)KI K^IRq̒m;koc7k> 0oIH٤uuzzQ߃;t(ЉU?}Կǚ\YK7(_b=Kw[߲X$69tJa5uBAOFWgnŖ"|1/!,7 #!\qŠZEn\oYuwϑvӃ@!^@5h4Y:Bc&M>nvt%L:)K,rW(4Pr@E]2v"-.&sOL"@7mݖBIOYPuz٥e1y&)®W>U?;hJf)eO!E&gϰY2-"N^ҿӯW/Q&|=< ='m-@M7r3+/ۧ{/o{)n 6Sfzgf ]Ե}ru(٫(l54E)B<y(],]#=[ـ_<[ܵQGQSzA -@rn _9mg;EȼBSg>ΐuGd>Uv~NZӯ]mdٛj2ɤ=/˥Ϣgy?ɉŬ~1{{7j}hph--"Py>=*^6{x75'{TALGoQĸ63SsWRq u,:tg^!@Xҩ L7SML,OESgjr$- ښl{vmG6Q)) пT749! XI>DtdFRip]׭Bmޥ'HӈW~@n*WOuWܷa & ؘ?mY.)tֈ!=:sm[~?ҹ);ӑ2urr['^Nι|a 2;?]cxo'^_M[ oh$L r Y̯wlKw]k%5$>mR_6y'I%ZmKwY;bں=WJ5qS" [|c֊u;W,/r[1nhqT:t$s'όs|mzl#mۈTH .9fUy2 n0R9;cV1G3S=;ӣS^()Wo!ҋf d [ǦV'^"I:cz4m/ dGmKCsHB79_Λ*̏)@N׼y7.{Wt +Q:W=*s]9]QBe׽c!]>k4q&W(]y}6::c5=H<}(K1:tC}uk%gqm'+r&V@Jk/t W|hMRR4rY:tO[cnӔq+uzE@ד@nnzt~?M_G[w޿n=rVGw˚ A#0i=4V5;}Q_cnI6AvaMq~ JmVi͒k6[z!X[߮ i@ZFڡaO@tm%tFɆmO,OkiK]i{(wGjW6ޡA]7gCf QA=j), t@@2[-gɳUQ϶)bHS¢GR'(5A`[у6r~~zG{[}ZAi˝t*#&W^SYF'%=閍f$ 2*#lUkR2/NJ,7ٜV\GpS`2wW$0)Ua5hq^B%  }i~}69m[^񶗗wPoz7FJoiXdIK(;tHYº#%7gtw{4t8x[.ɧHr쮙d`#k oTuuTu1!ݛ @#.#NsD |6z{3Ѧt h H8 M HȲZzt3TMuG@4rL)&^4.p'杔m8t:92991KGrAr|'wzz?,jh},gE$|W3}P&wJ*L;jA#" %mHB+qBy( knt@rIj۩g7O4 ͛il+TqTI8o?ZbI!fo=BLGTm4ٳTIKǟZ=NRt uKB$#[񗙹34>lCv;td,|hQ-h^:Դ'EҤ?)< _ޥhAwϋd"1/O:eCc 6wJv@4KINJ8@'e.T.Wx.4X!s&N5Њxuc s0Cf4uD͆;IDAT)&*3A㛻"5JHUwvdw摱A浏(t 7R>(1sA?~^$1xd3\rO y%ܳ \y|[SsZ`s)t}Ѥ"?4M}_NݨiB'5RR| id9w>T#RdF_4" Ё  Km{jx*JgWP@6Ur{S\FU殮}MJ3^&7uYR 9L[UN>z)ފ˶j6t,ssmEgr~ه+y]N띧h-%)RPma'ũ4p [쿊IENDB`rdflib-6.1.1/docs/_static/RDFlib.ico000066400000000000000000000062761415774155300171640ustar00rootroot00000000000000  ( @X>@b:;;:;;;:;:U;;;;;;;;;Nj;:;:;:;:;c:;;;;;;;;;`;;:;;;:;;;:|G:;;;;;;;:Ld`A;;;:;:;:JIG G [Fи;;;;;;rIG G G G T,MV%6%6%6%6چ;:;;;:dRjG G G G G y;;;:;;eqw%6%6%6%6%6چ;;;;;Bv^ȨG G G G G ;;;;;;;$5%6%6%6%6%6چ;:;:;nO[9G G G w;:;:;:;Fͳ%6%6%6%6%563ț;;;=rJG ȨqG G 
T8L;;;;;;;;%6%6%6R.YF EqjdUph`ZGxawļĞqdZ:;;;:;;;:R[%6%6I0LG G G G F F JxA;;;iȜF `PC;;;;;;;;;;})9%6%6izaG G G G G F̫NZ:;:;:;:;:g'8%6$6g5̰G G G G G `7зԾY/=;;;;;;;mLV%6%6aվ\2%6%6%6fz)G f%6%6%5Ӽ::;:::)<&7%6%6I0kx(lx(H0$6&5Z,XJ}j f{)}j!I G G gy(%6%6ƨ;;;;;:v});%7%6%6%6%6%6%6%6%6+5zl"G G G G G G G hy(%6Ƨ;;;;;dm(;%6%6%6%6%6%6%6%6%6&6`mPG G F G G N*5Ƨ;;;;Mx~w|w|w|w|w|w|w|w|TͮG G G G G F A1Ƨ~~ƦgLG G G G 63Ƨ$5$5$5$5$5=2վG G G c$6Ƨ%65BGQ%6%6^+ͰqXY<3%6Ƨ$5$5$5%6=2lw'ur%`~+*5%5%6ƨ$5'9%6%6%6%6%6%6%6%6%6ҺԺ(:msÅ%6%6%6%6%6%6%6%6%6?J&9%6%6%6%6%6%6%6%6%6%6JT϶ouOY=J?LW`w|rdflib-6.1.1/docs/_static/RDFlib.png000066400000000000000000000510261415774155300171670ustar00rootroot00000000000000PNG  IHDRZ=!zTXtRaw profile type exifxڭgd9nsZ=FhZ̪3i% uşCtXVϟs|>Ÿw?<>},~^}/}aPwQ1EN߳p]<)$ϝ|!?d]?sf}< VmVO_~w{gv#Wr~L '˙*m~E6SO~!77}s<=֒WRͥvjiX»oY;1_?]^y\+_ CW^E@iy`yܘsY^/f`{{5j:63yL9N"+q3ʘSEݛXx%~^DI"ihK[#+RJ-VZeTs-V©aɲf֬hVZmZm0V\z뽏MG\k34,Nm9*.[m5vinm T:Sv\[nvw7Q ߨ(DŌ)$tT| 9GEN1=&-FY1"O冟#r0nw.rNS~_7QmzR dxi#!^ޅXvL9 HmT{3ϟƬʓb,I:}b Yxq:ΛU #wrbk8;3NqfnjfO4cHuAsa!$ ^ 6jj- .! e/$T(ahD{+sjQظvi &Oډ;Dm}^{M/@0TO\BJg^GaFQ}bHw+9,jl|O%I(XThe=8b1kl-1;?C BH@|e]Ia.34G,ܴh۔3eہKo^)R -_$qk. 5:!-SZEɆs&͋۝. ՊB#pt%F328t'h'f| `\9`p8sƠ0i,>PD]PmzЈZ|j.K \ȇL.[+hY͇$^ns AdCL@oI[;v\:DeӅY`nw|(̈́{ a;YH<9~ Vqo~'?7+u I_Pr=sp]F2+yfraOmJu}Wj82!yPL*%XCE "pp'<)C7ц1:",62t1Jk G0U[c a6.y-:`}"hw L]KBrxqcMk#iEoo(Q|_ _MUxHwD"+Fc@pFr#WX(X>|;B5Pum*/Q ,dp"-'lQYk!&@|0.p~)m2fÉq=tGpHa+qiVat'"X '8b2GgH ^` 0JU>%PHb WXt;R0jYxj;3 `HJRF۴W 3`z $N^1ǚ^OPړG$\ri 5 r_Y\|& t{ɞRaŹZ#NgG:d8 w#܋_pCX, {!hxL}'B$v! 
_B21%w;g<XvGB+B7Q5@0PR @Ma~ >`J@ʇYP%1 5IƵbP vusFߖ}snĹȌRp|$p3t4`cMF8lgB +h Š5N\ G*B}$!ыm=YOk9#Fß)1!BX TY-r X++ _=c(T^BJ.b ^zaw'V!ؕ ( }ͤHbRPTIhH l&"n4D, !JwYg: U^F/OP-Qd6Wb'Q@IdꮫF/+HNL)Y&۸1OHk dYWI;Qqݼ y{yM7ejBDw]>c;EY"< Behn#&s@b0Xk d -?^-#:W2P!rBX]b^*LdWKH-n!%sc=)ydu=XQTHMb=we'1)dB3?r hJQf8IC6,I_m tmhAr3| : ^@@F=22E knꄖ^-jIꥳ' {V1z3N!t ^jOIV0-h6bdҵTܣ,1(?D1ifz.Jfد15 =FLjQOy228V(U}[zIy?Q S'#WBW sIsΛD: `\cL1f/N7ٿ%F]$/6̀kdyH%p2PÝ~ UKnώ ;Jvd&@p}\EHzk-m) xڇ 0 fIM|˲a92"ԛ^jt ڸUVwt&M>Wف (׉OOqd3 blbm\ar[W с!0I,ɌLj|1,8 {Zm(gF۬u\"Ր@!4IlJkU%pNN^UQZd%kc $ۤBI_2"WJ0P1Zq - (܋1UD\2#; Oj  4-N$`7~6`3$]{NLݳ\N{,1 4yw+|122NE13_]N %%q]J%ĒbksTra'\Tdtbº_?kHzK"L< C54JZ)aJր_\FpX~ʨ9L,ւ T$"'FT 8dޤ%RTL OҜ1wb&87V&3,%\DXn%lUm:,eII#Kja l%5TfЙ.ސ8:6rA!8%sǀK2!"NS~Zv9c%[}y$ tlU@L2a@9nh'ldtH7ZT9k8pEͽFRi̤0Wҭ3DҦ@#vHRS!~FywTnt x Pd+!cä\KjFC[X"x@֫f(7+ 75hՖcnԕԀr` < *sz0l0R+Yn&^%E}claݡk:aRqzYaE+$.L"x aCfWTtL4P]A:@y9"fZNT QJ E]+P=' 9Nqځgh4=0"p0aj I[{S ߯. >p!^⅐B D6ssďTpT)CˡҊ?g f2߮^!Cθv]=CLg~,ƯD?L7 (C}hki-VLuqѿ J)>²: \ґKBL5QM[UD*m!r2˔D rdkxd+kSs ڍkpel9s>"L`.$ס !_P8|y0/d< 2 ^[ Mغad;+иUM?C*8YhJ$q%0jUȋC`@2? :뭮͉UW`Ͻjx,;:8.&3>5ܠOb=_P.̬8uD%'y(//3 xh]Kq I /,PG Fl"BpQP;Z](@&p*î/Dh`zQz?5BڠГSp| 4aZ"x0.N~^·1ȁ]w]mWj ѮsU=8b$ g`W{1C;mQk IQs@5{wT\&iW9o1.pڛƒM!# C LQ龫-t:yUڔ#f tB $T Yނ͐]mk݆F*WMZ 5WѲ [}@)d ҥkO{ ij#z.i(42š,ёp?<=fڮ,6Ã2pU0H*R Iq(bXbTӃ{o;B;ғ; u^VW7&>ױgX  y !v ڣd: W|"rMyF0ur~QxOc\Xg@@`KjQ 䎑,-т͠>fkƯwigB}¤D5}.o#y1fp?E$y]!Lio~-r~7˂A݉Lv o; ݷWQTݬ.^VQ?GX"+keINFƔ,P75tt-s`bjOxu\ih>i!kx}C5ԛiP5OU)cW-9EӍp"]wM2qЯխX@;]=b} SL.lPU*Dt-Jtc*yjūGpz67sb^qώ*VCm 崄P "l>"P8 fB!@BWC=,Tt1BDXxf|:LPcm,x)G(tl ŧâ*2ݠf@ЖK3cQ*IŖQSÛ)!Rh(}BC qC袊Qj/=wbwݤLTm[GGF~߻ixl k2G2ݠ`dT-0x`P_ P{E H_hhϼ'!E%rɲ8zz̷υ":kRܩ4XzKn sr¾NM=܁GCM MHemT/=d)vIEξCmf"$(f:TQI\TB!]hPd n5R'5:wDѦMaa ~+؂:_ 9/)mެ&;ɦSc3|u#@aajAmt%bQŏI5{Hs7>^$sCUmD8vթ7ih $7Tv8[O mt&o[:Q]KP!t #1 {R$7MJ/O3#qL60d ^Ԓ}x+VgTa=ts,VaKl*a8+JX2iE~N:9^. 
_V 4m,o>[ \lfm!%*ET%u naѨeڕQp.CKcΚ@25)hxU&;W+d׾M9o-Rɟ:;<$O1KW-ici[ݯ;bqЖ*إΓB> ʂw?f _Ck:YQȽ>C2OKX3Z#H;MmW!FFAʓ Bh:!aݼ=B]NɛzTLJ$A(N2pW>TD $ҨE֙w@ۺk4$C$oGbڬpYGh-xF~f\>3Id¼c^ i%<#= rKO~e`cnPJ&zL/Wm&>b^::XkAvmo$pD0l9Gݬ4tn9Ϛ k KnM_I!kg#jazIw"+kIQ v*2ic2Vv9bUp=%GΓ3+9n78d [SAY9>xЗNƢ>8ae 0#q쵵 :2xCL$Q'G@/sEіH,j)_(a/(h)F8;gZ 3 @!S3x4WkܝjI /Xc[Wuܗn ä1m#8ofޢHՅ(L?Tu\YHAaQ,&OΉa ՔGqHSԫjG=Bq ,fM]C.CJfBu gʐ~lPHڤg,(^# l5d`u/}@,Inb(f]3"TS70ȯQ`d(bz!PQ Kβvk0U:)4WI5}>ueWҁJZP7PR65m֑熰u@DZQ@bZ1M}Ƨ5}P%~-UԪ&PWl @F4$Л6+'H)j$b2 gS T)Yf>:>Py!@uUb/GvX3'B _\zb>iCCPICC profilex}=H@߶TD ␡:Yq*BZu04$).kŪ "%~Zxq}w@Qa5hm 1[Wi 3˘|=|,?Ggaӛy8JJ|N!/K!W Bwo䄗I]YwcigJo `z[}u[S`ɐMٕB~Fߔn5os>U88Fξ[Йrf=bKGDC pHYs B(xtIME:.`Q IDATxwx\ՙ?)]\%Ym`2! I(!dwlB MBM a!7,EŖܭh=?dk;<{yP( BP( BP( BP( BP( BP( BP( BP( 'B 48Mj҆6!M">FJ@sH!.M"FZ~+?~lY2BNRB0w/ ?m=@˩z䩿⤔W}E*o)+!Kݓ.AP8d=RR. mWVB^}A@;r>hPlm(\Y%?KX|wyvYxNU;Ѓys;DeӨ@5E6TPB*f>f1N܊3 U뚔9Ku HUT oEw^M)f&o6'yCf|pz+T?vQaq]u)S(or^۴MY'QjCGW&,\O( c.<|eS5-nUKP{)T>E ?,~C\e %tW/X~KX2 }Jg~)߹̋U~xM{,Re++T 2PG],P'W rWߪLZt_&^5/I!Xwl-^k~<쵻)$_QP-ZR` YHUk[ߵQ(} ܠV QbY1y"(Ze}Qw)(#sHa cP<+ Jݹۼefixe(v{nbӄv2LDhBmCΌ ,N2Z撏͸g+6t*_^Mr?\+ j)3(E׆FW{ e%tE\\'*?ix&J gxjM2Y f23nzY"ݧ(z'Z@ ,3{ì.0z3jOB>j#Y_ 5|Y?=o]2ic8yvҊ*!zJKBgX›UPBi0ԟi?Api\}QiލFY?33SNKQG瓘JUfrpM齾wegM9s>.5?^GAI5;+8Z׉caә6.e9c9~V6k?C|fѓv+(+̢y+&x]6j{ x4vz\=pvUwAaL|'{i,t[I+VRBW&(/<cy{kSɻq<{?u9nzS>oW˦a ܿ^yE9uٸӔ^+<} nxIsm~" G~<b8O:K 'h;(M=^#o2/.{I}=StE(*+DW8v +oZ._]ztZV] ]qFZ*{.-9$Fz}uO[+b”#}qDY}uLKzznߑzKNg4LPv# y#ڽee:m]&kY1DE4J~hy.]~ϗ_[8wW(Wk>棊lު*_IlM,2FFc +8'=E3|v00@u5'"\9V ]qz/Ce~}Ʉ#:EByo=\qqDQn*j0.tSNcp[OLWcrzlQK8vppdwqE* >Ps#b@Htu y RNoviДp[n .ټPe%S7DrRtt.:P۴ >꣧gojԢA7?LWM# zS[ "27 b\f)2 B-Wr9)gsV= b ~>(TqGGht!ǧ>Z;W\ǛP_=4j$q=R]<RV(~F롙4 24g%& ~6_ gyJՒBk%܀?Y4%AҜbLj!he-Bw5o]GGU:8Yji%<kL&t{B OWNs#F70뒽_-SHqyP }$C27 ْ- Wkv;ӊ $B7N,]b:DaF8}lTdlz&"'$,zX+t{'zSvh&x[b$6>Zƒy}A#t 1a3Κ4ڏ%{~q+P_CXt9׺Si>0 GmZMDb"&m;͠xW HAGgKyA+KrDVASpi-gs ~K8x3X b6m;# #i;2jwƱtTO!MJƗ!,&|,N"FW1Rlp3e|n}ms#5^#6PLJu}~:N\H*X"m. 
= G}  KIԣLe`V 6-}/?˅0wW_%'iL!|PgN \(∊$<̆ͪttFye=/|:-q~)gFfYZVL܃XY]DrTsdٹbǝ-t!{1n3ޕ5Q gNյt!ͅTomًiIBvӷ?5qgUPW}q^ֺ] 0zKu*Sx,m_ҧo`"?e Iq^@ }GGN/o>nxBbQa IG5 GEXBָ_kzKuCBg^@ ] vsjk|,zxjgcZ# cUVF֨{Bwc`HY|w\wּ4w6pCF@ ]H|S,' ZtԤ蓏 Kd9dm&˧Vw>\*JtrV` |r+c=Qz}6S'$zZ݃ 3KVe lSk4F,!Hp,pB&'![+]XE:ZWjag6ӃHvu $7oA@OmGz~#u!|ݯBйd0% &.Ml<۸Z+y{}G)'ڂi3/Ҍ:#.aڎ0QwZtaZi髑R{vihƧZm1qᣎ}2&~\Z( }B:2^/LY3yy0΄j4mCIA0gc4qѝp~9Ϙ@z٥t4\n9!䮇/28\*NB f 4@_p=%w12PB_rqih;NiNuǡD#+G$Ȗōt飋n!;V;c0@R˔qqtqVϣEK1k)tnQa\ۺΥdPj~S‚+tJ'wm ;\} smmKhRSB[0B?)Vq=cieLbDuX?V l0ÕP˖@zeԡcHXFt\pza6ӂU |KJCKd;ME=S!?nlv}]^TboX?v0mBIr{_bxBP{ָl؝[vd"c?WRU(þ(% =G9B67<6{q^ݟ@sg9B?Lȍ;Na dT~Km <]6Ի><䪄>V= $~){26Ae/n:x çǔFOމ5=={5/_ͣ-*0\^;A&J B/_r|>Enf}8@c8Wnlœ:Р$<0ʴ?'jtՏyH2&N(/,Ɯ.CT*'3.!a}y;/Kɤ$# v~]QճI!6 CBkg$mqgCE4T61wή囹edO[bR+I.UzVk.פQouQ7xó;=7~JC|FlٛʳGq}Ͻ$==E na_>\ă' 촔;9g%jE5n7.7j6m}\r_X@LCiJX6O_WIx4Χʿ \ [`:|:w'a 1yGpA&Te#u)o|"MKd25i{v8Qz_+[Ii+c*2p$@ 7:i,#sǵW"g~Ӯy!TڜC:OkM$V6mumhodz5殾,ǿ8'azl!n)?X˚Җ ,PR%sxK*ԎB_$謚Fs\Y?7;/Ǣʰh>#ų)Zv&n܆8Vn+pђ=Naw))>y-{_JNv 9SnW1»@,x4z9 'G8i?׆?ޜmzIDATckdSZxon})c˩,pX*pz~\:{9ZVǗ,l&$-q5El=w_,44D k+B`sXdBkk[yyH?.5yӢ~c-"~><ɩ[1ojHyU0?擳:o_ N Tr}~3j,~k.xWk~[*2]J|^˸&_YiE} 8b$<[~ONGM}}nCߞݯ;<aᾗh#K}qYYQn Bޤ@x@fy|kd,~}?/7/}` o~6g$4B(mD+M*Djb-O|m#\is4/0oiԶ/2h^j8Ȍℨ@{0;/ğۅ66щح hu꼺eĴ/¾B|UW>~nI$r8 ЦԵFwYA't)1,׾ ]˞mc_فN"[csyfHP5ЋWm8 |dN7y5_7$._]k>sR8?.{]\kXV\ɁPuۖA+t4^&g[mguJ}Zkv47slOIIx;W}Б:IGJsV^~OUҞyZ:_ xb]%B Pu;[*1Ѿxf*H:_ˎ )Jcrr5wL[I{xź 7rzzzk^H氇{f~$,a||t{ߧw WM8T"zm Z-7bzfYi2i"$N_ϛ\ 𠒠Z^_ow|DkJnɭb:O sL#Nѝ{r a?\G$ņw[G.o߶] +):̕d }Ŋu-ASn.׬֟K]ncm B\+9=ƒMtZx=kTlRr5bPb !Ĺv%G߱`rO={*G[]AS [ E삓 g W4J!%wNCksIAyqU0밧~ږ;/ڕ4e^®yk/]n%OY 6oqd$0rjPrc$xC*rz}/6܊[ӖnCm&H];r3ʘ=IN])%r%t@h}єU>=%t7Y:&xXpk{5()+dێA𨿩{綷ǔ %9@=+o)H( :oW3Rnl2ƺw쎁UcQ&qD˯YW3"+Z苫27yGYq,GWY[ qa7se*++|ɌxwkAY{,[9A O^ųٽi.+c=j4X ]1̙=TVBW k r*#2*NhlU޵򚀯Lnٵ=Ij" 6icƒV%uaT6h6v(7\LJj'6UDAErt;AuW( BP}tňPн*XRQBW7xDڼo}hFWW1z=+? 
Y-q BN!zVA*zV_%\:K ]t"zVCʬ|U&TBW>陃?HP,_ԙ{J芀YLR }Q u]:ԋYEt[@ ,Z B~l>t*(E2AMy;\`%BUz-aRNPB7Re>kP?^E2%t"J6++)L6ɕЃ2@gŞ\˯̯nDBNE 7W)^~M¨nz$RĂbkTjFe~%tSѳ +KMݹ_tk3COh%t 佀 ћmxSmOi] ="J27QxW1Kr۪\߉ mmeɢhq zV}U/]kH'eH~gHu-u_ zwZB1y}ު/>w鞖>MbLV _b6𬲄pDW#@ =/}~ nQ"WB.l gLBo;MYTڄF-@RB/Z e6ؤ} ֳNBwT2OkDZ0@^,ܩgLkXؕE߈JAw_< OP lLaKv~2z t8qm v KfJz] # M>0o$f&7E J#%Oy5 +DŽ x]+*yy('{&3w; 34"KOw(aMdkjub/[b3<) J(RۮgrXyH ]+-zl4 %ST`< u6}_%W(Y[CRA#,q$\@}H6m`m37u)) BP( BP( BP( BP( BP( BP( BP( B1rl}xIENDB`rdflib-6.1.1/docs/_static/RDFlib.svg000066400000000000000000000067651415774155300172140ustar00rootroot00000000000000 rdflib-6.1.1/docs/_static/datatype_hierarchy.png000066400000000000000000000401701415774155300217340ustar00rootroot00000000000000PNG  IHDR= PgAMA asRGB cHRMz&u0`:pQ<PLTE,$1YHbϼƬ⷗ʳϺ``` @@@ppp000___???C6IlOOOooo///dQn`bp018HITĴҜ$$*TVbx{ <=Fln~r~&*,38;Ybh fpwLTY?FJPPPҊ%tRNS@fbKGDf |d pHYsHHFk>>ZIDATx c8sn]HHh{tK w$`0|Ό+-@n %Y,fR2n6kJ.I٬-*',f&^d&Rsx8J9L/N^~꧷/sr?ɻ_[xRL7O~}_H}7'?͇ij;'o~:9y'_?N|gI9H.U=0ć?ԓkxRLe7J&~»n7G /?e+x{z۟~=y A^3^īć_?tx9I?=yy8/?9>݂'Q/|P_a`&K1+gS#BgN<%B̮[ڵ VX+,f3 `Lb&Xa1 VX+,f3 `Ll]ٳ]`I1[W3u%/YOOY%Ml߱*/;90J3k+߿:,3ǚħOSs9޸&Hzqrjz׻wͯP2A{L|>O?~A2{ #ȷ/_ >C)vī_{ۓ8.{_'^~L?_>1/?߿ۗ =_'_Pwm31$0߽|?B&p.Dte"cg'&|Ii<_峆pk|G2\> Djx߿| LDz9Z&8L/_?c'#?!nA O~xa}rBn_##0^ysA&-c"?/?7kq|||oT[f"" r ox\@/Gǟ?Lԫ,>ӧkN>=@S@  ]?-3#sw'n8b"22O_1"i\^ï'q~gn8b"2 31WsĎIpymhW"0s뜝_Kė?%^)`": 3]=8ff"|م>n'VlR/v}XϽcLb&Xa1 VX+,f3 `Lb&Xa1 VX+,f3 `Lb&Xa1 VX+,f3uM{PZ!iL21iĒӧkԲmbK7v1Z.D&qVgb^Q[p[ҍ]3n 1hV3DX33LL0a1DX33LL0a1DX0_G::c-2Q8h.FQg#O>rAxa}Z0ѽez7ݣ0*"Ր)X*p)x;1ݦTqC_(4흈ʽ&UULTV,^L.,\h YmNL)UP {s{WL57ČNF'O3Z]l!&3V*kPu'suUk* 52u@fLDDi&߻f3gՙG1Q}yJ*ExTVt 3 1Q7z+CLm(W)2QiAu-<ƙV|L%zg*AbBc2r_s׫PsT_`"ըtU+ZUgD(WeрF;)"~&嵞UM൫l'RA Y5awM3Qj4DP]]5VQ. Ǽn[rQwLf(j\T)n۫"X\ U*P4+DtA&dhdP~.e'*(әbhU2x}L,m5ܕk1\%vƕsZ5ك;ޛ% rCv4<̆_`Trߨ? 
!쭷tń*~?0wV.[rM8rGT 01z?*TQpsKd'7@7 Ģ֫|uz%n3`;ń[aK.a6PSr{RL`2H`B/W E:iE.&dBNj(&8~ApU 01z?FFa?2F6rڼ3xELX*KAݧVvz˄yv׻A&qpS^GEu9;V=MebFۻa&<;q%:ƉEu9_U6'"h t070/631|Řg5O1f*40sU6ts\%ctom 3 `Lb&Xa1 VX+,f3 `Lb&Xa1 { *z6,NDr>oԛa"4XX3!Vum.ֆpڰ9^6o1k7Xē4x݆1k7Xē4x݆1k7Xē4x݆1k7Xē4x݆1k7Xē4x݆1k7dWL,\_芙x IUV*PqL\DW'eI{-tRfXXV𪲚b%GB쁥*tJ)f"^W f"ܕ]0QT㕈fߡ%DsgbvD(" sg"x(p8nR=_yL]t+ ԊW(D(\QĭjEK5@yT/:+|ٔ`ڪ 26ZDZLڔ-OT3Aѕ{u&jQRkcq%?qO0VdQk_(2nq]__ L#*yWr%Nyj!t n=lbL3( LmZJE~+i&X4hf6[ gDFlu].gCc^ 0_1[:. Џf:zZnKNq'H0AH͛rKͫrQEna4"v*'̄*yE][]A勷 E:/V*Mh+hJY\4fՆ5z#UU[d ZNj6j^+ ҃j^DU!#YɆxLT͍ esNTpUMA&m;rsWLޖo>&D~W'QǣZT-PܵrBy8A%ZXꐷrs[lTa@Fh(G %r0<5NP>ՆU W*wf.w`(K{8LȞ qZ4e$5cXlxc܅<=x tzbzUHw%]W(k1gSPW~v& <4gB  砅+ WוRzZpcـVLȆᝪ|n.?w\7bK1^ $SLPb kչLñegjLs  솀״:} 3JJ |4Py%q |h(U=u.4 Mf"#](Ւ|&0]ѱtzl:=O@MçbdCnD5Y˕*g=VINLPES%2 &TU"U)D ß`J;}yTN_v=u!VjgM L:0;72kHsuL/';'*% .(UW hPpn7XXXzKpbt"D.rJEXeK,] Rf=T6>vĶb;xb[H<-RlOl )'Bm!vĶb;xb[H<-RlOl )EfYV+9RYP&\PgUp fPKf8F&ԌDZ0#f+MQ@?BڼLw77wcc"p$(]L]$*..5GBbV+Y,Jjԏz\kj-KDu v-To*W&\5puL[KqfB11ZcZS]!'#aZkN1.. xCr(fVi:~\&j㘘Z(Wɧ]DQq]Zf[9u>&B`; 9Cv!`20 LdYmQ O5h\*kc1\BP04;!nuAw<|ZC& S[SOsD0,@2UYИ&tZAwD5YhЉ zR"Xa01'Yd AGNL>*k,K=.M712v0 9F^!>Gt@h'[tL(k(#лpl 05IL6ZE>O SŰ  E #Gw;M ed-X$t.٣#hx4;Bv{0ԇl}0L(kQT0l-;5 }ba2aAT瘉Ua21 xfbb&^$لvYb$ ӡ^l )'Bm!vĶb;xb[H<-RlOl )'Bm!vĶb;xb[H<-Rklr㺿|dvזZZDrNfh.oVla&p g&hm83E;hÙ-@Llpfbv8І3[Á6آp g&hm83E;hÙ-!vvv UI~[=1}<ՙKMIsܮcxGϓ;S&XmPS8L&0TDiZ׶5}(bc03O<&,cN-_I9Gг_LL:Ѧ;sY&g=B]KObsܶ$LƋΘ{ v/;MƎu0 Ӷ/S8ez[U5 ͩצz28*Dt+Ҡ]P lwzvKn{j?0aj{<62=k~F)KNGN#gLhCL9#051}ѭaޱfgY6;SUit;P@QZL*E2JcHLԑkhjP }؃m:,*mdZiuueÖ́Na{9P=dn wxw4i1kд :{;>"&T1c)ͭQd"pTU.VvlmjƘF/b-7 U 1 9ib*l]_#egLLE>5y54d.Hï1t -2AȀ~&GŮf"H%BGrb;t=8-A&,'JÚiߛ =h҄ G{Ä64^J0\& 0(F L9LPLK2ha5.fomx*R- 2!cܰw c/^00 2afz?>I(&Cr%-3i{iFg(Dਪ<&[ s$hl 3b"kL&351>1$f'fLL 33Ab&|b&H̄O 1>1$f'fLL ҪLTfbk_1>J6aܯlD+1i{jc10vԦS̄O 1>1$f'fLL 33Ab&|b&H̄O 1>1$f'fLL 5~cOy<&*Y9=J-SR|2W'Sa$bROJl%{{Z8^>K$XGyh\Z7:<&6zW8LWA:b"ܢ\Oo>sڨ-U|&c-u6g2- _\DŽ[^q`8&Þx;0{+]}τ1 JyŽ}Lxsc VLcy5g)WTOL PVĄʭuc9 056AL dL\&yLhg/('&E䜸n2jx{g2!Ԁ 2DŽl 
ń?CQ?uCP;ctt1ꊱ>QKJOA&anig*룮-, Kf` BcB 3ut:lӀXC;1M tv_Gl&ppcmI?֝Ng&J[BhA&mۧ ׈2UB)&zV ;&4.4Rv tn{ tej7HF<+&:#:FpzlLk{a&TnM5xL8ןAV0>a8ExV6ISvݷ#C~?=cu!Ą,=5N8Lq0p(S)TbLھNL2soT7ST9d5a0"T8׵vt1&f[2COH&1fBɩ֬ÄaTUC : 4ҲX)LXL~tS)pvTC?NPT(LCuC(jz:w0ኍf;&0:_ K[2!sÞixLeɠ tCDŽ0Lb~i4 `"H&,Lj1pƣNWuc 1a 2naBE|[G>拞e_dbqY㍍] mCZ Q+lL &06Pi^^RF R)T:)x~ k.?:Q WF jXH:tO3tOqdM x04 2=&@k kp`Bɢx|ia;{&u6;]g {*M:1L 6ԥ{RL:i0L4j 001 $tB'1FPiv8b_ޅ2QO(\BtaL4'nτXu`e%cɍy,CUaWb@Yzm 3L1A,M#Kkmm@[h><$Sgm` SqjL@1UZ"}5m*d/Tc HТtXN'{f](V(a :Nל 4?m{ݵ&sm 2F\c8{uێ20{v{9ybUjE;{sCV-^GsZwDqc写>݋xL,֖XW+˼."fa*Tg v$xbNyVhSܻmAߙ,f"Z33}྇Lpb&v=73,f"ZIJb&h1ˊxz}g&"OS429ݐr[3X.Q:%{XێTN+ֳKFM #&64mL:˴bÓ f90k BDj!EwќU \+ruJE;boO'g"qL }._޴CdBj>xwFe]"!a)KXpCdb_ Kb)b '?-SXL̙iP;a2VEpwu}",n'p)̄*N(u&ыFiasf"r1a  MUo5l K,}lw{\Xo6rSmBL#&H࠙@? Y\kN}sҟ踚8P5}7[4ڗrDŽ*2p<9&&ڸcux4lk~wZuV[1V~L o52/DKq܉τ*W߸;G: KWWMOD6*9Ct 201f6p5T0M-"DW@j9N|8DŽA!1$_ ;i ~MDW81? (hy|80 =& ѷiiFthfk$a : LQNd͈τOF:TJ{NL3zS4LhCdg])1A3CsB!Wy+wt1AEtL :=66E2]%fe g~mUG!z#|ɄuG/\wLp rs؝7Oy6xJڸ-QLfs(t7`DipxLHxLO=0 D~?4b)NqkjBҙ;j&=&4q*<\&V~MLL<&p8]o3qL`1͟l0a%9ˮ2:yV.QNOgI2K>sw1Sb&a1 {X=,f3}྇Lpb&a1 {X=,f3Ȭ]L`bY}HH۞ F<*j6>S&6  2!dZ0<Ͱgfuh˄&䐱y'`hCK $v:I&FcN}wup]'{j{k} \N}|&yB}wf6NG*P7m}ǵ 2!W7[tK tFU.Zi>s iS>d_{CȄp7τ~W+yLjYzP.A7ՆWY4t p"moca}vI4xdLJc^ؔ edYҨ@[ Lc9nkoceƎw3KųjsW+M31nFڶ9L eQw+ǑƐ7NcOy@y# ,hp𥉉CD9D^/*ML֞ ʻY<}v;Z$0.`-VńO#JӑnCp?!H},0\/C-DD 3Ɖ\[|_1у %)4+MCGicg:=MFτ-@p'`)V}wCD:?TtQY_K*p= 1mo3iScjE&0p.i0T?Wabُph`2>n z´xsSKq ¿o!%MpeX,bX,bX,bX,bX,bX,bX,bXDWHBuXQ'A1GL1uz2d"񪓉|>oD2d6uM)PKʍPfr 994d"vBd.\<»L&H$S5 T(|?B&&$dB<@؛=JN3Ёc&\&J<0OO3)%1OJ>cZ2^dN S<yE"w tf)80=%"L{>xP2i`c4C>uM1N/wgi4bdSC'6L$Nb&.pGʝgtq6ltYgt0`"1U'鳳tdb|2ʔĕ O=9RćHܟݢ\.-@ `/H\3,Xq9yL2d BN&.Oi$&gx<(&mz?&B&/燴xE&2q{!ʒǴ80\^EN1q-!k{( ,Bqs?LmQߠ Pjt٢.2aE,bX,z?vL3qɄf퇈=&& 7er+8}tLݒYxlf&j%_Gp7Y Zߑ\Spu`C"Z7Ozp B2\~6kgց ցalnw@c`bu`x8yϟQcEׁtX,bX,k:KvLG&CP2d'W qNRM޾6%9erDj `!fNadϝP4)'Eoe!s cu8u@HExo6PNy=ν!ztTIsU\"sL83ƟJx)IE,BU5dy̹ Q5)zZc.*O_i6)^H{.]&NLwlVMަ֘,t%MmcJ8C 
XRex9){0_;'$i6q'Gf)5)zZc.*M_6g[}:@{|~L[L%)uY[VRdgiw{f_+{A៸OdtE>gRT:3ESm_7 =\&RyxLg"ǁE)4{gBnKf"9 ! ۦT4p:uyRnfbϐ 96?OaޣN-ӌc (`-h[<{Ȅ:HfB.`bN8{`:r"ʤ2G-Ieid"K1Hs>ƄK2dH&Ry?Ϫ.s8Zd$բ\ :.$.N"ʈ3\+]wf2!W s /&Z.Ʌ[P.TNǵkgLr|L81ry)({?4 \ے |pb©2gٔ(RqHj).$u/Yh率4>&ܘp\&{ OZ-hg9MI~?172Z*_T6ɄZ*Leb.T?frr:95V#g%tEXtdate:create2013-05-07T16:42:06+02:00m%tEXtdate:modify2013-05-07T16:41:56+02:00>IENDB`rdflib-6.1.1/docs/_static/headerbg.png000066400000000000000000000004431415774155300176230ustar00rootroot00000000000000PNG  IHDR"sRGBIDAT(mI CT}x_0@)c c KXJ>x9>x1`1zehټf™t9-8tɞOFVЗcYGXwwq+Fn>+p},\?O(NbĨ\1|4313+^'˩d᧞670I0w{dL^?MsQ+}q*NPEIENDB`rdflib-6.1.1/docs/_static/logo-rdflib.png000066400000000000000000001173011415774155300202640ustar00rootroot00000000000000PNG  IHDRX*zTXtRaw profile type exifxڭi\9srH4{;w9UUT*pW5F.+/"/%Noz-~}5:N:0̑/E~({7Y?sc1vx)xRH?~Δ>&=SI\?}ayz=XρVm^W3ǟXϿ~Z{w|nXuSn}˙* _{鍨mnu9fȊߐ3pK1ZLﵞZX@8nl.ELF/y:gށw‹O/@*CZq]Qe(r?" ~iy//6sӯ!V ?r+8'_oS/Dp1!_C*bl!L<ŕʘSGϴK jjfIr.RoK)(k*-JF=KG#7h13O5yWZyUW[}5lŪ56lܮǞ'RSN=3μM7rmq}EרQ־"NbFbD)$tT|9GEN1#&R\eQpvPĈ`>!G7o)rNS~ܟq6 YS2Q~/%:&dy%(}T؜Vݨ$3rU@=m;-*@{}Y]'9fqr4.#ؼUxeG23&o.鯟5~ZGܭ0w9)֠oaܞʝ0{'͖O Z {HVpSt=D&2n ,;.)n^YN̾|ּ nE,#U ®6G+s}q%kXTd,Mvݸ%1.7Bf1醹B8ik"Q:Tw;n f͛C߫a>wuF#T?r[s5}RΌ{r[X]UC-'R'm)2񈔉kY]΍w ƨ `v:SmO>C6Y8w(]פӬ+S]-oҍ\Ot@r]1udIid;GDଗxWf` luZ\rm/96 ?NhZ'h,Fa@\n9z 1OeF!'!}}riX Y,kj{{ą~%)ܮ a5Q#v3ėMP#Mf>Vt69$ XP@Sj1Q"1#J@E H *BMSIۍ厠q[SCgq)zTgw-.!|K&0){yN㵘j_EQ2T"U- fA/vO0 i@H2!a}D?BHHz#@i+H&Ww'|!4_FSC+Gj8gJz wY+0(5VdJ+0@|BɈR_>cH!Ry*4CZlˆ$x=i֙vSX.nE eGܼlRV, @PXyq\O ʒ@R`Cwjӓ콗L5Ǽ`XwЧz( F*o>aY T F"P3f钴q?m.j$ ̸@}I.B 3 FБt8KC``R{.YHU#k4eg}`IWPesb9׃DO Zt a=*62֚T^|b E؃.ȡTm@UfnB\|c3ͦ;"pKeT-Qtĝ@ }=`A$y)B~Sj/B*e~(QYx7e%^-bW·f%ʫ)/q c@_Xݧ cmPF,??G_bh^DoO~ C.V ѲyraE,+Dsc7@&/3cI(#|.Kđ= Iw@q|l܊~GacOލj(Na]GQKF!"FȼvL^=+bˏWbla☁KX6`}1z/Syy|kBȏ9 (TrD +w2z9&|fH6j=Zd$L*#IvC@\:PysR.V߆agKD*A(}nKƤу{=a2I29"@> %,PVj?nJM T@鳈)6.IOM8KB|2DqO3@O9{H}@qzډ';eJ zFAeF5`# g"t*i=OD'nF" !2 TExK#PiT%FڥϹY3I y|'Zk"raqgp?70`fa9!"1 Njj<=1~خleTBz!.wPy|,H H54Fʠ ܋$M`+]qEV;D; 
6Oi7(@xec+ƄqHTF!^*ۈր0UEdj;PPIJ<8Q;YQOʍ,C6Jϲ "L;t$ j,0vuIDc_CBb:1O@_{:³>}(KjCAծXܻ"拓VqbӮ11NF,v/sČɨ\j"Hu2Sh a׊pߖj!d: n<‘X٣tϻURFq?@W86.U1B@8nO Wn7 }(m$ 9*i8@1`0CQJ u4i!fjA7X谣.gʻډ2%$7v$/&B}/:jCtJ& ʯM\_ktP˜2Jvr.B@hp?oЬ:IIОdi18,}F( QVDIS*&ES82Y ١vbPT_iR(6FOajMl@eKrmNkQ"Wqe ITH} Bj%!$J{PjHuy6% I (O:rpW^oRѴcfwis&]PU4DlyO(/9A$_x7`B"@;2|"w9/?]qj@h;d2o㫁I,{ bE:jhig !g=.e(W&Ev5&##gtA= B! kH9S@X>d6`3$WBQw鰴yXe LD\%n9C p&yS>9{.cQ )p碬&Yމ Ǻ @P_^5ȹ @2JH:ۈeQ:ڶ8 {V~rEV$9CNBOti·;Đ`OzEgK@vJ2o @f7p:u pHLxF ;C5Equdr4͠]e$|b > |}_2j erF (wހ^?uջĘK6u *CEFf'mSO2&Lpcj'G1 a(#) }- Q%&cAϡTLjC/I6j*BKd#Smh^q(,y!BI K_@2uFn+u$yrfhԓ,+_Zmܧ/u.ZEmK`;zoG> sdПk b9鐐2ݟMV9## z5+CEG;s0XΈ9h㑼>6)+ & dv} |#(K= !z|mY}=5ju|IhXṵVJ+e6T pX}R~cme W7e O(֣.t6IoNi&{'I ,0Lc](V#^&'FT 8ƁY)BvB'c;X X0j -UPz}lI3Y xg!;f0XAբ>x#,jp,Lf@K>:x8#ʡpt$]RWpe>tCAMN (uŦ끬o]4ЎoIvRPH 0y(ZT9ɥh|M#>/IK' hDFO' P^TRSO뇽BȄ~,>4 twЮFϫMqn75OĠ =&_ jV ^D%s]2Hc+rh` }$rچ{zq|zɓ2hltw5mEmB p XDVGAiRh@V C1\VP}$,"k'(I/WP¼5 >tBro 9X}AI"21))],o::d!6PF/Q[xTZ CbDoS9,Is;pƉz r9oZl_dZA-L# |5#&V+T`M6\λԷ5iAɁeeO<m IiD |ܣ&LgTV6(&+Y.4@YaS9g-N %Y4Ւd5qF810P7&|lӦ\V JNWOfä] |G=r2MA.F '~za{uU{ wy?"4Kyp$z U{ijGp |LWSEn,$_ !w]YM%}M5!.Din4X%oNuu%gYLt xcƙa =ԅd23w07A{kZRrKB2nkʁ1~&6uK̢2`Pc/ fO>6dJv]9E[ToPxTv.f`ƮWq{x7E 'B\M)c?Q6Ms|Di*Oq@`7Z&FƸ[-畼{U@ Kh<5%BhIZiv/AOӜ~ܪ\WӈS#|놓@2]²롐MtTAV3{4N >=hQ!OeSAuK64Ƅ ѓph}(=H^^1hD`Hk[,<7 iS0տKw.i#D(lIeٞxG TGzΓ8АR’fr܇e#9FymIFC}LҼu 4$gG^Q[(mRr!wS#"oΣVADVA.s]**_33!K,o |caF$QG\/+O Kӭ 6g0|#NnZ҄dpB~>1.]bon!6uD-<[_0ht.gY]Ef;j(5f*aP:+$ъ0Y󭇟F̚6/ ֫4ipIIS =1j$j(t#s̡8L–e=\CSlڞR'j4jS,ܩ!zP[VߋjMA JӴ *}!DL˳8A6 'Ճ%HW[{aR0?v2`V֞\6&֒o7510C&G*UhE|e1(N 7l|J2 C=턼`W_?Ti|BloR-*-C rC.dCVް%q=qTIU={dPڀ*O64mUj Eqo6vjqܶ=5/#?Ka=QA]$DA-ܰ4g6HHȪbɥՏzbp y>wRۨIYDW< >)D쯑鵡?e?rɐϻ,q?Σ'L6x(}ς0BxCt۟T'>TÃ({z8t&Lv+LY_CKz\ TQz]kMpb,_=T_F5& pӔFx44C$ Zc5MR""!0&2N# 45|[XKu,%&&8|64ߠ`ފƃƚC= |ZM,Ys|JU*j1j\{=Em zdV4Ϥ^kcPR-` k/,or~z|5k848f͈9[= Ȗ̦:PBQx W# + MS&=waeLzEGi^&[64 5nt˩4ˡM g`'y 5Jנ TO'=|ǹ.{0˽4?QЎ7.WH6'gSnAjRM?M^ &EG6P5L 58F]{,D'ndh 0riuOTX끒+dpgHGmjZP)PXJKt=HH&41:r-zWiJsP K5A40D]UE$n ah_Gs xeOS9& 
0QSE ́ԿA筆YSY>dk_k L2=G͗xDU@RA{J]d@3xԝF<JQ6%j|o4NMQVAqKSGO9RcQcȣ5SHܗ:pT^# i(zf4?ы˻f؆z޿K kINdDX'p\ndA[JWF Ii=ZO;x`kiF\@QS#oNs%T?xCftvu׿ar9N_Uc!zjaOLQkRosCpPMKGi5Xi5[ {jޣHIB"~Gh0lo#JCŦqZJvqaqᱬAGU=9#-ĵy4ؒycm V q)\z}ЄQ#RQsصOo@r$$E9NjhP0>>P#vI B`٠ʣ#ԏ^zט#=9hj9p*كsvc=hS+z7&}rV/Hƌ տ|; U["yw>dL'iCCPICC profilex}=H@_ӊ";fNDEt*BZu0 4$).kŪ "%/)=B4+4hmq1];_Ba`Hf1'II{zY=jb@@$eioOm}+*9I$~ό/K!W  @wV~bK ǁ:wFqq+ԁOk--zn-M.w'C6eW y) k^o}>i*y#^ywW{oiu2r7bKGD pHYs+tIME :\U5 IDATxiu&݈ȭrʪBZ$"E\A@$El)Yj%O[jQm͸t<^d[%QBR($KmBUoΏ|e ABTefdތwݾ]µ3  "7bpI*yWr(;eJ" 24r,l6*Nz[F3`&HA3AG@`Ԏ:vX_4 (G:&*E8D]Bç lbhNa*=8f"j7[ <v e4V#i~ߺ&H #H# Ǻ;zb}b6%C f 2dj@QqMY)N1,W a2-.\<]:?xrpre<6Y@`&  y_"*oFzf@LdpboْVژp6&'SD4 *K}MQ~FDADd2AD1f*UcNe&csZ..ĀI@s05fkWRhWoz9l|g2מA#}~ubнp $3g؆a5ٱ3'h@ ^<562FLUj .7xzd<[X &fF󿻸ٹ=^~Ƃnwܪ?w^ !{d}pX,+(H\Sϥ2( ``12x'љ%猲]6])FC9-Xi6TIYy k(:KUoxg7~b쎁=G-#8g%05\*)f[?7O|eɪS5~hi9s/dAViI|d?/%"c? U! `lnѯOvgj_1jc@ROЊrr cUJK5]w ܳ#GY&WHcG{]p*dZYc:RJ;[oHϟc`A|^a՞墫P9轓O+_ )[ΆR]+__7p KژA:ݱo "7%{7[x !ݲs غRZ/{_~!ۨ)SP\*yJAݙHrY˟JM /p"L֟ 2;|.``_ b\m@ag`sOwNrxuCUP=?n2t닸^A|dT_󯿗o{䟈EM'̛Rw9)"2 gz}öS5VukEnfkK~q|[p Jv KEzcS74;7fM.fUfNH'Å{2 kpȞ{w-`jM&O.Ӫ" EAf_zy~~'"WK bt~k0uPo$Jڟgŀ'BDU}Gn[L:5i׆U-lny?祖2vyнׂ+-axRL7ܫ:Qc"5@|}Q3B/˖a}C;>;'s%gZdyl."%nxeЉѥӃ.(Ը Wt>{(WԺu"Wv?:x(ʅdD8G>>:h,ERZWiu]_V2(T$] RW]y8,\uQuPْvθW)Oh|{HzsXfm6H>bii`)~Z2vGs݌5Ҕd1kx݋: `׊LDXߐ/v;tcwr2XXc5?r|v*6rMA\%ZpL3DOwx Et9<0tFR"ßRvo8b=+T8fq/Ud).4.` 5<2r, u-)e:5*VJ`,mw0Pu*[).O- U EP_24Z5yW"-l7#[5w/'tEDDdQ 9͝Rd4|~ώ.rlBkqc&|ޚ] 9qb*?WLfJ'Gz4譅ٹN \b[ ǒ+c.[ƒK?]rb vL8N2 3PDtP`0{JLU\\mSWSBṊPɛ_;C1" !B\H؛+gdž՜2d<+r:80FPvKC9ۍj! aUH#[D$9`l"@u2Z-+\r'}}MA Mswc!0SŽttVDU跎=闓CC Z3n.&3ېیRfrDJځJ! 
Va'=b0+V>cqTf_ JwtyT3oz +jl{KvƾKUq!>+Q(EfzύZLlx3W N9| A,݊MG_BEk0JKUb+Ԋϖv熫 S\XLg6}_&HHEFťzK{~˩Ly0!ckCp۱pTr)8¦Ii @_,NFLg|658`j LT )̿u23#5$ן2UuNtm^za7 q"8P3N[8:lr|NKg31ӤΞdۘ#3Y,ɏg^0v>wZݬł"5Tw:%`oܱT# Xi#n9<Ԇęy#xr}~a<1|ll96\"&7!˹2J2 $:XU: P-$0{~X:R^ \*VEYj5QKM(.̑Zt)@{߼q(2sB3Oo N/UM0.XG0se՛P-&1sA D}Y*%@2a kV>ZN[ ZL`^T ޴a;ߵ%%2̃N-k'm&΍ZTy7 kcr qS=)ъ*A~I& /ւeX~dsʟ7nN}'E0./eZwѵ!+ 58tBqi{!r窂k_S\$֐OhϱPXڄ]ɶMww=FyIQ&7D{&!CH.lA>UbX MSUa-мm62] N5a;υ8{Ϯ/DB](`rl8`-,ת_.ǰ4'Qrj2 34X8n6{>Om4Z 8Z1mSn}2if֙a#nNؒIQ/ X"jWi"mVz"U o[!\JNoc;y_9X&h1o[0xtl9^8gtz N+S#Uuz\ Wn8n{"es)yCQ"29!DwyJ4>Tئ uzUGS5*.o@%SCE1?c:g\M`E:]}}@fvH#f-W:D (^߼\E4 -o瓵u<4nI'fQ7lBVG |e_Bu%DQaSvCO&`{:3q܊9.MQTmAN-Gz\XM̒Mm}Lĵ_Dr(LcV+@:W@n^W!d+ffFKV]޳1 &9KDI?}%[1MLb١b6kq=WTN3xוQZYaaojEO5╧ f>J/ӄ;)z*dKr٦ptvoX2|Gf>_~sXuBԀ9U[*;s_Zz`^[9B7۩Q-$=q5;# ׿&f5Rn${^]˅JHOhV_ǏU:} 7 TnO@WlO#u~%3K|ɺ8`]%`;t"tQ$&Z*+uy 2z_)3`x WO't^N*[.X`]6+3GlЃbZ;rDtvmA`;G l"܅c vG7 Uh @Ru2FZhB4l#x:5R4Yܴ nQO.`+{,۶aՏ$.D>'1&cl> 2BdYo2;b.~R`]>cL|~ 9*TS[\n,9aqj֣NfL0hSjyM 2ܮWT֌F^o 7$R X*1 P92)vEq`]DVNF(| 3>d=ɌD4nT/UQ.YA\K`qԔB8sO/<:w0˃gceLp X̂k,5b `&6-PFiLPWVX `}XA=386 nŎիEerP!˫*RmZA`t0Gv;B7Dp30Q8e-ua0sfwC+gV34i[-OYdjT>R7M_E aV=߹PQ(@,#|yYo&Z]jz2jeN[KL@XϖԄ4oM/~W(Qy;5fDoɩ V\gHV=>r+#O^*Yɂ93ځ6ZFTY5WqVxK!` ˳)RDQ|krƵ`daIKΩcd_ "+D-$u:6Fp$ eu%nBzM#|fV`[\:I8r/P?ND5-oIf,XL>S߸癳?\*.X5pAMtM%|SG 7L`[.v9PlAzQ2؀cjˑ=R\3hp5uWGQPҵԺT_ka@#sGku?␊hIGX|\}sa<ޅ?tfz>"Kk$ZR [ |n_3dUT:\ZvpXV1VNO S|mHcE4$oxd0Qw7V_3-씛k, vnmia{[fG#Zs0)jdUh \8enij=ypnb]h4+5+C$Ģ򷻹2o5)b}x:3ݾhu2<4箺@~Tyuɠh!XKaW(.{v=?;XԌiȴEB NNͫwV'#],MӰԲ">e̶AWSO[V0 rLe&ۃOJvhM>^6PM+ٽt"iH.` :yknQ]O̘SDp KؠIY @X-ZnY),KQ:7(KGuS 63/C>f*,Ubx{vT.yFɮ!3.-Vۍ`ɀ2Nr2EzjmSKE`yY!6@)@԰(u T9d" C4Z]|W;jf7<ǩj*QV;e_JaDtڛ4nL<~a߁?وȊ]@:ϫ+KZ}i1P?[Cs12v8Pkm\f[+pz*Dɉ 9ΐv޿"` %=-Zoә'WAWm6߀,|VyԻ.,uiFʀc B GFVVU]c+?8)M4BʻS5u>JCo9}k"Uo3ۥ1H~ŕ +p^3VR7Q,EG֝ErIWQ*|3h,U]ixJJB鵬Zlw}[Yhs)?_\f< ǀPKnTA,UuNp yi`52L J+ Y-kjԹleV =׿+T~"њLG3}@R #(j5>g-28NdnݺZ8w6sO%mᡔ9@JIny֑(Zl>]υdX8}J6cV+x5يi{02%3FFN-jQڇ On^Mi Ҭ Aatz:^ -"Wrx3m` 62$ZTZY8=J!=RTYv93w`y&;حZ!YgH{l u̸a _ 
ѩBtתҀquvI[b5rBl52@T<ץPg+Hz7Ͽ͙sC&ָQ Ĭ=Lfނd@,9n% Nt_:4#&H09nZoXbVInj@쏄kZ 2+:m/Lr@S#1W"uhM o^jeTȿ[ a!Ă\]c`Zxٱ' XiS)܆ɚJ꿲oGa}!PAʻLg ݑ4̇J|AzdB't_mg5 /Sؗl7ωrSW/ aZeto}^a /JW^A٤ZoFaָ,h-O]%"xm2@McMP8,EARgO =wXjxċՙV5⹐- La& ^*Qˢt ڕs[xv26vn5OA Ӫ4X+`a[Oҁ9D8+vW<KHn<t< 7 m=ēQ/nV$5BZr2Jb\HћjʪW;[;5_BI$b3+Uuhkt#0¬C0 "Qk5O^=c>Фv02 ҇[ a"5\4AHfVpr`J0L p,TIdo@~a2or]5Ҽ0 b΢{뫈$g@7%%;߆YuZmo:Ƃ ؓlJkB$:v2pX1ZTK4MB8'fk*}.?%AԛW@Ѕr :Tr=1;([ `N, [@0 3Pj6& G32T2huG ]F$9h:'kSӮ)8}gځ]d<.OD"ծ}ʡw Hl<^/ʩpM.aRԫ_{4-rʻG<*1ƩX M*}afnd b0sb:;qtt;R 1t^ٴ Dg0 򋛐ۊT `'0uJl/[0p"bA#].Jpɠ)MJMȜإ8ڪJTc1zG-L^K 7En~(g&GW^]C8ô}q>7Te"V-J&Z&h5˩{D A$>Q+'1"<<{ YKPT[dj}1j-HO@aq J&u 4B] 7!8O!vzR@G+N6!7:j5Ѯ:µ03 A81Ć΂ GwK=!@G]X L/o@zz -Q1b}5fͭQX)yvq`TK19Q0j|хC-'M1#`py< :D3mQ}S1u9ks["tjQ(da! 7(솬c11Tr),ڃ"$gd` b;j)j \nT 1L0KH غQ"@mm9 r]Y"bt*``aaqV8- 囨YݙU q\3R bg ;PE.@̊ o*6xl{< >Rqq`:35*_ZדOwS #}ngn]IK"t6DE_W$M;9'+/g+ss9Y.j 3PΧPɧ_t Cgj,̦ VA0s+cYөǪ ^Ukêk 둟t;WA0O awlp.yC1y}7s_[M<407 d?!p 2Kcĕ@ŚϘsuz#aO[{a08 8;;Cw s}Xz]Wd2 ^8ޔȭO^f"{ɄLmy5 F#3\ .ӄ@3';[N-H8&/ڟ%GѰP4|އ/4 7 &ӿ{F^3|&t3c{b?05ţɹBdZ z;#}G/^?t7e7b~#\sS 'j Z!9J H@IOzA^X$? ٱnD =.[H{|6LN;@KaO9q3ǘy"Mp;:{p3 s}.-=jC*RBAS|Gn?G]z{;痋ttvύnxHX-ݯ!HȄeK DU!W@ Ros0 qmir:/BtG0RzȬ#5s`8q]R, W%sݫ>fvq.^9&gId#r@+TƖDDMd} Ҁ[nFxG@|#+]"Lc(W{90?ئEsŪ}Ԑ rAH~'5ֲ3Su1djJaݎ 3Q /t5%Bі80+5Y2TjWyownCbӄ˨.܎^rrp[.'qe0s133YhtQԪAmVT6,  vƂ>uң]|2DD,=T]cwc'uS+Ƞ$]5X]=ZܔT 4 eU]~! 
FglK-TP>>#ѱy@ݽ֓:RAF9Bu֠`#eN !z<Sv)K_'|20 Η-8s+YT8$:‚um >An9@vH#iPp lnKdP[F6QѴ.}Hv[k=!RC̎Mqk!r@u=3^8wCfW$+FPXm-ĿK]χ旈y"jiЉoxz5if!CAnVUeWs0aGxwd=>!I_8.]\ⷵPB2e>5t?ADc(Dp 2ǭV3Qy4L=D1v33a5U`R-E\ߛGG#/lfwX4 2^%yc&Ie 7pur:]RmƫK|q2ԘX֍w%c̜uw$l'uqapy,ng>k87V*tHN[}%vOy}݉?{t/8uC0C/{k~!Dhr&yp@Gv؅dH+К5.V2k-} xqhM2};Jϼ>`Ixad")ǍJ;C]kk?uKAT;7nkd-$*K7Lr|a?x]S7 ^Ζ @;Ls"yh=J "l{ V8CP#Z<.4md X,׆ Qh2|:D?y믏wmy^pW&7Y~f;{ytPv1oSo ҥ4ҥ=TD%kt 7 (g{Qzvx"'yf `; ]* 3|m҆Yɍ3!A>H X: |y]2S#qPgޣfNP^!F.MdAaVo T}deK+(V>׮ŵQ=&Wét;5җy[I'(  f(WC//ܲXh B;bXEa`7vfn:f2sg]UaU`-Oy.Jz")>q˧>x/ljh"`/J QT L\Tlxʓd \9mspBBQ ]`)L7xA2Cm:~LD$gr.ӋU[0:KvlB#ԚzUO3[D2#5@$Dp 9FZ%d:5 9ێkSnS}n??lEQ3ܣ^N']WݡPObCƍwNUg l@-LG!cYv/usSw彯K:) "/;.*24I -[Pʬ8h |,c5Cg2 2q3tdYq H܇+)rJLv9Z ƙň5VE6U%DjnԀ@_,;.6 ̃|?꡽/)c1S2۠i^z24Va׼\2n # V ӁQX@W-h0w7K׍|27 u2>8G}`Y k)d&Q5ֲZ,]-%ݪq8{D"~ONw: C֪#\]|I jMfbz/Dw9|Hus2!cX "[هo}[: %"\SQ|W~>Ţ`n;IAp ˆ.#uAR z2yCOG4)f*煱Lg؃Y⽊CLT-S2jΘ =eMm~ l4] ]+KEs1^.gG߮}[o'3&vyWͭ*{X' wx(!Bfx2`"ITEZ62cMf93sO=32KK4=S=}7Lz^> `g8ILNP湝X(&0JRnxdpX,TuC.;m+Tpgվ#ͯ-[;Le`ӓcR0EΟ+Րq  u;OFCED]ׂt@\3y!%i閮yu.n@mTU'[:܆8WJG X)3GNJ'Gh+ȽW,<.ݑUXMHU8 v/:mG^Δo\ʖrჩ+Њ8 [K٥"^>6c ,hcffBC/{NhBad:T# èݣ1\ 3'0X=Ƚ{I Yxnԧ H$"3fq%\mGsjWX5=l*u&֢24[_?8uy+RL^A2k'_;`yjYx< ™Z2lF2d\ϗlJ75bَ'F2s?1J^sQP'Q(D5+ FQ!*NSsX܍:/bLϾ9Mp BSB*)[RC7o=qGU]eXI{|p8._v?7>.v"D!ʀ(ՌZ8-)Y,Ai{q\&_0Ϝ|b D9 9(*ӽ\9 p S>r吺]n0"X`R`# FZpbsݍKw!I uc -gˡ7>zMgqեmQ;wU624I7AQ{\tU*U:r{'F@,*H &u2 h0ZPx"q.!uA8{>Ɔ;[T.Zi;U IDATB.iZɐ'7_d> 008@@}Lz1jvL-cT{?Z 3(UMTɼOL3ʶ7&zd2,fXF._Tc:_gz (pddH;}ar Ruja $6(7\RןE,b!4[䟀+C4l#ppo0je`t7>xɈfVc"^c75y@M0z$ ◡:_D[s8kAq,qrSe0fa R㊔A}HEu#~t+L9PHM#5ٕf8tvf= O}2 BeJЍL8t(VLgcHfvGk<#塲:JgOGjog81 FJOy`. 8 gMo6Gy= 2ދ7\ZzmC?<6d4?̎L$!.$KayH(ܚy˅J{L9'} gv5.Iv<]$3^/߽ g{{uA88Ս~+VCFc4j,5LbOB AppXW;1PˠBߵMf]@`}.o Z/'gH dX(byDCUT+Fb8;@-0'!@E7S'q"*ZC]L@vZnZ?G?FlR /_~yvs7m\Dd/ԀZxtOʕaõَ@>3l硝 UWua6:#Б{\@E]/S )HD1H+xh9|=gq:6j%BPM=߅'z~r pY%0lygg,RY{Yy ?] 
6ϞR =d"rLx}}]Nsak#lTqr1~n>GHLD׮ z Γܝ/ZjaXDF)4,Eu7N@+بVsCZ􊫇ݘk/lN%hRjJ&]9|CMHFn]ʝ+[8=K}-8xUaH-$a|!<LyD\ma LDO݄،|%;U [o8swdwuq_*"sQ#8-7ŭ3c8TY",Cցy[VÓN\2+_~#b2Q 4TS>HH;sØ?:<$n:=Ti]g6vͣcY*AR-櫎'{𥗯s7`b9Zxk֎1+ឭ3ch7K;ZF,Tet1хÓ7lEۿtʧ>7 &\kbHc;⁥lza 6~ Υ'Z¿}M|zIXpwkﵒ==)p/!:4w}ΟYߴpaWD՗RHuWS r;Pb!,݂r@n˄X-OL}AûO#⦍Y D)86R`.%qx _yc+{c Ő_y]2,677# T!S `6ӁB_pxP9vu=G8U"g^fl+?ys:ډbrlNM "A X['`w,?~x+{iMӇQ&F,G%h 54(W 0eYfX̜K7=`5Z)fj~N#a$7C)Ӌ6W֣ZH." DiTGnP7m\H.}'n$TK-+\8`y:O}!f#ڱ2;{PICH2b?^z#ny-4 (Tm̮'^\سg.GC' uRwoGb7G竰Z\έzW [phJ@Dp4g޸js:8#\a•to|춷ѹ@jb ŗ \ךn[m uݹڛ PR)]΍Q \:ׄ??r9@ʠq% 2Gd4WTXqƐp84|Ie8x.* 󼯕*cr:Ym^tW2G'4R3RrH17l7u``3<Sۘ9sZQBa!SqC+U|d*nD|ۦ͝)eA"jeOY3[p,Fu =vxè!j2Cx|/ _i-"ljV!VoLr@Yi>Q7cN0$^݌{ڛB]ۺn^GRk ZCFe?5OM2:@snTH-38 +[q PK"i ٧Tr8$XJ0xC-9Z#GZgü}ޛ}4AZЭ꘍yt$PJ`A[^:p|ߪd~G{ O_~FSl"!-P7>EX6hF~ERu3(@r,T ]/zko$Juj{1@۳dœqbNFF1Gd> -|m 1e8i 3gMq ]],%W%uRήiB*AHBm~@$@r" ;_zqa2ru }}ܼP NT==B,h8`7<  F *;rgVRG6W*,wVu+8 G2Q|<;sd(#E,9cQpZPsA[I8Sxc[Aj§Q>E6o:͏4Wl=qh\CT,ivzJ@%fn!HuN呝ʃ)15[&CW6_'t}Dňnצph,aحO30U庉*I]Pnxrxs-L4O8r' ='Ԗ٪PX\"+%6 MggaN^E̞QmZ"{at6N׎pIض#|+Ws.Ionґ#ڨӘopX)C!g6*]cr ϧL(J$b/ [rZN@SN3[&<^0QM5K0t) XݔEzU"Oҟ`O"$58ţ[lSE*B fAW1_Ke_b6_PXƛbqmQ\6h`uSCiZieVҋ4зT"R&SR 8e ]77X4~KxhXFQwRE>fe_UJDIfƈh8'6?y-> @(i*V B5i~VT$Kjɯ#64 %bbuk&~oHٖ=rIZVnjH5)PeF$X]O"hI~׊@| dKޜGODsVu/%IѾH0??~ysq[RFr ՕLT8MPO!+7CDZskqf^ a4CWc}GzkD1D-kr[/8vjS|~7 `asvrg79E c^[xw%/g1,D>IDATF[H[|s֠d:Nɼ̗)SDb Id2xGg1U&UNu9ဃ ^mv qnpC=0O 8? 9wKwngI2?ĭx-$:/qf(̣LT# .1Jivd (B e"ǫ  \|,3Xv|Lm DџW8Fkl_UoGUq|>F3;sK_oA &BeQTs03g*rN@-bUF^0HƕŪߚBQ.zs_}}R ½OJo6p:;K =CK/>hvmяbb`-$j`\qHjbquIx w~_J밥^a o[b+3[7&R3qTQkn*vqJUUiNbUaĿy/=0`pF=HhX6CS&'*RqViȐSa w3+WgĊ[ f"V*Dyp\KcM[ 1sݛ7ptѰ|yY8N IX]M_κ E*mKX*oЏܺk{x^;v GoH_X^ m` ՁA^cO@w@7|__:5E.U8.ލ;ক߅5B@Fre8}VtA?m}&^Է_WxYQ(WYBč;3)yzZ c,ϒk܉?]O[Z}T>UקRN 0"<6J 92[= Kk+Vrm19c^_ Sع O൓-xilX3 y) Ba FrŖ s[{.`}GMYωׯ)N%'N36uks :5nQ-ѿr ;[HeCJ0:Cn6ق-$b yE%ID  ]Zϐ^/i:|wK>!3DI"(R1Ydasf&PPv"#ﻁwš yԼ+ lP$7@Dd0#rOGEnT1 @|,<Ƥ 7+OQS'2 VtBDV/ 4<4{ Į{hzN϶; c32MʋʤUwJw@)쓐Xp uhZ@![)4l?'paw\#paƻѰ~jY. 
s\ d,5.ώs<#5&uaOE˸+a$ZNc@ ~ _Yp"Ƞ6(V 51X?˼BK/fbAkS4 yΆ3shQ">U #K.YZ!/թ1<njkfTwmݎ y  1:&::,|B-f4>u•nҰ B]^ \ *$V*/ uQ&V-4O0%wX` rp33dsƒ}bx$} d*9Wo+-+#/F)b~M#MbRJ "Mx 䃡 n dFY2hA;[$8=Eg% 2,Ff;%-=p`t3[>:`tf@a$( bۓ r`G0D4䓅B`a82Jpfaf >ރ14 AxCV3naY^ +bYH"=MVu3ȁ[@/Ǫ ͩ™u",$r ?`'%bP%b- ٥";tFxCMYn!/e:z"8R6cw\жn0L>ҥ2?sf 43sI"$)e@+ w"҃|,B7$( Wqy,0{o*n"[][OUQ7-UP]C)paܦk-M0XfR1!334XއÏ_&L)a$BJ3 K*Yh5GnN ŵ.ڡCԍ0; 4afDžݔyZ Ĝ$$bNq Qf)WKj7[D˃q:|fGP)&Є5-{$e^^r`2gCquA`X3 D%Xr e)(Ҹ{rp[w ZGp`X\UjepT2'Ir,Q/+&u`;> b-{}.3AG( L q'GS͘9ѨZe/z1DDctgi/GInyx?Z?H= 6M:1DP"+m_GV;QnKQ43Z? @|7Cj,7 INJQ8@y^:Y\DJ)S3>[`;fquc0D\eZ?ɝR\ʋcI&>@Qp7gFݏM@AiI%\9e BjV_.FyZԙg͉ `%)j>w:"h)rmUVV= <|9iej_ސE.*0RA|G_ !"{/,qMdz4V] t yT5]8P8n^ jp8.%)l50pM{y,4!3%pvS`_^/fv)-C|dv̪&nzBai+!OP㇆i[d7=ǰ^#cTU {'^fCn^̱8Ic7EN?׋7c4{ZUDݼtrΤ֖uF>qsb(+&.`PC"!Èn5 {ua,/@@/3܋d'eތA@qeu trI33+qu!j l *Ugu " 0DxS[oH#ؓG43U z93p|7r4u<{: '-,@E 9yI俛k ("{f==rʂT D b$W\3J n#6B|d%9`fX^ u^(f fqð s:_hz(k image/svg+xml parsing serializing querying storing vincent_donofrio has_name Vincent D'Onofrio starred_in similar_plot_to released_in 1999 the_thirteenth_floor law_&_order_criminal_intent is_a movie tv_show chris_noth sex_and_the_city the_matrix released_in starred_in starred_in starred_in is_a is_a is_a RDF Graph RDF Triples RDF Graph RDF Graph RDFa turtle / n3 RDF/XML ntriples TriX Sleepycat SPARQLStore Memory Store-providedSPARQL programmaticaccess to triples SPARQL 1.1Engine JSON-LD RDF/JSON SPARQLStore RDF/JSON JSON-LD RDFLib nquads TriG TriX RDF/XML ntriples turtle / n3 microdata rdflib-6.1.1/docs/_static/pyramid.css000066400000000000000000000131311415774155300175310ustar00rootroot00000000000000/* * pylons.css_t * ~~~~~~~~~~~~ * * Sphinx stylesheet -- pylons theme. * * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. 
* */ @import url("basic.css"); /* -- page layout ----------------------------------------------------------- */ body { font-family: "Nobile", sans-serif; font-size: 100%; background-color: #393939; color: #ffffff; margin: 0; padding: 0; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 230px; } hr { border: 1px solid #B1B4B6; } div.document { background-color: #eee; } div.header { width:100%; background: #f4ad32 url(headerbg.png) repeat-x 0 top; border-bottom: 2px solid #ffffff; } div.logo { text-align: center; padding-top: 10px; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 30px 30px; font-size: 1em; border: 2px solid #ddd; border-right-style: none; overflow: auto; } div.footer { color: #ffffff; width: 100%; padding: 13px 0; text-align: center; font-size: 75%; background: transparent; clear:both; } div.footer a { color: #ffffff; text-decoration: none; } div.footer a:hover { color: #e88f00; text-decoration: underline; } div.related { line-height: 30px; color: #373839; font-size: 0.8em; background-color: #eee; } div.related a { color: #1b61d6; } div.related ul { padding-left: 240px; } div.sphinxsidebar { font-size: 0.75em; line-height: 1.5em; } div.sphinxsidebarwrapper{ padding: 10px 0; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: "Neuton", sans-serif; color: #373839; font-size: 1.4em; font-weight: normal; margin: 0; padding: 5px 10px; border-bottom: 2px solid #ddd; } div.sphinxsidebar h4{ font-size: 1.3em; } div.sphinxsidebar h3 a { color: #000000; } div.sphinxsidebar p { color: #888; padding: 5px 20px; } div.sphinxsidebar p.topless { } div.sphinxsidebar ul { margin: 10px 20px; padding: 0; color: #373839; } div.sphinxsidebar a { color: #444; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: sans-serif; font-size: 1em; } div.sphinxsidebar input[type=text]{ margin-left: 20px; } /* -- sidebars -------------------------------------------------------------- */ div.sidebar { margin: 
0 0 0.5em 1em; border: 2px solid #c6d880; background-color: #e6efc2; width: 40%; float: right; border-right-style: none; border-left-style: none; padding: 10px 20px; } p.sidebar-title { font-weight: bold; } /* -- body styles ----------------------------------------------------------- */ a, a .pre { color: #1b61d6; text-decoration: none; } a:hover, a:hover .pre { text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: "Neuton", sans-serif; background-color: #ffffff; font-weight: normal; color: #373839; margin: 30px 0px 10px 0px; padding: 5px 0; } div.body h1 { border-top: 20px solid white; margin-top: 0; font-size: 200%; } div.body h2 { font-size: 150%; background-color: #ffffff; } div.body h3 { font-size: 120%; background-color: #ffffff; } div.body h4 { font-size: 110%; background-color: #ffffff; } div.body h5 { font-size: 100%; background-color: #ffffff; } div.body h6 { font-size: 100%; background-color: #ffffff; } a.headerlink { color: #1b61d6; font-size: 0.8em; padding: 0 4px 0 4px; text-decoration: none; } a.headerlink:hover { text-decoration: underline; } div.body p, div.body dd, div.body li { line-height: 1.5em; } div.admonition p.admonition-title + p { display: inline; } div.highlight{ background-color: white; } div.note { border: 2px solid #7a9eec; border-right-style: none; border-left-style: none; padding: 10px 20px 10px 60px; background: #e1ecfe url(dialog-note.png) no-repeat 10px 8px; } div.seealso { background: #fff6bf url(dialog-seealso.png) no-repeat 10px 8px; border: 2px solid #ffd324; border-left-style: none; border-right-style: none; padding: 10px 20px 10px 60px; } div.topic { background: #eeeeee; border: 2px solid #C6C9CB; padding: 10px 20px; border-right-style: none; border-left-style: none; } div.warning { background: #fbe3e4 url(dialog-warning.png) no-repeat 10px 8px; border: 2px solid #fbc2c4; border-right-style: none; border-left-style: none; padding: 10px 20px 10px 60px; } 
p.admonition-title { display: none; } p.admonition-title:after { content: ":"; } pre { padding: 10px; background-color: #fafafa; color: #222; line-height: 1.2em; border: 2px solid #C6C9CB; font-size: 1.1em; margin: 1.5em 0 1.5em 0; border-right-style: none; border-left-style: none; } tt { background-color: transparent; color: #222; font-size: 1.1em; font-family: monospace; } .viewcode-back { font-family: "Nobile", sans-serif; } div.viewcode-block:target { background-color: #fff6bf; border: 2px solid #ffd324; border-left-style: none; border-right-style: none; padding: 10px 20px; } table.highlighttable { width: 100%; } table.highlighttable td { padding: 0; } a em.std-term { color: #007f00; } a:hover em.std-term { text-decoration: underline; } .download { font-family: "Nobile", sans-serif; font-weight: normal; font-style: normal; } tt.xref { font-weight: normal; font-style: normal; }rdflib-6.1.1/docs/_themes/000077500000000000000000000000001415774155300153515ustar00rootroot00000000000000rdflib-6.1.1/docs/_themes/armstrong/000077500000000000000000000000001415774155300173655ustar00rootroot00000000000000rdflib-6.1.1/docs/_themes/armstrong/LICENSE000066400000000000000000000022151415774155300203720ustar00rootroot00000000000000Copyright (c) 2011 Bay Citizen & Texas Tribune Original ReadTheDocs.org code Copyright (c) 2010 Charles Leifer, Eric Holscher, Bobby Grace Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. rdflib-6.1.1/docs/_themes/armstrong/README000066400000000000000000000001601415774155300202420ustar00rootroot00000000000000This is the Armstrong Sphinx theme from https://github.com/armstrong/armstrong_sphinx Used under BSD license. rdflib-6.1.1/docs/_themes/armstrong/layout.html000066400000000000000000000031751415774155300215760ustar00rootroot00000000000000{% extends "basic/layout.html" %} {% set script_files = script_files + [pathto("_static/searchtools.js", 1)] %} {% block htmltitle %} {{ super() }} {% endblock %} {% block footer %} {% if theme_analytics_code %} {% endif %} {% endblock %} rdflib-6.1.1/docs/_themes/armstrong/static/000077500000000000000000000000001415774155300206545ustar00rootroot00000000000000rdflib-6.1.1/docs/_themes/armstrong/static/rtd.css_t000066400000000000000000000410301415774155300225000ustar00rootroot00000000000000/* * rtd.css * ~~~~~~~~~~~~~~~ * * Sphinx stylesheet -- sphinxdoc theme. Originally created by * Armin Ronacher for Werkzeug. * * Customized for ReadTheDocs by Eric Pierce & Eric Holscher * * :copyright: Copyright 2007-2010 by the Sphinx team, see AUTHORS. * :license: BSD, see LICENSE for details. 
* */ /* RTD colors * light blue: {{ theme_light_color }} * medium blue: {{ theme_medium_color }} * dark blue: {{ theme_dark_color }} * dark grey: {{ theme_grey_color }} * * medium blue hover: {{ theme_medium_color_hover }}; * green highlight: {{ theme_green_highlight }} * light blue (project bar): {{ theme_light_color }} */ @import url("basic.css"); /* PAGE LAYOUT -------------------------------------------------------------- */ body { font: 100%/1.5 "ff-meta-web-pro-1","ff-meta-web-pro-2",Arial,"Helvetica Neue",sans-serif; text-align: center; color: black; background-color: {{ theme_background }}; padding: 0; margin: 0; } div.document { text-align: left; background-color: {{ theme_light_color }}; } div.bodywrapper { background-color: {{ theme_white }}; border-left: 1px solid {{ theme_lighter_gray }}; border-bottom: 1px solid {{ theme_lighter_gray }}; margin: 0 0 0 16em; } div.body { margin: 0; padding: 0.5em 1.3em; max-width: 55em; min-width: 20em; } div.related { font-size: 1em; background-color: {{ theme_background }}; } div.documentwrapper { float: left; width: 100%; background-color: {{ theme_light_color }}; } p.logo { padding-top: 30px; } /* HEADINGS --------------------------------------------------------------- */ h1 { margin: 0; padding: 0.7em 0 0.3em 0; font-size: 1.5em; line-height: 1.15; color: {{ theme_h1 }}; clear: both; } h2 { margin: 2em 0 0.2em 0; font-size: 1.35em; padding: 0; color: {{ theme_h2 }}; } h3 { margin: 1em 0 -0.3em 0; font-size: 1.2em; color: {{ theme_h3 }}; } div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { color: black; } h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor { display: none; margin: 0 0 0 0.3em; padding: 0 0.2em 0 0.2em; color: {{ theme_gray_a }} !important; } h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, h5:hover a.anchor, h6:hover a.anchor { display: inline; } h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 
a.anchor:hover, h5 a.anchor:hover, h6 a.anchor:hover { color: {{ theme_gray_7 }}; background-color: {{ theme_dirty_white }}; } /* LINKS ------------------------------------------------------------------ */ /* Normal links get a pseudo-underline */ a { color: {{ theme_link_color }}; text-decoration: none; border-bottom: 1px solid {{ theme_link_color_decoration }}; } /* Links in sidebar, TOC, index trees and tables have no underline */ .sphinxsidebar a, .toctree-wrapper a, .indextable a, #indices-and-tables a { color: {{ theme_dark_gray }}; text-decoration: none; border-bottom: none; } /* Most links get an underline-effect when hovered */ a:hover, div.toctree-wrapper a:hover, .indextable a:hover, #indices-and-tables a:hover { color: {{ theme_black }}; text-decoration: none; border-bottom: 1px solid {{ theme_black }}; } /* Footer links */ div.footer a { color: {{ theme_background_text_link }}; text-decoration: none; border: none; } div.footer a:hover { color: {{ theme_medium_color_link_hover }}; text-decoration: underline; border: none; } /* Permalink anchor (subtle grey with a red hover) */ div.body a.headerlink { color: {{ theme_lighter_gray }}; font-size: 1em; margin-left: 6px; padding: 0 4px 0 4px; text-decoration: none; border: none; } div.body a.headerlink:hover { color: {{ theme_negative_text }}; border: none; } /* NAVIGATION BAR --------------------------------------------------------- */ div.related ul { height: 2.5em; } div.related ul li { margin: 0; padding: 0.65em 0; float: left; display: block; color: {{ theme_background_link_half }}; /* For the >> separators */ font-size: 0.8em; } div.related ul li.right { float: right; margin-right: 5px; color: transparent; /* Hide the | separators */ } /* "Breadcrumb" links in nav bar */ div.related ul li a { order: none; background-color: inherit; font-weight: bold; margin: 6px 0 6px 4px; line-height: 1.75em; color: {{ theme_background_link }}; text-shadow: 0 1px rgba(0, 0, 0, 0.5); padding: 0.4em 0.8em; border: none; 
border-radius: 3px; } /* previous / next / modules / index links look more like buttons */ div.related ul li.right a { margin: 0.375em 0; background-color: {{ theme_medium_color_hover }}; text-shadow: 0 1px rgba(0, 0, 0, 0.5); border-radius: 3px; -webkit-border-radius: 3px; -moz-border-radius: 3px; } /* All navbar links light up as buttons when hovered */ div.related ul li a:hover { background-color: {{ theme_medium_color }}; color: {{ theme_white }}; text-decoration: none; border-radius: 3px; -webkit-border-radius: 3px; -moz-border-radius: 3px; } /* Take extra precautions for tt within links */ a tt, div.related ul li a tt { background: inherit !important; color: inherit !important; } /* SIDEBAR ---------------------------------------------------------------- */ div.sphinxsidebarwrapper { padding: 0; } div.sphinxsidebar { margin: 0; margin-left: -100%; float: left; top: 3em; left: 0; padding: 0 1em; width: 14em; font-size: 1em; text-align: left; background-color: {{ theme_light_color }}; } div.sphinxsidebar img { max-width: 12em; } div.sphinxsidebar h3, div.sphinxsidebar h4 { margin: 1.2em 0 0.3em 0; font-size: 1em; padding: 0; color: {{ theme_gray_2 }}; font-family: "ff-meta-web-pro-1", "ff-meta-web-pro-2", "Arial", "Helvetica Neue", sans-serif; } div.sphinxsidebar h3 a { color: {{ theme_grey_color }}; } div.sphinxsidebar ul, div.sphinxsidebar p { margin-top: 0; padding-left: 0; line-height: 130%; background-color: {{ theme_light_color }}; } /* No bullets for nested lists, but a little extra indentation */ div.sphinxsidebar ul ul { list-style-type: none; margin-left: 1.5em; padding: 0; } /* A little top/bottom padding to prevent adjacent links' borders * from overlapping each other */ div.sphinxsidebar ul li { padding: 1px 0; } /* A little left-padding to make these align with the ULs */ div.sphinxsidebar p.topless { padding-left: 0 0 0 1em; } /* Make these into hidden one-liners */ div.sphinxsidebar ul li, div.sphinxsidebar p.topless { white-space: nowrap; 
overflow: hidden; } /* ...which become visible when hovered */ div.sphinxsidebar ul li:hover, div.sphinxsidebar p.topless:hover { overflow: visible; } /* Search text box and "Go" button */ #searchbox { margin-top: 2em; margin-bottom: 1em; background: {{ theme_dirtier_white }}; padding: 0.5em; border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; } #searchbox h3 { margin-top: 0; } /* Make search box and button abut and have a border */ input, div.sphinxsidebar input { border: 1px solid {{ theme_gray_9 }}; float: left; } /* Search textbox */ input[type="text"] { margin: 0; padding: 0 3px; height: 20px; width: 144px; border-top-left-radius: 3px; border-bottom-left-radius: 3px; -moz-border-radius-topleft: 3px; -moz-border-radius-bottomleft: 3px; -webkit-border-top-left-radius: 3px; -webkit-border-bottom-left-radius: 3px; } /* Search button */ input[type="submit"] { margin: 0 0 0 -1px; /* -1px prevents a double-border with textbox */ height: 22px; color: {{ theme_dark_gray }}; background-color: {{ theme_light_color }}; padding: 1px 4px; font-weight: bold; border-top-right-radius: 3px; border-bottom-right-radius: 3px; -moz-border-radius-topright: 3px; -moz-border-radius-bottomright: 3px; -webkit-border-top-right-radius: 3px; -webkit-border-bottom-right-radius: 3px; } input[type="submit"]:hover { color: {{ theme_white }}; background-color: {{ theme_green_highlight }}; } div.sphinxsidebar p.searchtip { clear: both; padding: 0.5em 0 0 0; background: {{ theme_dirtier_white }}; color: {{ theme_gray }}; font-size: 0.9em; } /* Sidebar links are unusual */ div.sphinxsidebar li a, div.sphinxsidebar p a { background: {{ theme_light_color }}; /* In case links overlap main content */ border-radius: 3px; -moz-border-radius: 3px; -webkit-border-radius: 3px; border: 1px solid transparent; /* To prevent things jumping around on hover */ padding: 0 5px 0 5px; } div.sphinxsidebar li a:hover, div.sphinxsidebar p a:hover { color: {{ theme_black }}; text-decoration: none; 
border: 1px solid {{ theme_light_gray }}; } /* Tweak any link appearing in a heading */ div.sphinxsidebar h3 a { } /* OTHER STUFF ------------------------------------------------------------ */ cite, code, tt { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; font-size: 0.95em; letter-spacing: 0.01em; } tt { background-color: {{ theme_code_background }}; color: {{ theme_dark_gray }}; } tt.descname, tt.descclassname, tt.xref { border: 0; } hr { border: 1px solid {{ theme_ruler }}; margin: 2em; } pre, #_fontwidthtest { font-family: 'Consolas', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace; margin: 1em 2em; font-size: 0.95em; letter-spacing: 0.015em; line-height: 120%; padding: 0.5em; border: 1px solid {{ theme_lighter_gray }}; background-color: {{ theme_code_background }}; border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; } pre a { color: inherit; text-decoration: underline; } td.linenos pre { padding: 0.5em 0; } div.quotebar { background-color: {{ theme_almost_white }}; max-width: 250px; float: right; padding: 2px 7px; border: 1px solid {{ theme_lighter_gray }}; } div.topic { background-color: {{ theme_almost_white }}; } table { border-collapse: collapse; margin: 0 -0.5em 0 0; } table td, table th { padding: 0.2em 0.5em 0.2em 0.5em; } /* ADMONITIONS AND WARNINGS ------------------------------------------------- */ /* Shared by admonitions, warnings and sidebars */ div.admonition, div.warning, div.sidebar { font-size: 0.9em; margin: 2em; padding: 0; /* border-radius: 6px; -moz-border-radius: 6px; -webkit-border-radius: 6px; */ } div.admonition p, div.warning p, div.sidebar p { margin: 0.5em 1em 0.5em 1em; padding: 0; } div.admonition pre, div.warning pre, div.sidebar pre { margin: 0.4em 1em 0.4em 1em; } div.admonition p.admonition-title, div.warning p.admonition-title, div.sidebar p.sidebar-title { margin: 0; padding: 0.1em 0 0.1em 0.5em; color: white; font-weight: bold; font-size: 1.1em; 
text-shadow: 0 1px rgba(0, 0, 0, 0.5); } div.admonition ul, div.admonition ol, div.warning ul, div.warning ol, div.sidebar ul, div.sidebar ol { margin: 0.1em 0.5em 0.5em 3em; padding: 0; } /* Admonitions and sidebars only */ div.admonition, div.sidebar { border: 1px solid {{ theme_positive_dark }}; background-color: {{ theme_positive_light }}; } div.admonition p.admonition-title, div.sidebar p.sidebar-title { background-color: {{ theme_positive_medium }}; border-bottom: 1px solid {{ theme_positive_dark }}; } /* Warnings only */ div.warning { border: 1px solid {{ theme_negative_dark }}; background-color: {{ theme_negative_light }}; } div.warning p.admonition-title { background-color: {{ theme_negative_medium }}; border-bottom: 1px solid {{ theme_negative_dark }}; } /* Sidebars only */ div.sidebar { max-width: 200px; } div.versioninfo { margin: 1em 0 0 0; border: 1px solid {{ theme_lighter_gray }}; background-color: {{ theme_light_medium_color }}; padding: 8px; line-height: 1.3em; font-size: 0.9em; } .viewcode-back { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', 'Verdana', sans-serif; } div.viewcode-block:target { background-color: {{ theme_viewcode_bg }}; border-top: 1px solid {{ theme_viewcode_border }}; border-bottom: 1px solid {{ theme_viewcode_border }}; } dl { margin: 1em 0 2.5em 0; } /* Highlight target when you click an internal link */ dt:target { background: {{ theme_highlight }}; } /* Don't highlight whole divs */ div.highlight { background: transparent; } /* But do highlight spans (so search results can be highlighted) */ span.highlight { background: {{ theme_highlight }}; } div.footer { background-color: {{ theme_background }}; color: {{ theme_background_text }}; padding: 0 2em 2em 2em; clear: both; font-size: 0.8em; text-align: center; } p { margin: 0.8em 0 0.5em 0; } .section p img { margin: 1em 2em; } /* MOBILE LAYOUT -------------------------------------------------------------- */ @media screen and (max-width: 600px) { h1, h2, h3, 
h4, h5 { position: relative; } ul { padding-left: 1.75em; } div.bodywrapper a.headerlink, #indices-and-tables h1 a { color: {{ theme_almost_dirty_white }}; font-size: 80%; float: right; line-height: 1.8; position: absolute; right: -0.7em; visibility: inherit; } div.bodywrapper h1 a.headerlink, #indices-and-tables h1 a { line-height: 1.5; } pre { font-size: 0.7em; overflow: auto; word-wrap: break-word; white-space: pre-wrap; } div.related ul { height: 2.5em; padding: 0; text-align: left; } div.related ul li { clear: both; color: {{ theme_dark_color }}; padding: 0.2em 0; } div.related ul li:last-child { border-bottom: 1px dotted {{ theme_medium_color }}; padding-bottom: 0.4em; margin-bottom: 1em; width: 100%; } div.related ul li a { color: {{ theme_dark_color }}; padding-right: 0; } div.related ul li a:hover { background: inherit; color: inherit; } div.related ul li.right { clear: none; padding: 0.65em 0; margin-bottom: 0.5em; } div.related ul li.right a { color: {{ theme_white }}; padding-right: 0.8em; } div.related ul li.right a:hover { background-color: {{ theme_medium_color }}; } div.body { clear: both; min-width: 0; word-wrap: break-word; } div.bodywrapper { margin: 0 0 0 0; } div.sphinxsidebar { float: none; margin: 0; width: auto; } div.sphinxsidebar input[type="text"] { height: 2em; line-height: 2em; width: 70%; } div.sphinxsidebar input[type="submit"] { height: 2em; margin-left: 0.5em; width: 20%; } div.sphinxsidebar p.searchtip { background: inherit; margin-bottom: 1em; } div.sphinxsidebar ul li, div.sphinxsidebar p.topless { white-space: normal; } .bodywrapper img { display: block; margin-left: auto; margin-right: auto; max-width: 100%; } div.documentwrapper { float: none; } div.admonition, div.warning, pre, blockquote { margin-left: 0em; margin-right: 0em; } .body p img { margin: 0; } #searchbox { background: transparent; } .related:not(:first-child) li { display: none; } .related:not(:first-child) li.right { display: block; } div.footer { padding: 1em; } 
.rtd_doc_footer .badge { float: none; margin: 1em auto; position: static; } .rtd_doc_footer .badge.revsys-inline { margin-right: auto; margin-bottom: 2em; } table.indextable { display: block; width: auto; } .indextable tr { display: block; } .indextable td { display: block; padding: 0; width: auto !important; } .indextable td dt { margin: 1em 0; } ul.search { margin-left: 0.25em; } ul.search li div.context { font-size: 90%; line-height: 1.1; margin-bottom: 1; margin-left: 0; } } rdflib-6.1.1/docs/_themes/armstrong/theme-old.conf000066400000000000000000000025411415774155300221140ustar00rootroot00000000000000[theme] inherit = default stylesheet = rtd.css pygment_style = default show_sphinx = False [options] show_rtd = True white = #ffffff almost_white = #f8f8f8 barely_white = #f2f2f2 dirty_white = #eeeeee almost_dirty_white = #e6e6e6 dirtier_white = #DAC6AF lighter_gray = #cccccc gray_a = #aaaaaa gray_9 = #999999 light_gray = #888888 gray_7 = #777777 gray = #666666 dark_gray = #444444 gray_2 = #222222 black = #111111 light_color = #EDE4D8 light_medium_color = #DDEAF0 medium_color_link = #634320 medium_color_link_hover = #261a0c dark_color = rgba(160, 109, 52, 1.0) h1 = #1f3744 h2 = #335C72 h3 = #638fa6 link_color = #335C72 link_color_decoration = #99AEB9 medium_color_hover = rgba(255, 255, 255, 0.25) medium_color = rgba(255, 255, 255, 0.5) green_highlight = #8ecc4c positive_dark = rgba(51, 77, 0, 1.0) positive_medium = rgba(102, 153, 0, 1.0) positive_light = rgba(102, 153, 0, 0.1) negative_dark = rgba(51, 13, 0, 1.0) negative_medium = rgba(204, 51, 0, 1.0) negative_light = rgba(204, 51, 0, 0.1) negative_text = #c60f0f ruler = #abc viewcode_bg = #f4debf viewcode_border = #ac9 highlight = #ffe080 code_background = rgba(0, 0, 0, 0.075) background = rgba(135, 57, 34, 1.0) background_link = rgba(212, 195, 172, 1.0) background_link_half = rgba(212, 195, 172, 0.5) background_text = rgba(212, 195, 172, 1.0) background_text_link = rgba(171, 138, 93, 1.0) 
rdflib-6.1.1/docs/_themes/armstrong/theme.conf000066400000000000000000000021731415774155300213410ustar00rootroot00000000000000[theme] inherit = default stylesheet = rtd.css pygment_style = default show_sphinx = False [options] show_rtd = True white = #ffffff almost_white = #f8f8f8 barely_white = #f2f2f2 dirty_white = #eeeeee almost_dirty_white = #e6e6e6 dirtier_white = #dddddd lighter_gray = #cccccc gray_a = #aaaaaa gray_9 = #999999 light_gray = #888888 gray_7 = #777777 gray = #666666 dark_gray = #444444 gray_2 = #222222 black = #111111 light_color = #e8ecef light_medium_color = #DDEAF0 medium_color = #8ca1af medium_color_link = #86989b medium_color_link_hover = #a6b8bb dark_color = #465158 h1 = #000000 h2 = #465158 h3 = #6c818f link_color = #444444 link_color_decoration = #CCCCCC medium_color_hover = #697983 green_highlight = #8ecc4c positive_dark = #609060 positive_medium = #70a070 positive_light = #e9ffe9 negative_dark = #900000 negative_medium = #b04040 negative_light = #ffe9e9 negative_text = #c60f0f ruler = #abc viewcode_bg = #f4debf viewcode_border = #ac9 highlight = #ffe080 code_background = #eeeeee background = #465158 background_link = #ffffff background_link_half = #ffffff background_text = #eeeeee background_text_link = #86989b rdflib-6.1.1/docs/apidocs/000077500000000000000000000000001415774155300153475ustar00rootroot00000000000000rdflib-6.1.1/docs/apidocs/.gitignore000066400000000000000000000000301415774155300173300ustar00rootroot00000000000000modules.rst rdflib*.rst rdflib-6.1.1/docs/apidocs/examples.rst000066400000000000000000000045361415774155300177270ustar00rootroot00000000000000examples Package ================ These examples all live in ``./examples`` in the source-distribution of RDFLib. :mod:`conjunctive_graphs` Module -------------------------------- .. automodule:: examples.conjunctive_graphs :members: :undoc-members: :show-inheritance: :mod:`custom_datatype` Module ----------------------------- .. 
automodule:: examples.custom_datatype :members: :undoc-members: :show-inheritance: :mod:`custom_eval` Module ------------------------- .. automodule:: examples.custom_eval :members: :undoc-members: :show-inheritance: :mod:`film` Module ------------------ .. automodule:: examples.film :members: :mod:`foafpaths` Module ----------------------- .. automodule:: examples.foafpaths :members: :undoc-members: :show-inheritance: :mod:`prepared_query` Module ---------------------------- .. automodule:: examples.prepared_query :members: :undoc-members: :show-inheritance: :mod:`resource_example` Module ------------------------------ .. automodule:: examples.resource_example :members: :undoc-members: :show-inheritance: :mod:`berkeleydb_example` Module -------------------------------- .. automodule:: examples.berkeleydb_example :members: :undoc-members: :show-inheritance: :mod:`slice` Module ------------------- .. automodule:: examples.slice :members: :undoc-members: :show-inheritance: :mod:`smushing` Module ---------------------- .. automodule:: examples.smushing :members: :undoc-members: :show-inheritance: :mod:`sparql_query_example` Module ---------------------------------- .. automodule:: examples.sparql_query_example :members: :undoc-members: :show-inheritance: :mod:`sparql_update_example` Module ----------------------------------- .. automodule:: examples.sparql_update_example :members: :undoc-members: :show-inheritance: :mod:`sparqlstore_example` Module ----------------------------------- .. automodule:: examples.sparqlstore_example :members: :undoc-members: :show-inheritance: :mod:`swap_primer` Module ------------------------- .. automodule:: examples.swap_primer :members: :undoc-members: :show-inheritance: :mod:`transitive` Module ------------------------ .. 
automodule:: examples.transitive :members: :undoc-members: :show-inheritance: rdflib-6.1.1/docs/conf.py000066400000000000000000000167371415774155300152420ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # rdflib documentation build configuration file, created by # sphinx-quickstart on Fri May 15 15:03:54 2009. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os import re # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.append(os.path.abspath("..")) sys.path.append(os.path.abspath("..")) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.doctest'] extensions = [ "sphinxcontrib.apidoc", "sphinx.ext.autodoc", #'sphinx.ext.autosummary', "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.ifconfig", "sphinx.ext.viewcode", ] apidoc_module_dir = "../rdflib" apidoc_output_dir = "apidocs" autodoc_default_options = {"special-members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # epydoc_mapping = { # '/_static/api/': [r'rdflib\.'], # } # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. source_encoding = "utf-8" # The master toctree document. master_doc = "index" # General information about the project. 
project = "rdflib" copyright = "2009 - 2021, RDFLib Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # Find version. We have to do this because we can't import it in Python 3 until # its been automatically converted in the setup process. def find_version(filename): _version_re = re.compile(r'__version__ = "(.*)"') for line in open(filename): version_match = _version_re.match(line) if version_match: return version_match.group(1) # The full version, including alpha/beta/rc tags. release = find_version("../rdflib/__init__.py") # The short X.Y version. version = re.sub("[0-9]+\\.[0-9]\\..*", "\1", release) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. # unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ["_build", "draft"] # The reST default role (used for this markup: `text`) to use for all documents. default_role = "py:obj" # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = "armstrong" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = [ "_themes", ] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None html_logo = "_static/RDFlib.png" # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. html_favicon = "_static/RDFlib.ico" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ["_static"] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. 
# html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = "rdflibdoc" # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). # latex_documents = [ # ("index", "rdflib.tex", "rdflib Documentation", "RDFLib Team", "manual"), # ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("https://docs.python.org/3.7", None), } html_experimental_html5_writer = True needs_sphinx = "4.1.2" rdflib-6.1.1/docs/developers.rst000066400000000000000000000133131415774155300166300ustar00rootroot00000000000000.. 
developers: RDFLib developers guide ======================= Introduction ------------ This document describes the process and conventions to follow when developing RDFLib code. * Please be as Pythonic as possible (:pep:`8`). * Code should be formatted using `black `_ and we use Black v21.9b0, with the black.toml config file provided. * Code should also pass `flake8 `_ linting and `mypy `_ type checking. * You must supply tests for new code If you add a new cool feature, consider also adding an example in ``./examples`` Tests ----- Any new functionality being added to RDFLib _must_ have unit tests and should have doc tests supplied. Typically, you should add your functionality and new tests to a branch of RDFlib and and run all tests locally and see them pass. There are currently close to 4,000 tests with a few extra expected failures and skipped tests. We won't allow Pull Requests that break any of the existing tests. Tests that you add should show how your new feature or bug fix is doing what you say it is doing: if you remove your enhancement, your new tests should fail! Finally, please consider adding simple and more complex tests. It's good to see the basic functionality of your feature tests and then also any tricky bits or edg cases. Testing framework ~~~~~~~~~~~~~~~~~ RDFLib uses the `pytest `_ testing framework. Running tests ~~~~~~~~~~~~~ To run RDFLib's test suite with `pytest `_: .. code-block:: bash $ pip install -r requirements.txt -r requirements.dev.txt $ pytest Specific tests can be run by file name. For example: .. code-block:: bash $ pytest test/test_graph.py Writing tests ~~~~~~~~~~~~~ New tests should be written for `pytest `_ instead of for python's built-in `unittest` module as pytest provides advanced features such as parameterization and more flexibility in writing expected failure tests than `unittest`. A primer on how to write tests for pytest can be found `here `_. 
The existing test that use `unittest` work well with pytest, but they should ideally be updated to the pytest test-style when they are touched. Test should go into the ``test/`` directory, either into an existing test file with a name that is applicable to the test being written, or into a new test file with a name that is descriptive of the tests placed in it. Test files should be named `test_*.py` so that `pytest can discover them `_. Running static checks --------------------- Check formatting with `black `_, making sure you use our black.toml config file: .. code-block:: bash python -m black --config black.toml --check ./rdflib Check style and conventions with `flake8 `_: .. code-block:: bash python -m flake8 rdflib Check types with `mypy `_: .. code-block:: bash python -m mypy --show-error-context --show-error-codes rdflib Using tox --------------------- RDFLib has a `tox `_ config file that makes it easier to run validation on all supported python versions. .. code-block:: bash # install tox pip install tox # list tox environments that run by default tox -e # list all tox environments tox -a # run default environment for all python versions tox # run a specific environment tox -e py37 # default environment with py37 tox -e py39-mypy # mypy environment with py39 Writing documentation --------------------- We use sphinx for generating HTML docs, see :ref:`docs`. Continuous Integration ---------------------- We used Drone for CI, see: https://drone.rdflib.ashs.dev/RDFLib/rdflib If you make a pull-request to RDFLib on GitHub, Drone will automatically test your code and we will only merge code passing all tests. Please do *not* commit tests you know will fail, even if you're just pointing out a bug. If you commit such tests, flag them as expecting to fail. Compatibility ------------- RDFlib 6.0.0 release and later only support Python 3.7 and newer. RDFLib 5.0.0 maintained compatibility with Python versions 2.7, 3.4, 3.5, 3.6, 3.7. 
Releasing --------- Set to-be-released version number in :file:`rdflib/__init__.py` and :file:`README.md`. Check date in :file:`LICENSE`. Add :file:`CHANGELOG.md` entry. Commit this change. It's preferable make the release tag via https://github.com/RDFLib/rdflib/releases/new :: Our Tag versions aren't started with 'v', so just use a plain 5.0.0 like version. Release title is like "RDFLib 5.0.0", the description a copy of your :file:`CHANGELOG.md` entry. This gives us a nice release page like this:: https://github.com/RDFLib/rdflib/releases/tag/4.2.2 If for whatever reason you don't want to take this approach, the old one is:: Tagging the release commit with:: git tag -am 'tagged version' X.X.X When pushing, remember to do:: git push --tags No matter how you create the release tag, remember to upload tarball to pypi with:: rm -r dist/X.X.X[.-]* # delete all previous builds for this release, just in case rm -r build python setup.py sdist python setup.py bdist_wheel ls dist # upload with twine # WARNING: once uploaded can never be modified, only deleted! twine upload dist/rdflib-X.X.X[.-]* Set new dev version number in the above locations, i.e. next release `-dev`: ``5.0.1-dev`` and commit again. Tweet, email mailing list and inform members in the chat. rdflib-6.1.1/docs/docs.rst000066400000000000000000000024751415774155300154170ustar00rootroot00000000000000.. _docs: ================================ Writing RDFLib Documentation ================================ These docs are generated with Sphinx. Sphinx makes it very easy to pull in doc-strings from modules, classes, methods, etc. When writing doc-strings, special reST fields can be used to annotate parameters, return-types, etc. This makes for pretty API docs: http://sphinx-doc.org/domains.html?highlight=param#info-field-lists Building -------- To build you must have the ``sphinx`` package installed: .. 
code-block:: bash pip install sphinx See the documentation's full set of requirements in the ``sphinx-require,ens.txt`` file within the :file:`docs/` directory. Once you have all the requirements installed you can run this command in the rdflib root directory: .. code-block:: bash python setup.py build_sphinx Docs will be generated in :file:`build/sphinx/html/` and API documentation, generated from doc-strings, will be placed in :file:`docs/apidocs/`. API Docs -------- API Docs are automatically generated with ``sphinx-apidoc``: .. code-block:: bash sphinx-apidoc -f -d 10 -o docs/apidocs/ rdflib examples Note that ``rdflib.rst`` was manually tweaked so as to not include all imports in ``rdflib/__init__.py``. Tables ------ The tables in ``plugin_*.rst`` were generated with ``plugintable.py`` rdflib-6.1.1/docs/gettingstarted.rst000066400000000000000000000126071415774155300175150ustar00rootroot00000000000000.. _gettingstarted: =============================== Getting started with RDFLib =============================== Installation ============ RDFLib is open source and is maintained in a `GitHub `_ repository. RDFLib releases, current and previous, are listed on `PyPi `_ The best way to install RDFLib is to use ``pip`` (sudo as required): .. code-block :: bash $ pip install rdflib If you want the latest code to run, clone the master branch of the GitHub repo and use that or you can ``pip install`` directly from GitHub: .. 
code-block :: bash $ pip install git+https://github.com/RDFLib/rdflib.git@master#egg=rdflib Support ======= Usage support is available via questions tagged with ``[rdflib]`` on `StackOverflow `__ and development support, notifications and detailed discussion through the rdflib-dev group (mailing list): http://groups.google.com/group/rdflib-dev If you notice an bug or want to request an enhancement, please do so via our Issue Tracker in Github: ``_ How it all works ================ *The package uses various Python idioms that offer an appropriate way to introduce RDF to a Python programmer who hasn't worked with RDF before.* The primary interface that RDFLib exposes for working with RDF is a :class:`~rdflib.graph.Graph`. RDFLib graphs are un-sorted containers; they have ordinary ``set`` operations (e.g. :meth:`~rdflib.Graph.add` to add a triple) plus methods that search triples and return them in arbitrary order. RDFLib graphs also redefine certain built-in Python methods in order to behave in a predictable way: they `emulate container types `_ and are best thought of as a set of 3-item tuples ("triples", in RDF-speak): .. code-block:: text [ (subject0, predicate0, object0), (subject1, predicate1, object1), ... (subjectN, predicateN, objectN) ] A tiny example ============== .. code-block:: python from rdflib import Graph # Create a Graph g = Graph() # Parse in an RDF file hosted on the Internet g.parse("http://www.w3.org/People/Berners-Lee/card") # Loop through each triple in the graph (subj, pred, obj) for subj, pred, obj in g: # Check if there is at least one triple in the Graph if (subj, pred, obj) not in g: raise Exception("It better be!") # Print the number of "triples" in the Graph print(f"Graph g has {len(g)} statements.") # Prints: Graph g has 86 statements. 
# Print out the entire Graph in the RDF Turtle format print(g.serialize(format="turtle")) Here a :class:`~rdflib.graph.Graph` is created and then an RDF file online, Tim Berners-Lee's social network details, is parsed into that graph. The ``print()`` statement uses the ``len()`` function to count the number of triples in the graph. A more extensive example ======================== .. code-block:: python from rdflib import Graph, Literal, RDF, URIRef # rdflib knows about quite a few popular namespaces, like W3C ontologies, schema.org etc. from rdflib.namespace import FOAF , XSD # Create a Graph g = Graph() # Create an RDF URI node to use as the subject for multiple triples donna = URIRef("http://example.org/donna") # Add triples using store's add() method. g.add((donna, RDF.type, FOAF.Person)) g.add((donna, FOAF.nick, Literal("donna", lang="en"))) g.add((donna, FOAF.name, Literal("Donna Fales"))) g.add((donna, FOAF.mbox, URIRef("mailto:donna@example.org"))) # Add another person ed = URIRef("http://example.org/edward") # Add triples using store's add() method. g.add((ed, RDF.type, FOAF.Person)) g.add((ed, FOAF.nick, Literal("ed", datatype=XSD.string))) g.add((ed, FOAF.name, Literal("Edward Scissorhands"))) g.add((ed, FOAF.mbox, Literal("e.scissorhands@example.org", datatype=XSD.anyURI))) # Iterate over triples in store and print them out. print("--- printing raw triples ---") for s, p, o in g: print((s, p, o)) # For each foaf:Person in the store, print out their mbox property's value. print("--- printing mboxes ---") for person in g.subjects(RDF.type, FOAF.Person): for mbox in g.objects(person, FOAF.mbox): print(mbox) # Bind the FOAF namespace to a prefix for more readable output g.bind("foaf", FOAF) # print all the data in the Notation3 format print("--- printing mboxes ---") print(g.serialize(format='n3')) A SPARQL query example ====================== .. 
code-block:: python from rdflib import Graph # Create a Graph, pare in Internet data g = Graph().parse("http://www.w3.org/People/Berners-Lee/card") # Query the data in g using SPARQL # This query returns the 'name' of all ``foaf:Person`` instances q = """ PREFIX foaf: SELECT ?name WHERE { ?p rdf:type foaf:Person . ?p foaf:name ?name . } """ # Apply the query to the graph and iterate through results for r in g.query(q): print(r["name"]) # prints: Timothy Berners-Lee More examples ============= There are many more :doc:`examples ` in the :file:`examples` folder in the source distribution. rdflib-6.1.1/docs/index.rst000066400000000000000000000072661415774155300156010ustar00rootroot00000000000000.. rdflib documentation documentation master file ================ rdflib |release| ================ RDFLib is a pure Python package for working with `RDF `_. It contains: * **Parsers & Serializers** * for RDF/XML, N3, NTriples, N-Quads, Turtle, TriX, JSON-LD, RDFa and Microdata * **Store implementations** * for in-memory and persistent RDF storage, including remote SPARQL endpoints * **Graph interface** * to a single graph * or a conjunctive graph (multiple Named Graphs) * or a dataset of graphs * **SPARQL 1.1 implementation** * both Queries and Updates are supported Getting started --------------- If you have never used RDFLib, the following will help get you started: .. toctree:: :maxdepth: 1 gettingstarted intro_to_parsing intro_to_creating_rdf intro_to_graphs intro_to_sparql utilities Examples In depth -------- If you are familiar with RDF and are looking for details on how RDFLib handles it, these are for you: .. toctree:: :maxdepth: 1 rdf_terms namespaces_and_bindings persistence merging upgrade5to6 upgrade4to5 Reference --------- The nitty-gritty details of everything. API reference: .. toctree:: :maxdepth: 1 apidocs/modules .. toctree:: :maxdepth: 2 plugins .. * :ref:`genindex` .. * :ref:`modindex` For developers -------------- .. 
toctree:: :maxdepth: 1 developers docs persisting_n3_terms Source Code ----------- The rdflib source code is hosted on GitHub at ``__ where you can lodge Issues and create Pull Requests to help improve this community project! The RDFlib organisation on GitHub at ``__ maintains this package and a number of other RDF and RDFlib-related packaged that you might also find useful. Further help & Contact ---------------------- If you would like more help with using rdflib, rather than developing it, please post a question on StackOverflow using the tag ``[rdflib]``. A list of existing ``[rdflib]`` tagged questions is kept there at: * ``__ You might also like to join rdflib's dev mailing list: ``__ The chat is available at `gitter `_ or via matrix `#RDFLib_rdflib:gitter.im `_. Glossary -------- Here are a few RDF and Python terms referred to in this documentation. They are linked to wherever they occur. .. glossary:: functional property Properties than can only occur once for a resource, i.e. for any relation (triple, in RDF) ``x p y``, if ``p`` is functional, for any individual ``x``, there can be at most one individual ``y``. OWL The OWL 2 Web Ontology Language, informally OWL 2 or just OWL, is an ontology language for the Semantic Web with formally defined meaning. OWL 2 ontologies provide classes, properties, individuals, and data values and are stored as Semantic Web documents. OWL 2 ontologies can be used along with information written in RDF, and OWL 2 ontologies themselves are primarily exchanged as RDF documents. See the `RDF 1.1 Concepts and Abstract Syntax `_ for more info. RDF The Resource Description Framework (RDF) is a framework for representing information in the Web. RDF data is stored in graphs that are sets of subject-predicate-object triples, where the elements may be IRIs, blank nodes, or datatyped literals. See the `OWL 2 Web Ontology Language Document Overview `_ for more info. 
rdflib-6.1.1/docs/intro_to_creating_rdf.rst000066400000000000000000000154021415774155300210250ustar00rootroot00000000000000.. _intro_to_creating_rdf: ==================== Creating RDF triples ==================== Creating Nodes -------------- RDF data is a graph where the nodes are URI references, Blank Nodes or Literals. In RDFLib, these node types are represented by the classes :class:`~rdflib.term.URIRef`, :class:`~rdflib.term.BNode`, and :class:`~rdflib.term.Literal`. ``URIRefs`` and ``BNodes`` can both be thought of as resources, such a person, a company, a website, etc. * A ``BNode`` is a node where the exact URI is not known - usually a node with identity only in relation to other nodes. * A ``URIRef`` is a node where the exact URI is known. In addition to representing some subjects and predicates in RDF graphs, ``URIRef``\s are always used to represent properties/predicates * ``Literals`` represent object values, such as a name, a date, a number, etc. The most common literal values are XML data types, e.g. string, int... but custom types can be declared too Nodes can be created by the constructors of the node classes: .. code-block:: python from rdflib import URIRef, BNode, Literal bob = URIRef("http://example.org/people/Bob") linda = BNode() # a GUID is generated name = Literal("Bob") # passing a string age = Literal(24) # passing a python int height = Literal(76.5) # passing a python float Literals can be created from Python objects, this creates ``data-typed literals``. For the details on the mapping see :ref:`rdflibliterals`. For creating many ``URIRefs`` in the same ``namespace``, i.e. 
URIs with the same prefix, RDFLib has the :class:`rdflib.namespace.Namespace` class :: from rdflib import Namespace n = Namespace("http://example.org/people/") n.bob # == rdflib.term.URIRef("http://example.org/people/bob") n.eve # == rdflib.term.URIRef("http://example.org/people/eve") This is very useful for schemas where all properties and classes have the same URI prefix. RDFLib defines Namespaces for some common RDF/OWL schemas, including most W3C ones: .. code-block:: python from rdflib.namespace import CSVW, DC, DCAT, DCTERMS, DOAP, FOAF, ODRL2, ORG, OWL, \ PROF, PROV, RDF, RDFS, SDO, SH, SKOS, SOSA, SSN, TIME, \ VOID, XMLNS, XSD RDF.type # == rdflib.term.URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#type") FOAF.knows # == rdflib.term.URIRef("http://xmlns.com/foaf/0.1/knows") PROF.isProfileOf # == rdflib.term.URIRef("http://www.w3.org/ns/dx/prof/isProfileOf") SOSA.Sensor # == rdflib.term.URIRef("http://www.w3.org/ns/sosa/Sensor") Adding Triples to a graph ------------------------- We already saw in :doc:`intro_to_parsing`, how triples can be added from files and online locations with with the :meth:`~rdflib.graph.Graph.parse` function. Triples can also be added within Python code directly, using the :meth:`~rdflib.graph.Graph.add` function: .. automethod:: rdflib.graph.Graph.add :noindex: :meth:`~rdflib.graph.Graph.add` takes a 3-tuple (a "triple") of RDFLib nodes. Using the nodes and namespaces we defined previously: .. code-block:: python from rdflib import Graph, URIRef, Literal, BNode from rdflib.namespace import FOAF, RDF g = Graph() g.bind("foaf", FOAF) bob = URIRef("http://example.org/people/Bob") linda = BNode() # a GUID is generated name = Literal("Bob") age = Literal(24) g.add((bob, RDF.type, FOAF.Person)) g.add((bob, FOAF.name, name)) g.add((bob, FOAF.age, age)) g.add((bob, FOAF.knows, linda)) g.add((linda, RDF.type, FOAF.Person)) g.add((linda, FOAF.name, Literal("Linda"))) print(g.serialize()) outputs: .. code-block:: Turtle @prefix foaf: . 
@prefix xsd: . a foaf:Person ; foaf:age 24 ; foaf:knows [ a foaf:Person ; foaf:name "Linda" ] ; foaf:name "Bob" . For some properties, only one value per resource makes sense (i.e they are *functional properties*, or have a max-cardinality of 1). The :meth:`~rdflib.graph.Graph.set` method is useful for this: .. code-block:: python from rdflib import Graph, URIRef, Literal from rdflib.namespace import FOAF g = Graph() bob = URIRef("http://example.org/people/Bob") g.add((bob, FOAF.age, Literal(42))) print(f"Bob is {g.value(bob, FOAF.age)}") # prints: Bob is 42 g.set((bob, FOAF.age, Literal(43))) # replaces 42 set above print(f"Bob is now {g.value(bob, FOAF.age)}") # prints: Bob is now 43 :meth:`rdflib.graph.Graph.value` is the matching query method. It will return a single value for a property, optionally raising an exception if there are more. You can also add triples by combining entire graphs, see :ref:`graph-setops`. Removing Triples ---------------- Similarly, triples can be removed by a call to :meth:`~rdflib.graph.Graph.remove`: .. automethod:: rdflib.graph.Graph.remove :noindex: When removing, it is possible to leave parts of the triple unspecified (i.e. passing ``None``), this will remove all matching triples: .. code-block:: python g.remove((bob, None, None)) # remove all triples about bob An example ---------- LiveJournal produces FOAF data for their users, but they seem to use ``foaf:member_name`` for a person's full name but ``foaf:member_name`` isn't in FOAF's namespace and perhaps they should have used ``foaf:name`` To retrieve some LiveJournal data, add a ``foaf:name`` for every ``foaf:member_name`` and then remove the ``foaf:member_name`` values to ensure the data actually aligns with other FOAF data, we could do this: .. 
code-block:: python from rdflib import Graph from rdflib.namespace import FOAF g = Graph() # get the data g.parse("http://danbri.livejournal.com/data/foaf") # for every foaf:member_name, add foaf:name and remove foaf:member_name for s, p, o in g.triples((None, FOAF['member_name'], None)): g.add((s, FOAF['name'], o)) g.remove((s, FOAF['member_name'], o)) .. note:: Since rdflib 5.0.0, using ``foaf:member_name`` is somewhat prevented in RDFlib since FOAF is declared as a :meth:`~rdflib.namespace.ClosedNamespace` class instance that has a closed set of members and ``foaf:member_name`` isn't one of them! If LiveJournal had used RDFlib 5.0.0, an error would have been raised for ``foaf:member_name`` when the triple was created. Creating Containers & Collections --------------------------------- There are two convenience classes for RDF Containers & Collections which you can use instead of declaring each triple of a Containers or a Collections individually: * :meth:`~rdflib.container.Container` (also ``Bag``, ``Seq`` & ``Alt``) and * :meth:`~rdflib.collection.Collection` See their documentation for how. rdflib-6.1.1/docs/intro_to_graphs.rst000066400000000000000000000112431415774155300176610ustar00rootroot00000000000000.. _rdflib_graph: Navigating Graphs ================= Navigating Graphs ================= An RDF Graph is a set of RDF triples, and we try to mirror exactly this in RDFLib. The Python :meth:`~rdflib.graph.Graph` tries to emulate a container type. Graphs as Iterators ------------------- RDFLib graphs override :meth:`~rdflib.graph.Graph.__iter__` in order to support iteration over the contained triples: .. code-block:: python for s, p, o in someGraph: if not (s, p, o) in someGraph: raise Exception("Iterator / Container Protocols are Broken!!") This loop iterates through all the subjects(s), predicates (p) & objects (o) in ``someGraph``. 
Contains check -------------- Graphs implement :meth:`~rdflib.graph.Graph.__contains__`, so you can check if a triple is in a graph with a ``triple in graph`` syntax: .. code-block:: python from rdflib import URIRef from rdflib.namespace import RDF bob = URIRef("http://example.org/people/bob") if (bob, RDF.type, FOAF.Person) in graph: print("This graph knows that Bob is a person!") Note that this triple does not have to be completely bound: .. code-block:: python if (bob, None, None) in graph: print("This graph contains triples about Bob!") .. _graph-setops: Set Operations on RDFLib Graphs ------------------------------- Graphs override several pythons operators: :meth:`~rdflib.graph.Graph.__iadd__`, :meth:`~rdflib.graph.Graph.__isub__`, etc. This supports addition, subtraction and other set-operations on Graphs: ============ ============================================================= operation effect ============ ============================================================= ``G1 + G2`` return new graph with union (triples on both) ``G1 += G2`` in place union / addition ``G1 - G2`` return new graph with difference (triples in G1, not in G2) ``G1 -= G2`` in place difference / subtraction ``G1 & G2`` intersection (triples in both graphs) ``G1 ^ G2`` xor (triples in either G1 or G2, but not in both) ============ ============================================================= .. warning:: Set-operations on graphs assume Blank Nodes are shared between graphs. This may or may not be what you want. See :doc:`merging` for details. Basic Triple Matching --------------------- Instead of iterating through all triples, RDFLib graphs support basic triple pattern matching with a :meth:`~rdflib.graph.Graph.triples` function. This function is a generator of triples that match a pattern given by arguments, i.e. arguments restrict the triples that are returned. Terms that are :data:`None` are treated as a wildcard. For example: .. 
code-block:: python g.load("some_foaf.ttl") # find all subjects (s) of type (rdf:type) person (foaf:Person) for s, p, o in g.triples((None, RDF.type, FOAF.Person)): print(f"{s} is a person") # find all subjects of any type for s, p, o in g.triples((None, RDF.type, None)): print(f"{s} is a {o}") # create a graph bobgraph = Graph() # add all triples with subject 'bob' bobgraph += g.triples((bob, None, None)) If you are not interested in whole triples, you can get only the bits you want with the methods :meth:`~rdflib.graph.Graph.objects`, :meth:`~rdflib.graph.Graph.subjects`, :meth:`~rdflib.graph.Graph.predicates`, :meth:`~rdflib.graph.Graph.predicate_objects`, etc. Each take parameters for the components of the triple to constraint: .. code-block:: python for person in g.subjects(RDF.type, FOAF.Person): print("{} is a person".format(person)) Finally, for some properties, only one value per resource makes sense (i.e they are *functional properties*, or have a max-cardinality of 1). The :meth:`~rdflib.graph.Graph.value` method is useful for this, as it returns just a single node, not a generator: .. code-block:: python # get any name of bob name = g.value(bob, FOAF.name) # get the one person that knows bob and raise an exception if more are found mbox = g.value(predicate = FOAF.name, object=bob, any=False) :class:`~rdflib.graph.Graph` methods for accessing triples ----------------------------------------------------------- Here is a list of all convenience methods for querying Graphs: .. automethod:: rdflib.graph.Graph.triples :noindex: .. automethod:: rdflib.graph.Graph.value :noindex: .. automethod:: rdflib.graph.Graph.subjects :noindex: .. automethod:: rdflib.graph.Graph.objects :noindex: .. automethod:: rdflib.graph.Graph.predicates :noindex: .. automethod:: rdflib.graph.Graph.subject_objects :noindex: .. automethod:: rdflib.graph.Graph.subject_predicates :noindex: .. 
automethod:: rdflib.graph.Graph.predicate_objects :noindex: rdflib-6.1.1/docs/intro_to_parsing.rst000066400000000000000000000122071415774155300200410ustar00rootroot00000000000000.. _intro_to_parsing: ====================== Loading and saving RDF ====================== Reading RDF files ----------------- RDF data can be represented using various syntaxes (``turtle``, ``rdf/xml``, ``n3``, ``n-triples``, ``trix``, ``JSON-LD``, etc.). The simplest format is ``ntriples``, which is a triple-per-line format. Create the file :file:`demo.nt` in the current directory with these two lines in it: .. code-block:: Turtle . "Hello World" . On line 1 this file says "drewp is a FOAF Person:. On line 2 it says "drep says "Hello World"". RDFLib can guess what format the file is by the file ending (".nt" is commonly used for n-triples) so you can just use :meth:`~rdflib.graph.Graph.parse` to read in the file. If the file had a non-standard RDF file ending, you could set the keyword-parameter ``format`` to specify either an Internet Media Type or the format name (a :doc:`list of available parsers ` is available). In an interactive python interpreter, try this: .. code-block:: python from rdflib import Graph g = Graph() g.parse("demo.nt") print(len(g)) # prints: 2 import pprint for stmt in g: pprint.pprint(stmt) # prints: # (rdflib.term.URIRef('http://example.com/drewp'), # rdflib.term.URIRef('http://example.com/says'), # rdflib.term.Literal('Hello World')) # (rdflib.term.URIRef('http://example.com/drewp'), # rdflib.term.URIRef('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'), # rdflib.term.URIRef('http://xmlns.com/foaf/0.1/Person')) The final lines show how RDFLib represents the two statements in the file: the statements themselves are just length-3 tuples ("triples") and the subjects, predicates, and objects of the triples are all rdflib types. Reading remote RDF ------------------ Reading graphs from the Internet is easy: .. 
code-block:: python from rdflib import Graph g = Graph() g.parse("http://www.w3.org/People/Berners-Lee/card") print(len(g)) # prints: 86 :func:`rdflib.Graph.parse` can process local files, remote data via a URL, as in this example, or RDF data in a string (using the ``data`` parameter). Saving RDF ---------- To store a graph in a file, use the :func:`rdflib.Graph.serialize` function: .. code-block:: python from rdflib import Graph g = Graph() g.parse("http://www.w3.org/People/Berners-Lee/card") g.serialize(destination="tbl.ttl") This parses data from http://www.w3.org/People/Berners-Lee/card and stores it in a file ``tbl.ttl`` in this directory using the turtle format, which is the default RDF serialization (as of rdflib 6.0.0). To read the same data and to save it as an RDF/XML format string in the variable ``v``, do this: .. code-block:: python from rdflib import Graph g = Graph() g.parse("http://www.w3.org/People/Berners-Lee/card") v = g.serialize(format="xml") The following table lists the RDF formats you can serialize data to with rdflib, out of the box, and the ``format=KEYWORD`` keyword used to reference them within ``serialize()``: .. 
csv-table:: :header: "RDF Format", "Keyword", "Notes" "Turtle", "turtle, ttl or turtle2", "turtle2 is just turtle with more spacing & linebreaks" "RDF/XML", "xml or pretty-xml", "Was the default format, rdflib < 6.0.0" "JSON-LD", "json-ld", "There are further options for compact syntax and other JSON-LD variants" "N-Triples", "ntriples, nt or nt11", "nt11 is exactly like nt, only utf8 encoded" "Notation-3","n3", "N3 is a superset of Turtle that also caters for rules and a few other things" "Trig", "trig", "Turtle-like format for RDF triples + context (RDF quads) and thus multiple graphs" "Trix", "trix", "RDF/XML-like format for RDF quads" "N-Quads", "nquads", "N-Triples-like format for RDF quads" Working with multi-graphs ------------------------- To read and query multi-graphs, that is RDF data that is context-aware, you need to use rdflib's :class:`rdflib.ConjunctiveGraph` or :class:`rdflib.Dataset` class. These are extensions to :class:`rdflib.Graph` that know all about quads (triples + graph IDs). If you had this multi-graph data file (in the ``trig`` format, using new-style ``PREFIX`` statement (not the older ``@prefix``): .. code-block:: Turtle PREFIX eg: PREFIX foaf: eg:graph-1 { eg:drewp a foaf:Person . eg:drewp eg:says "Hello World" . } eg:graph-2 { eg:nick a foaf:Person . eg:nick eg:says "Hi World" . } You could parse the file and query it like this: .. code-block:: python from rdflib import Dataset from rdflib.namespace import RDF g = Dataset() g.parse("demo.trig") for s, p, o, g in g.quads((None, RDF.type, None, None)): print(s, g) This will print out: .. code-block:: http://example.com/person/drewp http://example.com/person/graph-1 http://example.com/person/nick http://example.com/person/graph-2 rdflib-6.1.1/docs/intro_to_sparql.rst000066400000000000000000000132011415774155300176730ustar00rootroot00000000000000.. 
_intro_to_using_sparql: ==================== Querying with SPARQL ==================== Run a Query ^^^^^^^^^^^ The RDFLib comes with an implementation of the `SPARQL 1.1 Query `_ and `SPARQL 1.1 Update `_ query languages. Queries can be evaluated against a graph with the :meth:`rdflib.graph.Graph.query` method, and updates with :meth:`rdflib.graph.Graph.update`. The query method returns a :class:`rdflib.query.Result` instance. For SELECT queries, iterating over this returns :class:`rdflib.query.ResultRow` instances, each containing a set of variable bindings. For ``CONSTRUCT``/``DESCRIBE`` queries, iterating over the result object gives the triples. For ``ASK`` queries, iterating will yield the single boolean answer, or evaluating the result object in a boolean-context (i.e. ``bool(result)``) For example... .. code-block:: python import rdflib g = rdflib.Graph() g.parse("http://danbri.org/foaf.rdf#") knows_query = """ SELECT DISTINCT ?aname ?bname WHERE { ?a foaf:knows ?b . ?a foaf:name ?aname . ?b foaf:name ?bname . }""" qres = g.query(knows_query) for row in qres: print(f"{row.aname} knows {row.bname}") The results are tuples of values in the same order as your ``SELECT`` arguments. Alternatively, the values can be accessed by variable name, either as attributes, or as items, e.g. ``row.b`` and ``row["b"]`` are equivalent. The above, given the appropriate data, would print something like: .. code-block:: text Timothy Berners-Lee knows Edd Dumbill Timothy Berners-Lee knows Jennifer Golbeck Timothy Berners-Lee knows Nicholas Gibbins ... As an alternative to using ``SPARQL``\s ``PREFIX``, namespace bindings can be passed in with the ``initNs`` kwarg, see :doc:`namespaces_and_bindings`. Variables can also be pre-bound, using the ``initBindings`` kwarg which can pass in a ``dict`` of initial bindings. This is particularly useful for prepared queries, as described below. 
Update Queries ^^^^^^^^^^^^^^ Update queries are performed just like reading queries but using the :meth:`rdflib.graph.Graph.update` method. An example: .. code-block:: python from rdflib import Graph # Create a Graph, add in some test data g = Graph() g.parse( data=""" a . a . """, format="turtle" ) # Select all the things (s) that are of type (rdf:type) c: qres = g.query("""SELECT ?s WHERE { ?s a }""") for row in qres: print(f"{row.s}") # prints: # x: # y: # Add in a new triple using SPATQL UPDATE g.update("""INSERT DATA { a }""") # Select all the things (s) that are of type (rdf:type) c: qres = g.query("""SELECT ?s WHERE { ?s a }""") print("After update:") for row in qres: print(f"{row.s}") # prints: # x: # y: # z: # Change type of from to g.update(""" DELETE { a } INSERT { a } WHERE { a } """) print("After second update:") qres = g.query("""SELECT ?s ?o WHERE { ?s a ?o }""") for row in qres: print(f"{row.s} a {row.o}") # prints: # x: a c: # z: a c: # y: a d: Querying a Remote Service ^^^^^^^^^^^^^^^^^^^^^^^^^ The ``SERVICE`` keyword of SPARQL 1.1 can send a query to a remote SPARQL endpoint. .. code-block:: python import rdflib g = rdflib.Graph() qres = g.query( """ SELECT ?s WHERE { SERVICE { ?s a ?o . } } LIMIT 3 """ ) for row in qres: print(row.s) This example sends a query to `DBPedia `_'s SPARQL endpoint service so that it can run the query and then send back the result: .. code-block:: text Prepared Queries ^^^^^^^^^^^^^^^^ RDFLib lets you *prepare* queries before execution, this saves re-parsing and translating the query into SPARQL Algebra each time. The method :meth:`rdflib.plugins.sparql.prepareQuery` takes a query as a string and will return a :class:`rdflib.plugins.sparql.sparql.Query` object. This can then be passed to the :meth:`rdflib.graph.Graph.query` method. The ``initBindings`` kwarg can be used to pass in a ``dict`` of initial bindings: .. 
code-block:: python q = prepareQuery( "SELECT ?s WHERE { ?person foaf:knows ?s .}", initNs = { "foaf": FOAF } ) g = rdflib.Graph() g.load("foaf.rdf") tim = rdflib.URIRef("http://www.w3.org/People/Berners-Lee/card#i") for row in g.query(q, initBindings={'person': tim}): print(row) Custom Evaluation Functions ^^^^^^^^^^^^^^^^^^^^^^^^^^^ For experts, it is possible to override how bits of SPARQL algebra are evaluated. By using the `setuptools entry-point `_ ``rdf.plugins.sparqleval``, or simply adding to an entry to :data:`rdflib.plugins.sparql.CUSTOM_EVALS`, a custom function can be registered. The function will be called for each algebra component and may raise ``NotImplementedError`` to indicate that this part should be handled by the default implementation. See :file:`examples/custom_eval.py` rdflib-6.1.1/docs/merging.rst000066400000000000000000000044021415774155300161070ustar00rootroot00000000000000.. _merging_graphs: ============== Merging graphs ============== Graphs share blank nodes only if they are derived from graphs described by documents or other structures (such as an RDF dataset) that explicitly provide for the sharing of blank nodes between different RDF graphs. Simply downloading a web document does not mean that the blank nodes in a resulting RDF graph are the same as the blank nodes coming from other downloads of the same document or from the same RDF source. RDF applications which manipulate concrete syntaxes for RDF which use blank node identifiers should take care to keep track of the identity of the blank nodes they identify. Blank node identifiers often have a local scope, so when RDF from different sources is combined, identifiers may have to be changed in order to avoid accidental conflation of distinct blank nodes. 
For example, two documents may both use the blank node identifier "_:x" to identify a blank node, but unless these documents are in a shared identifier scope or are derived from a common source, the occurrences of "_:x" in one document will identify a different blank node than the one in the graph described by the other document. When graphs are formed by combining RDF from multiple sources, it may be necessary to standardize apart the blank node identifiers by replacing them by others which do not occur in the other document(s). *(copied directly from https://www.w3.org/TR/rdf11-mt/#shared-blank-nodes-unions-and-merges)* In RDFLib, blank nodes are given unique IDs when parsing, so graph merging can be done by simply reading several files into the same graph:: from rdflib import Graph graph = Graph() graph.parse(input1) graph.parse(input2) ``graph`` now contains the merged graph of ``input1`` and ``input2``. .. note:: However, the set-theoretic graph operations in RDFLib are assumed to be performed in sub-graphs of some larger data-base (for instance, in the context of a :class:`~rdflib.graph.ConjunctiveGraph`) and assume shared blank node IDs, and therefore do NOT do *correct* merging, i.e.:: from rdflib import Graph g1 = Graph() g1.parse(input1) g2 = Graph() g2.parse(input2) graph = g1 + g2 May cause unwanted collisions of blank-nodes in ``graph``. rdflib-6.1.1/docs/namespaces_and_bindings.rst000066400000000000000000000070531415774155300213020ustar00rootroot00000000000000.. _namespaces_and_bindings: Namespaces and Bindings ======================= Namespaces and Bindings ======================= RDFLib provides several short-cuts to working with many URIs in the same namespace. 
The :mod:`rdflib.namespace` defines the :class:`rdflib.namespace.Namespace` class which lets you easily create URIs in a namespace:: from rdflib import Namespace n = Namespace("http://example.org/") n.Person # as attribute # = rdflib.term.URIRef("http://example.org/Person") n['first%20name'] # as item - for things that are not valid python identifiers # = rdflib.term.URIRef("http://example.org/first%20name") Note that if a name string is valid for use in an RDF namespace but not valid as a Python identifier, such as '1234', it must be addressed with the "item" syntax (using the "attribute" syntax will raise a Syntax Error). The ``namespace`` module also defines many common namespaces such as RDF, RDFS, OWL, FOAF, SKOS, PROF, etc. Namespaces can also be associated with prefixes, in a :class:`rdflib.namespace.NamespaceManager`, i.e. using ``foaf`` for ``http://xmlns.com/foaf/0.1/``. Each RDFLib graph has a :attr:`~rdflib.graph.Graph.namespace_manager` that keeps a list of namespace to prefix mappings. The namespace manager is populated when reading in RDF, and these prefixes are used when serialising RDF, or when parsing SPARQL queries. Additional prefixes can be bound with the :meth:`rdflib.graph.bind` method. NamespaceManager ---------------- Each graph comes with a `NamespaceManager`__ instance in the `namespace_manager` field; you can use the `bind` method of this instance to bind a prefix to a namespace URI:: myGraph.namespace_manager.bind('prefix', URIRef('scheme:my-namespace-uri:')) myGraph.namespace_manager.bind('owl', OWL_NS, override=False) It has a method to normalize a given url : myGraph.namespace_manager.normalizeUri(t) For simple output, or simple serialisation, you often want a nice readable representation of a term. 
All terms have a ``.n3(namespace_manager = None)`` method, which will return a suitable N3 format:: >>> from rdflib import Graph, URIRef, Literal, BNode >>> from rdflib.namespace import FOAF, NamespaceManager >>> person = URIRef("http://xmlns.com/foaf/0.1/Person") >>> person.n3() '' >>> g = Graph() >>> g.bind("foaf", FOAF) >>> person.n3(g.namespace_manager) 'foaf:Person' >>> l = Literal(2) >>> l.n3() '"2"^^' >>> l.n3(g.namespace_manager) '"2"^^xsd:integer' The namespace manage also has a useful method compute_qname g.namespace_manager.compute_qname(x) which takes an url and decomposes it into the parts:: self.assertEqual(g.compute_qname(URIRef("http://foo/bar#baz")), ("ns2", URIRef("http://foo/bar#"), "baz")) __ http://rdflib.net/rdflib-2.4.0/html/public/rdflib.syntax.NamespaceManager.NamespaceManager-class.html Namespaces in SPARQL Queries ---------------------------- The ``initNs`` argument supplied to :meth:`~rdflib.graph.Graph.query` is a dictionary of namespaces to be expanded in the query string. If you pass no ``initNs`` argument, the namespaces registered with the graphs namespace_manager are used:: from rdflib.namespace import FOAF graph.query('SELECT * WHERE { ?p a foaf:Person }', initNs={ 'foaf': FOAF }) In order to use an empty prefix (e.g. ``?a :knows ?b``), use a ``PREFIX`` directive with no prefix in the SPARQL query to set a default namespace: .. code-block:: sparql PREFIX : rdflib-6.1.1/docs/persistence.rst000066400000000000000000000056101415774155300170050ustar00rootroot00000000000000.. _persistence: Persistence =========== Persistence =========== RDFLib provides an :class:`abstracted Store API ` for persistence of RDF and Notation 3. 
The :class:`~rdflib.graph.Graph` class works with instances of this API (as the first argument to its constructor) for triple-based management of an RDF store including: garbage collection, transaction management, update, pattern matching, removal, length, and database management (:meth:`~rdflib.graph.Graph.open` / :meth:`~rdflib.graph.Graph.close` / :meth:`~rdflib.graph.Graph.destroy`). Additional persistence mechanisms can be supported by implementing this API for a different store. Stores currently shipped with core RDFLib ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * :class:`Memory ` - not persistent! * :class:`~rdflib.plugins.stores.berkeleydb.BerkeleyDB` - on disk persistence via Python's `berkeleydb package `_ * :class:`~rdflib.plugins.stores.sparqlstore.SPARQLStore` - a read-only wrapper around a remote SPARQL Query endpoint * :class:`~rdflib.plugins.stores.sparqlstore.SPARQLUpdateStore` - a read-write wrapper around a remote SPARQL query/update endpoint pair Usage ^^^^^ In most cases, passing the name of the store to the Graph constructor is enough: .. code-block:: python from rdflib import Graph graph = Graph(store='BerkeleyDB') Most stores offering on-disk persistence will need to be opened before reading or writing. When peristing a triplestore, rather than a ConjuntiveGraph quadstore, you need to specify an identifier with which you can open the graph: .. code-block:: python graph = Graph('BerkeleyDB', identifier='mygraph') # first time create the store: graph.open('/home/user/data/myRDFLibStore', create=True) # work with the graph: data = """ PREFIX : :a :b :c . :d :e :f . :d :g :h . """ graph.parse(data=data, format="ttl") # when done! graph.close() When done, :meth:`~rdflib.graph.Graph.close` must be called to free the resources associated with the store. 
Additional store plugins ^^^^^^^^^^^^^^^^^^^^^^^^ More store implementations are available in RDFLib extension projects: * `rdflib-sqlalchemy `_, which supports stored on a wide-variety of RDBMs backends, * `rdflib-leveldb `_ - a store on to of Google's `LevelDB `_ key-value store. * `rdflib-kyotocabinet `_ - a store on to of the `Kyoto Cabinet `_ key-value store. Example ^^^^^^^ * :mod:`examples.berkeleydb_example` contains an example for using a BerkeleyDB store. * :mod:`examples.sparqlstore_example` contains an example for using a SPARQLStore. rdflib-6.1.1/docs/persisting_n3_terms.rst000066400000000000000000000116661415774155300204720ustar00rootroot00000000000000.. _persisting_n3_terms: =========================== Persisting Notation 3 Terms =========================== Using N3 Syntax for Persistence ------------------------------- Blank Nodes, Literals, URI References, and Variables can be distinguished in persistence by relying on Notation 3 syntax convention. All URI References can be expanded and persisted as: .. code-block:: text <..URI..> All Literals can be expanded and persisted as: .. code-block:: text "..value.."@lang or "..value.."^^dtype_uri .. note:: ``@lang`` is a language tag and ``^^dtype_uri`` is the URI of a data type associated with the Literal Blank Nodes can be expanded and persisted as: .. code-block:: text _:Id .. note:: where Id is an identifier as determined by skolemization. Skolemization is a syntactic transformation routinely used in automatic inference systems in which existential variables are replaced by 'new' functions - function names not used elsewhere - applied to any enclosing universal variables. In RDF, Skolemization amounts to replacing every blank node in a graph by a 'new' name, i.e. a URI reference which is guaranteed to not occur anywhere else. 
In effect, it gives 'arbitrary' names to the anonymous entities whose existence was asserted by the use of blank nodes: the arbitrariness of the names ensures that nothing can be inferred that would not follow from the bare assertion of existence represented by the blank node. (Using a literal would not do. Literals are never 'new' in the required sense.) Variables can be persisted as they appear in their serialization ``(?varName)`` - since they only need be unique within their scope (the context of their associated statements) These syntactic conventions can facilitate term round-tripping. Variables by Scope ------------------ Would an interface be needed in order to facilitate a quick way to aggregate all the variables in a scope (given by a formula identifier)? An interface such as: .. code-block:: python def variables(formula_identifier) The Need to Skolemize Formula Identifiers ----------------------------------------- It would seem reasonable to assume that a formula-aware store would assign Blank Node identifiers as names of formulae that appear in a N3 serialization. So for instance, the following bit of N3: .. code-block:: text {?x a :N3Programmer} => {?x :has :Migrane} Could be interpreted as the assertion of the following statement: .. code-block:: text _:a log:implies _:b However, how are ``_:a`` and ``_:b`` distinguished from other Blank Nodes? A formula-aware store would be expected to persist the first set of statements as quoted statements in a formula named ``_:a`` and the second set as quoted statements in a formula named ``_:b``, but it would not be cost-effective for a serializer to have to query the store for all statements in a context named ``_:a`` in order to determine if ``_:a`` was associated with a formula (so that it could be serialized properly). 
Relying on ``log:Formula`` Membership ------------------------------------- The store could rely on explicit ``log:Formula`` membership (via ``rdf:type`` statements) to model the distinction of Blank Nodes associated with formulae. However, would these statements be expected from an N3 parser or known implicitly by the store? i.e., would all such Blank Nodes match the following pattern: .. code-block:: text ?formula rdf:type log:Formula Relying on an Explicit Interface -------------------------------- A formula-aware store could also support the persistence of this distinction by implementing a method that returns an iterator over all the formulae in the store: .. code-block:: python def formulae(triple=None) This function would return all the Blank Node identifiers assigned to formulae or just those that contain statements matching the given triple pattern and would be the way a serializer determines if a term refers to a formula (in order to properly serializer it). How much would such an interface reduce the need to model formulae terms as first class objects (perhaps to be returned by the :meth:`~rdflib.Graph.triple` function)? Would it be more useful for the :class:`~rdflib.Graph` (or the store itself) to return a Context object in place of a formula term (using the formulae interface to make this determination)? Conversely, would these interfaces (variables and formulae) be considered optimizations only since you have the distinction by the kinds of terms triples returns (which would be expanded to include variables and formulae)? Persisting Formula Identifiers ------------------------------ This is the most straight forward way to maintain this distinction - without relying on extra interfaces. Formula identifiers could be persisted distinctly from other terms by using the following notation: .. code-block:: text {_:bnode} or {<.. 
URI ..>} This would facilitate their persistence round-trip - same as the other terms that rely on N3 syntax to distinguish between each other. rdflib-6.1.1/docs/plugin_parsers.rst000066400000000000000000000037571415774155300175300ustar00rootroot00000000000000.. _plugin_parsers: Plugin parsers ============== Plugin parsers ============== These serializers are available in default RDFLib, you can use them by passing the name to graph's :meth:`~rdflib.graph.Graph.parse` method:: graph.parse(my_url, format='n3') The ``html`` parser will auto-detect RDFa, HTurtle or Microdata. It is also possible to pass a mime-type for the ``format`` parameter:: graph.parse(my_url, format='application/rdf+xml') If you are not sure what format your file will be, you can use :func:`rdflib.util.guess_format` which will guess based on the file extension. ========= ==================================================================== Name Class ========= ==================================================================== json-ld :class:`~rdflib.plugins.parsers.jsonld.JsonLDParser` hext :class:`~rdflib.plugins.parsers.hext.HextuplesParser` html :class:`~rdflib.plugins.parsers.structureddata.StructuredDataParser` n3 :class:`~rdflib.plugins.parsers.notation3.N3Parser` nquads :class:`~rdflib.plugins.parsers.nquads.NQuadsParser` nt :class:`~rdflib.plugins.parsers.ntriples.NTParser` trix :class:`~rdflib.plugins.parsers.trix.TriXParser` turtle :class:`~rdflib.plugins.parsers.notation3.TurtleParser` xml :class:`~rdflib.plugins.parsers.rdfxml.RDFXMLParser` ========= ==================================================================== Multi-graph IDs --------------- Note that for correct parsing of multi-graph data, e.g. 
Trig, HexT, etc., into a ``ConjunctiveGraph`` or a ``Dataset``, as opposed to a context-unaware ``Graph``, you will need to set the ``publicID`` of the ``ConjunctiveGraph`` a ``Dataset`` to the identifier of the ``default_context`` (default graph), for example:: d = Dataset() d.parse( data=""" ... """, format="trig", publicID=d.default_context.identifier ) (from the file tests/test_serializer_hext.py) rdflib-6.1.1/docs/plugin_query_results.rst000066400000000000000000000030221415774155300207600ustar00rootroot00000000000000.. _plugin_query_results: Plugin query results ==================== Plugin query results ==================== Plugins for reading and writing of (SPARQL) :class:`~rdflib.query.QueryResult` - pass ``name`` to either :meth:`~rdflib.query.QueryResult.parse` or :meth:`~rdflib.query.QueryResult.serialize` Parsers ------- ==== ==================================================================== Name Class ==== ==================================================================== csv :class:`~rdflib.plugins.sparql.results.csvresults.CSVResultParser` json :class:`~rdflib.plugins.sparql.results.jsonresults.JSONResultParser` tsv :class:`~rdflib.plugins.sparql.results.tsvresults.TSVResultParser` xml :class:`~rdflib.plugins.sparql.results.xmlresults.XMLResultParser` ==== ==================================================================== Serializers ----------- ==== ======================================================================== Name Class ==== ======================================================================== csv :class:`~rdflib.plugins.sparql.results.csvresults.CSVResultSerializer` json :class:`~rdflib.plugins.sparql.results.jsonresults.JSONResultSerializer` txt :class:`~rdflib.plugins.sparql.results.txtresults.TXTResultSerializer` xml :class:`~rdflib.plugins.sparql.results.xmlresults.XMLResultSerializer` ==== ======================================================================== 
rdflib-6.1.1/docs/plugin_serializers.rst000066400000000000000000000037701415774155300204000ustar00rootroot00000000000000.. _plugin_serializers: Plugin serializers ================== Plugin serializers ================== These serializers are available in default RDFLib, you can use them by passing the name to a graph's :meth:`~rdflib.graph.Graph.serialize` method:: print graph.serialize(format='n3') It is also possible to pass a mime-type for the ``format`` parameter:: graph.serialize(my_url, format='application/rdf+xml') ========== =============================================================== Name Class ========== =============================================================== json-ld :class:`~rdflib.plugins.serializers.jsonld.JsonLDSerializer` n3 :class:`~rdflib.plugins.serializers.n3.N3Serializer` nquads :class:`~rdflib.plugins.serializers.nquads.NQuadsSerializer` nt :class:`~rdflib.plugins.serializers.nt.NTSerializer` hext :class:`~rdflib.plugins.serializers.hext.HextuplesSerializer` pretty-xml :class:`~rdflib.plugins.serializers.rdfxml.PrettyXMLSerializer` trig :class:`~rdflib.plugins.serializers.trig.TrigSerializer` trix :class:`~rdflib.plugins.serializers.trix.TriXSerializer` turtle :class:`~rdflib.plugins.serializers.turtle.TurtleSerializer` longturtle :class:`~rdflib.plugins.serializers.turtle.LongTurtleSerializer` xml :class:`~rdflib.plugins.serializers.rdfxml.XMLSerializer` ========== =============================================================== JSON-LD ------- JSON-LD - 'json-ld' - has been incorprated in rdflib since v6.0.0. HexTuples --------- The HexTuples Serializer - 'hext' - uses the HexTuples format defined at https://github.com/ontola/hextuples. For serialization of non-context-aware data sources, e.g. a single ``Graph``, the 'graph' field (6th variable in the Hextuple) will be an empty string. 
For context-aware (multi-graph) serialization, the 'graph' field of the default graph will be an empty string and the values for other graphs will be Blank Node IDs or IRIs. rdflib-6.1.1/docs/plugin_stores.rst000066400000000000000000000063041415774155300173570ustar00rootroot00000000000000.. _plugin_stores: Plugin stores ============= Plugin stores ============= Built In -------- The following Stores are contained within the rdflib core package: ================= ============================================================ Name Class ================= ============================================================ Auditable :class:`~rdflib.plugins.stores.auditable.AuditableStore` Concurrent :class:`~rdflib.plugins.stores.concurrent.ConcurrentStore` SimpleMemory :class:`~rdflib.plugins.stores.memory.SimpleMemory` Memory :class:`~rdflib.plugins.stores.memory.Memory` SPARQLStore :class:`~rdflib.plugins.stores.sparqlstore.SPARQLStore` SPARQLUpdateStore :class:`~rdflib.plugins.stores.sparqlstore.SPARQLUpdateStore` BerkeleyDB :class:`~rdflib.plugins.stores.berkeleydb.BerkeleyDB` default :class:`~rdflib.plugins.stores.memory.Memory` ================= ============================================================ External -------- The following Stores are defined externally to rdflib's core package, so look to their documentation elsewhere for specific details of use. ================= ==================================================== ============================================================================================= Name Repository Notes ================= ==================================================== ============================================================================================= SQLAlchemy ``_ An SQLAlchemy-backed, formula-aware RDFLib Store. 
Tested dialects are: SQLite, MySQL & PostgreSQL leveldb ``_ An adaptation of RDFLib BerkeleyDB Store’s key-value approach, using LevelDB as a back-end Kyoto Cabinet ``_ An adaptation of RDFLib BerkeleyDB Store’s key-value approach, using Kyoto Cabinet as a back-end HDT ``_ A Store back-end for rdflib to allow for reading and querying `HDT `_ documents Oxigraph ``_ Works with the `Pyoxigraph `_ Python graph database library ================= ==================================================== ============================================================================================= _If you have, or know of a Store implementation and would like it listed here, please submit a Pull Request!_ Use --- You can use these stores like this: .. code-block:: python from rdflib import Graph # use the default memory Store graph = Graph() # use the BerkeleyDB Store graph = Graph(store="BerkeleyDB") In some cases, you must explicitly _open_ and _close_ a store, for example: .. code-block:: python from rdflib import Graph # use the BerkeleyDB Store graph = Graph(store="BerkeleyDB") graph.open("/some/folder/location") # do things ... graph.close() rdflib-6.1.1/docs/plugins.rst000066400000000000000000000007651415774155300161500ustar00rootroot00000000000000 Plugins ======= .. image:: /_static/plugins-diagram.* :alt: rdflib plugin "architecture" :width: 450px :target: _static/plugins-diagram.svg Many parts of RDFLib are extensible with plugins, `see setuptools' 'Creating and discovering plugins' `_. These pages list the plugins included in RDFLib core. .. 
toctree:: :maxdepth: 1 plugin_parsers plugin_serializers plugin_stores plugin_query_results rdflib-6.1.1/docs/plugintable.py000066400000000000000000000011661415774155300166110ustar00rootroot00000000000000""" Crappy utility for generating Sphinx tables for rdflib plugins """ import sys from rdflib.plugin import _plugins cls = sys.argv[1] p = {} for (name, kind), plugin in _plugins.items(): if "/" in name: continue # skip duplicate entries for mimetypes if cls == kind.__name__: p[name] = "%s.%s" % (plugin.module_path, plugin.class_name) l1 = max(len(x) for x in p) l2 = max(10 + len(x) for x in p.values()) def hr(): print("=" * l1, "=" * l2) hr() print("%-*s" % (l1, "Name"), "%-*s" % (l2, "Class")) hr() for n in sorted(p): print("%-*s" % (l1, n), ":class:`~%s`" % p[n]) hr() print() rdflib-6.1.1/docs/rdf_terms.rst000066400000000000000000000150501415774155300164450ustar00rootroot00000000000000.. _rdf_terms: RDF terms in rdflib =================== RDF terms in rdflib =================== Terms are the kinds of objects that can appear in a quoted/asserted triples. Those that are part of core RDF concepts are: ``Blank Node``, ``URI Reference`` and ``Literal``, the latter consisting of a literal value and either a `datatype `_ or an :rfc:`3066` language tag. All terms in RDFLib are sub-classes of the :class:`rdflib.term.Identifier` class. Nodes are a subset of the Terms that the underlying store actually persists. The set of such Terms depends on whether or not the store is formula-aware. Stores that aren't formula-aware only persist those terms core to the RDF Model but those that are formula-aware also persist the N3 extensions. However, utility terms that only serve the purpose of matching nodes by term-patterns will probably only be terms and not nodes. 
URIRefs ======= A *URI reference* within an RDF graph is a Unicode string that does not contain any control characters ( #x00 - #x1F, #x7F-#x9F) and would produce a valid URI character sequence representing an absolute URI with optional fragment identifier -- `W3 RDF Concepts`__ .. __: http://www.w3.org/TR/rdf-concepts/#section-Graph-URIref .. autoclass:: rdflib.term.URIRef :noindex: .. code-block:: python >>> from rdflib import URIRef >>> uri = URIRef() Traceback (most recent call last): File "", line 1, in TypeError: __new__() missing 1 required positional argument: 'value' >>> uri = URIRef('') >>> uri rdflib.term.URIRef('') >>> uri = URIRef('http://example.com') >>> uri rdflib.term.URIRef('http://example.com') >>> uri.n3() '' .. _rdflibliterals: Literals ======== Literals are attribute values in RDF, for instance, a person's name, the date of birth, height, etc. Literals can have a datatype (i.e. this is a *double*) or a language tag (this label is in *English*). .. autoclass:: rdflib.term.Literal :noindex: A literal in an RDF graph contains one or two named components. All literals have a lexical form being a Unicode string, which SHOULD be in Normal Form C. Plain literals have a lexical form and optionally a language tag as defined by :rfc:`3066`, normalized to lowercase. An exception will be raised if illegal language-tags are passed to :meth:`rdflib.term.Literal.__init__`. Typed literals have a lexical form and a datatype URI being an RDF URI reference. .. note:: When using the language tag, care must be taken not to confuse language with locale. The language tag relates only to human language text. Presentational issues should be addressed in end-user applications. .. note:: The case normalization of language tags is part of the description of the abstract syntax, and consequently the abstract behaviour of RDF applications. It does not constrain an RDF implementation to actually normalize the case. 
Crucially, the result of comparing two language tags should not be sensitive to the case of the original input. -- `RDF Concepts and Abstract Syntax`__ .. __: http://www.w3.org/TR/rdf-concepts/#section-Graph-URIref BNodes ====== In RDF, a blank node (also called BNode) is a node in an RDF graph representing a resource for which a URI or literal is not given. The resource represented by a blank node is also called an anonymous resource. According to the RDF standard, a blank node can only be used as subject or object in a triple, although in some syntaxes like Notation 3 it is acceptable to use a blank node as a predicate. If a blank node has a node ID (not all blank nodes are labelled in all RDF serializations), it is limited in scope to a particular serialization of the RDF graph, i.e. the node p1 in the subsequent example does not represent the same node as a node named p1 in any other graph --`wikipedia`__ .. __: http://en.wikipedia.org/wiki/Blank_node .. autoclass:: rdflib.term.BNode :noindex: .. code-block:: python >>> from rdflib import BNode >>> bn = BNode() >>> bn rdflib.term.BNode('AFwALAKU0') >>> bn.n3() '_:AFwALAKU0' Python support -------------- RDFLib Literals essentially behave like unicode characters with an XML Schema datatype or language attribute. .. image:: /_static/datatype_hierarchy.png :alt: datatype hierarchy :align: center :width: 629 :height: 717 The class provides a mechanism to both convert Python literals (and their built-ins such as time/date/datetime) into equivalent RDF Literals and (conversely) convert Literals to their Python equivalent. 
This mapping to and from Python literals is done as follows: ====================== =========== XML Datatype Python type ====================== =========== None None [#f1]_ xsd:time time [#f2]_ xsd:date date xsd:dateTime datetime xsd:string None xsd:normalizedString None xsd:token None xsd:language None xsd:boolean boolean xsd:decimal Decimal xsd:integer long xsd:nonPositiveInteger int xsd:long long xsd:nonNegativeInteger int xsd:negativeInteger int xsd:int long xsd:unsignedLong long xsd:positiveInteger int xsd:short int xsd:unsignedInt long xsd:byte int xsd:unsignedShort int xsd:unsignedByte int xsd:float float xsd:double float xsd:base64Binary :mod:`base64` xsd:anyURI None rdf:XMLLiteral :class:`xml.dom.minidom.Document` [#f3]_ rdf:HTML :class:`xml.dom.minidom.DocumentFragment` ====================== =========== .. [#f1] plain literals map directly to value space .. [#f2] Date, time and datetime literals are mapped to Python instances using the `isodate `_ package). .. [#f3] this is a bit dirty - by accident the ``html5lib`` parser produces ``DocumentFragments``, and the xml parser ``Documents``, letting us use this to decide what datatype when round-tripping. An appropriate data-type and lexical representation can be found using: .. autofunction:: rdflib.term._castPythonToLiteral and the other direction with .. autofunction:: rdflib.term._castLexicalToPython All this happens automatically when creating ``Literal`` objects by passing Python objects to the constructor, and you never have to do this manually. You can add custom data-types with :func:`rdflib.term.bind`, see also :mod:`examples.custom_datatype` rdflib-6.1.1/docs/sphinx-requirements.txt000066400000000000000000000001211415774155300205120ustar00rootroot00000000000000sphinx==4.3.1 sphinxcontrib-apidoc git+https://github.com/gniezen/n3pygments.git rdflib-6.1.1/docs/upgrade4to5.rst000066400000000000000000000310461415774155300166260ustar00rootroot00000000000000.. 
_upgrade4to5: Upgrading from RDFLib version 4.2.2 to 5.0.0 ============================================ Upgrading 4.2.2 to 5.0.0 ============================================ RDFLib version 5.0.0 appeared over 3 years after the previous release, 4.2.2 and contains a large number of both enhancements and bug fixes. Fundamentally though, 5.0.0 is compatible with 4.2.2. Major Changes ------------- Literal Ordering ^^^^^^^^^^^^^^^^ Literal total ordering `PR #793 `_ is implemented. That means all literals can now be compared to be greater than or less than any other literal. This is required for implementing some specific SPARQL features, but it is counter-intuitive to those who are expecting a TypeError when certain normally-incompatible types are compared. For example, comparing a ``Literal(int(1), datatype=xsd:integer)`` to ``Literal(datetime.date(10,01,2020), datatype=xsd:date)`` using a ``>`` or ``<`` operator in rdflib 4.2.2 and earlier, would normally throw a TypeError, however in rdflib 5.0.0 this operation now returns a True or False according to the Literal Total Ordering according the rules outlined in `PR #793 `_ Removed RDF Parsers ^^^^^^^^^^^^^^^^^^^ The RDFa and Microdata format RDF parsers were removed from rdflib. There are still other python libraries available to implement these parsers. All Changes ----------- This list has been assembled from Pull Request and commit information. General Bugs Fixed: ^^^^^^^^^^^^^^^^^^^ * Pr 451 redux `PR #978 `_ * NTriples fails to parse URIs with only a scheme `ISSUE #920 `_ `PR #974 `_ * cannot clone it on windows - Remove colons from test result files. Fix #901. 
`ISSUE #901 `_ `PR #971 `_ * Add requirement for requests to setup.py `PR #969 `_ * fixed URIRef including native unicode characters `PR #961 `_ * DCTERMS.format not working `ISSUE #932 `_ * infixowl.manchesterSyntax do not encode strings `PR #906 `_ * Fix blank node label to not contain '_:' during parsing `PR #886 `_ * rename new SPARQLWrapper to SPARQLConnector `PR #872 `_ * Fix #859. Unquote and Uriquote Literal Datatype. `PR #860 `_ * Parsing nquads `ISSUE #786 `_ * ntriples spec allows for upper-cased lang tag, fixes #782 `PR #784 `_ * Error parsing N-Triple file using RDFlib `ISSUE #782 `_ * Adds escaped single quote to literal parser `PR #736 `_ * N3 parse error on single quote within single quotes `ISSUE #732 `_ * Fixed #725 `PR #730 `_ * test for issue #725: canonicalization collapses BNodes `PR #726 `_ * RGDA1 graph canonicalization sometimes still collapses distinct BNodes `ISSUE #725 `_ * Accept header should use a q parameter `PR #720 `_ * Added test for Issue #682 and fixed. `PR #718 `_ * Incompatibility with Python3: unichr `ISSUE #687 `_ * namespace.py include colon in ALLOWED_NAME_CHARS `PR #663 `_ * namespace.py fix compute_qname missing namespaces `PR #649 `_ * RDFa parsing Error! `__init__()` got an unexpected keyword argument 'encoding' `ISSUE #639 `_ * Bugfix: `term.Literal.__add__` `PR #451 `_ * fixup of #443 `PR #445 `_ * Microdata to rdf second edition bak `PR #444 `_ Enhanced Features: ^^^^^^^^^^^^^^^^^^ * Register additional serializer plugins for SPARQL mime types. `PR #987 `_ * Pr 388 redux `PR #979 `_ * Allows RDF terms introduced by JSON-LD 1.1 `PR #970 `_ * make SPARQLConnector work with DBpedia `PR #941 `_ * ClosedNamespace returns right exception for way of access `PR #866 `_ * Not adding all namespaces for n3 serializer `PR #832 `_ * Adds basic support of xsd:duration `PR #808 `_ * Add possibility to set authority and basepath to skolemize graph `PR #807 `_ * Change notation3 list realization to non-recursive function. 
`PR #805 `_ * Suppress warning for not using custom encoding. `PR #800 `_ * Add support to parsing large xml inputs `ISSUE #749 `_ `PR #750 `_ * improve hash efficiency by directly using str/unicode hash `PR #746 `_ * Added the csvw prefix to the RDFa initial context. `PR #594 `_ * syncing changes from pyMicrodata `PR #587 `_ * Microdata parser: updated the parser to the latest version of the microdata->rdf note (published in December 2014) `PR #443 `_ * Literal.toPython() support for xsd:hexBinary `PR #388 `_ SPARQL Fixes: ^^^^^^^^^^^^^ * Total order patch patch `PR #862 `_ * use <<= instead of deprecated << `PR #861 `_ * Fix #847 `PR #856 `_ * RDF Literal "1"^^xsd:boolean should _not_ coerce to True `ISSUE #847 `_ * Makes NOW() return an UTC date `PR #844 `_ * NOW() SPARQL should return an xsd:dateTime with a timezone `ISSUE #843 `_ * fix property paths bug: issue #715 `PR #822 `_ `ISSUE #715 `_ * MulPath: correct behaviour of n3() `PR #820 `_ * Literal total ordering `PR #793 `_ * Remove SPARQLWrapper dependency `PR #744 `_ * made UNION faster by not preventing duplicates `PR #741 `_ * added a hook to add custom functions to SPARQL `PR #723 `_ * Issue714 `PR #717 `_ * Use <<= instead of deprecated << in SPARQL parser `PR #417 `_ * Custom FILTER function for SPARQL engine `ISSUE #274 `_ Code Quality and Cleanups: ^^^^^^^^^^^^^^^^^^^^^^^^^^ * a slightly opinionated autopep8 run `PR #870 `_ * remove rdfa and microdata parsers from core RDFLib `PR #828 `_ * ClosedNamespace KeyError -> AttributeError `PR #827 `_ * typo in rdflib/plugins/sparql/update.py `ISSUE #760 `_ * Fix logging in interactive mode `PR #731 `_ * make namespace module flake8-compliant, change exceptions in that mod… `PR #711 `_ * delete ez_setup.py? 
`ISSUE #669 `_ * code duplication issue between rdflib and pymicrodata `ISSUE #582 `_ * Transition from 2to3 to use of six.py to be merged in 5.0.0-dev `PR #519 `_ * sparqlstore drop deprecated methods and args `PR #516 `_ * python3 code seems shockingly inefficient `ISSUE #440 `_ * removed md5_term_hash, fixes #240 `PR #439 `_ `ISSUE #240 `_ Testing: ^^^^^^^^ * 3.7 for travis `PR #864 `_ * Added trig unit tests to highlight some current parsing/serializing issues `PR #431 `_ Documentation Fixes: ^^^^^^^^^^^^^^^^^^^^ * Fix a doc string in the query module `PR #976 `_ * setup.py: Make the license field use an SPDX identifier `PR #789 `_ * Update README.md `PR #764 `_ * Update namespaces_and_bindings.rst `PR #757 `_ * DOC: README.md: rdflib-jsonld, https uris `PR #712 `_ * make doctest support py2/py3 `ISSUE #707 `_ * `pip install rdflib` (as per README.md) gets OSError on Mint 18.1 `ISSUE #704 `_ `PR #717 `_ * Use <<= instead of deprecated << in SPARQL parser `PR #417 `_ * Custom FILTER function for SPARQL engine `ISSUE #274 `_ Code Quality and Cleanups: ^^^^^^^^^^^^^^^^^^^^^^^^^^ * a slightly opinionated autopep8 run `PR #870 `_ * remove rdfa and microdata parsers from core RDFLib `PR #828 `_ * ClosedNamespace KeyError -> AttributeError `PR #827 `_ * typo in rdflib/plugins/sparql/update.py `ISSUE #760 `_ * Fix logging in interactive mode `PR #731 `_ * make namespace module flake8-compliant, change exceptions in that mod… `PR #711 `_ * delete ez_setup.py? 
`ISSUE #669 `_ * code duplication issue between rdflib and pymicrodata `ISSUE #582 `_ * Transition from 2to3 to use of six.py to be merged in 5.0.0-dev `PR #519 `_ * sparqlstore drop deprecated methods and args `PR #516 `_ * python3 code seems shockingly inefficient `ISSUE #440 `_ * removed md5_term_hash, fixes #240 `PR #439 `_ `ISSUE #240 `_ Testing: ^^^^^^^^ * 3.7 for travis `PR #864 `_ * Added trig unit tests to highlight some current parsing/serializing issues `PR #431 `_ Documentation Fixes: ^^^^^^^^^^^^^^^^^^^^ * Fix a doc string in the query module `PR #976 `_ * setup.py: Make the license field use an SPDX identifier `PR #789 `_ * Update README.md `PR #764 `_ * Update namespaces_and_bindings.rst `PR #757 `_ * DOC: README.md: rdflib-jsonld, https uris `PR #712 `_ * make doctest support py2/py3 `ISSUE #707 `_ * `pip install rdflib` (as per README.md) gets OSError on Mint 18.1 `ISSUE #704 `_ rdflib-6.1.1/docs/upgrade5to6.rst000066400000000000000000000050771415774155300166350ustar00rootroot00000000000000.. _upgrade4to5: Upgrading from RDFLib version 5.0.0 to 6.0.0 ============================================ Upgrading 5.0.0 to 6.0.0 ============================================ 6.0.0 fully adopts Python 3 practices and drops Python 2 support so it is neater, faster and generally more modern than 5.0.0. It also tidies up the ``Graph`` API (removing duplicate functions) so it does include a few breaking changes. Additionally, there is a long list of PRs merged into 6.0.0 adding a number of small fixes and features which are listed below. RDFLib version 5.0.0 was released in 2020, 3 years after the previous version (4.2.2) and is fundamentally 5.0.0 compatible with. If you need very long-term backwards-compatibility or Python 2 support, you need 5.0.0. Major Changes ------------- The most notable changes in RDFLib 6.0.0 are: Python 3.7+ ^^^^^^^^^^^ * The oldest version of python you can use to run RDFLib is now 3.7. 
* This is a big jump from RDFLib 5.0.0 that worked on python 2.7 and 3.5. * This change is to allow the library maintainers to adopt more modern development tools, newer language features, and avoid the need to support EOL versions of python in he future JSON-LD integration and JSON-LD 1.1 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ * The json-ld serializer/parser plugin was by far the most commonly used RDFLib addon. * Last year we brought it under the RDFLib org in Github * Now for 6.0.0 release the JSON-LD serializer and parser are integrated into RDFLib core * This includes the experimental support for the JSON-LD v1.1 spec * You no longer need to install the json-ld dependency separately. All Changes ----------- This list has been assembled from Pull Request and commit information. General Bugs Fixed: ^^^^^^^^^^^^^^^^^^^ * Pr 451 redux `PR #978 `_ Enhanced Features: ^^^^^^^^^^^^^^^^^^ * Register additional serializer plugins for SPARQL mime types. `PR #987 `_ SPARQL Fixes: ^^^^^^^^^^^^^ * Total order patch patch `PR #862 `_ Code Quality and Cleanups: ^^^^^^^^^^^^^^^^^^^^^^^^^^ * a slightly opinionated autopep8 run `PR #870 `_ Testing: ^^^^^^^^ * 3.7 for travis `PR #864 `_ Documentation Fixes: ^^^^^^^^^^^^^^^^^^^^ * Fix a doc string in the query module `PR #976 `_ Integrade JSON-LD into RDFLib: ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `PR #1354 `_ rdflib-6.1.1/docs/utilities.rst000066400000000000000000000110601415774155300164700ustar00rootroot00000000000000Utilities & convenience functions ================================= For RDF programming, RDFLib and Python may not be the fastest tools, but we try hard to make them the easiest and most convenient to use and thus the *fastest* overall! This is a collection of hints and pointers for hassle-free RDF coding. Functional properties --------------------- Use :meth:`~rdflib.graph.Graph.value` and :meth:`~rdflib.graph.Graph.set` to work with :term:`functional property` instances, i.e. 
properties than can only occur once for a resource. .. code-block:: python from rdflib import Graph, URIRef, Literal, BNode from rdflib.namespace import FOAF, RDF g = Graph() g.bind("foaf", FOAF) # Add demo data bob = URIRef("http://example.org/people/Bob") g.add((bob, RDF.type, FOAF.Person)) g.add((bob, FOAF.name, Literal("Bob"))) g.add((bob, FOAF.age, Literal(38))) # To get a single value, use 'value' print(g.value(bob, FOAF.age)) # prints: 38 # To change a single of value, use 'set' g.set((bob, FOAF.age, Literal(39))) print(g.value(bob, FOAF.age)) # prints: 39 Slicing graphs -------------- Python allows slicing arrays with a ``slice`` object, a triple of ``start``, ``stop`` and ``step-size``: .. code-block:: python for i in range(20)[2:9:3]: print(i) # prints: # 2, 5, 8 RDFLib graphs override ``__getitem__`` and we pervert the slice triple to be a RDF triple instead. This lets slice syntax be a shortcut for :meth:`~rdflib.graph.Graph.triples`, :meth:`~rdflib.graph.Graph.subject_predicates`, :meth:`~rdflib.graph.Graph.contains`, and other Graph query-methods: .. code-block:: python from rdflib import Graph, URIRef, Literal, BNode from rdflib.namespace import FOAF, RDF g = Graph() g.bind("foaf", FOAF) # Add demo data bob = URIRef("http://example.org/people/Bob") bill = URIRef("http://example.org/people/Bill") g.add((bob, RDF.type, FOAF.Person)) g.add((bob, FOAF.name, Literal("Bob"))) g.add((bob, FOAF.age, Literal(38))) g.add((bob, FOAF.knows, bill)) print(g[:]) # same as print(iter(g)) print(g[bob]) # same as print(g.predicate_objects(bob)) print(g[bob: FOAF.knows]) # same as print(g.objects(bob, FOAF.knows)) print(g[bob: FOAF.knows: bill]) # same as print((bob, FOAF.knows, bill) in g) print(g[:FOAF.knows]) # same as print(g.subject_objects(FOAF.knows)) See :mod:`examples.slice` for a complete example. .. 
note:: Slicing is convenient for run-once scripts for playing around in the Python ``REPL``, however since slicing returns tuples of varying length depending on which parts of the slice are bound, you should be careful using it in more complicated programs. If you pass in variables, and they are ``None`` or ``False``, you may suddenly get a generator of different length tuples back than you expect. SPARQL Paths ------------ `SPARQL property paths `_ are possible using overridden operators on URIRefs. See :mod:`examples.foafpaths` and :mod:`rdflib.paths`. Serializing a single term to N3 ------------------------------- For simple output, or simple serialisation, you often want a nice readable representation of a term. All terms (URIRef, Literal etc.) have a ``n3``, method, which will return a suitable N3 format: .. code-block:: python from rdflib import Graph, URIRef, Literal from rdflib.namespace import FOAF # A URIRef person = URIRef("http://xmlns.com/foaf/0.1/Person") print(person.n3()) # prints: # Simplifying the output with a namespace prefix: g = Graph() g.bind("foaf", FOAF) print(person.n3(g.namespace_manager)) # prints foaf:Person # A typed literal l = Literal(2) print(l.n3()) # prints "2"^^ # Simplifying the output with a namespace prefix # XSD is built in, so no need to bind() it! l.n3(g.namespace_manager) # prints: "2"^^xsd:integer Parsing data from a string -------------------------- You can parse data from a string with the ``data`` param: .. code-block:: python from rdflib import Graph g = Graph().parse(data=" .") for r in g.triples((None, None, None)): print(r) # prints: (rdflib.term.URIRef('a:'), rdflib.term.URIRef('p:'), rdflib.term.URIRef('p:')) Command Line tools ------------------ RDFLib includes a handful of commandline tools, see :mod:`rdflib.tools`. 
rdflib-6.1.1/examples/000077500000000000000000000000001415774155300146135ustar00rootroot00000000000000rdflib-6.1.1/examples/__init__.py000066400000000000000000000000001415774155300167120ustar00rootroot00000000000000rdflib-6.1.1/examples/berkeleydb_example.py000066400000000000000000000076001415774155300210130ustar00rootroot00000000000000""" BerkeleyDB in use as a persistent Graph store. Example 1: simple actions * creating a ConjunctiveGraph using the BerkeleyDB Store * adding triples to it * counting them * closing the store, emptying the graph * re-opening the store using the same DB files * getting the same count of triples as before Example 2: larger data * loads multiple graphs downloaded from GitHub into a BerkeleyDB-baked graph stored in the folder gsq_vocabs. * does not delete the DB at the end so you can see it on disk """ import os from rdflib import ConjunctiveGraph, Namespace, Literal from rdflib.store import NO_STORE, VALID_STORE from tempfile import mktemp def example_1(): """Creates a ConjunctiveGraph and performs some BerkeleyDB tasks with it""" path = mktemp() # Declare we are using a BerkeleyDB Store graph = ConjunctiveGraph("BerkeleyDB") # Open previously created store, or create it if it doesn't exist yet # (always doesn't exist in this example as using temp file location) rt = graph.open(path, create=False) if rt == NO_STORE: # There is no underlying BerkeleyDB infrastructure, so create it print("Creating new DB") graph.open(path, create=True) else: print("Using existing DB") assert rt == VALID_STORE, "The underlying store is corrupt" print("Triples in graph before add:", len(graph)) print("(will always be 0 when using temp file for DB)") # Now we'll add some triples to the graph & commit the changes EG = Namespace("http://example.net/test/") graph.bind("eg", EG) graph.add((EG["pic:1"], EG.name, Literal("Jane & Bob"))) graph.add((EG["pic:2"], EG.name, Literal("Squirrel in Tree"))) graph.commit() print("Triples in graph after add:", len(graph)) 
print("(should be 2)") # display the graph in Turtle print(graph.serialize()) # close when done, otherwise BerkeleyDB will leak lock entries. graph.close() graph = None # reopen the graph graph = ConjunctiveGraph("BerkeleyDB") graph.open(path, create=False) print("Triples still in graph:", len(graph)) print("(should still be 2)") graph.close() # Clean up the temp folder to remove the BerkeleyDB database files... for f in os.listdir(path): os.unlink(path + "/" + f) os.rmdir(path) def example_2(): """Loads a number of SKOS vocabularies from GitHub into a BerkeleyDB-backed graph stored in the local folder 'gsq_vocabs' Should print out the number of triples after each load, e.g.: 177 248 289 379 421 628 764 813 965 1381 9666 9719 ... """ from urllib.request import urlopen, Request from urllib.error import HTTPError import json import base64 g = ConjunctiveGraph("BerkeleyDB") g.open("gsg_vocabs", create=True) # gsq_vocabs = "https://api.github.com/repos/geological-survey-of-queensland/vocabularies/git/trees/master" gsq_vocabs = "https://api.github.com/repos/geological-survey-of-queensland/vocabularies/git/trees/cd7244d39337c1f4ef164b1cf1ea1f540a7277db" try: res = urlopen(Request(gsq_vocabs, headers={"Accept": "application/json"})) except HTTPError as e: return e.code, str(e), None data = res.read() encoding = res.info().get_content_charset("utf-8") j = json.loads(data.decode(encoding)) for v in j["tree"]: # process the element in GitHub result if it's a Turtle file if v["path"].endswith(".ttl"): # for each file, call it by URL, decode it and parse it into the graph r = urlopen(v["url"]) content = json.loads(r.read().decode())["content"] g.parse(data=base64.b64decode(content).decode(), format="turtle") print(len(g)) print("loading complete") if __name__ == "__main__": example_1() example_2() rdflib-6.1.1/examples/conjunctive_graphs.py000066400000000000000000000041061415774155300210610ustar00rootroot00000000000000""" An RDFLib ConjunctiveGraph is an (unnamed) aggregation 
of all the Named Graphs within a Store. The :meth:`~rdflib.graph.ConjunctiveGraph.get_context` method can be used to get a particular named graph for use, such as to add triples to, or the default graph can be used. This example shows how to create Named Graphs and work with the conjunction (union) of all the graphs. """ from rdflib import Namespace, Literal, URIRef from rdflib.graph import Graph, ConjunctiveGraph from rdflib.plugins.stores.memory import Memory if __name__ == "__main__": LOVE = Namespace("http://love.com#") LOVERS = Namespace("http://love.com/lovers/") mary = URIRef("http://love.com/lovers/mary") john = URIRef("http://love.com/lovers/john") cmary = URIRef("http://love.com/lovers/mary") cjohn = URIRef("http://love.com/lovers/john") store = Memory() g = ConjunctiveGraph(store=store) g.bind("love", LOVE) g.bind("lovers", LOVERS) # Add a graph containing Mary's facts to the Conjunctive Graph gmary = Graph(store=store, identifier=cmary) # Mary's graph only contains the URI of the person she loves, not his cute name gmary.add((mary, LOVE.hasName, Literal("Mary"))) gmary.add((mary, LOVE.loves, john)) # Add a graph containing John's facts to the Conjunctive Graph gjohn = Graph(store=store, identifier=cjohn) # John's graph contains his cute name gjohn.add((john, LOVE.hasCuteName, Literal("Johnny Boy"))) # Enumerate contexts print("Contexts:") for c in g.contexts(): print(f"-- {c.identifier} ") print("===================") # Separate graphs print("John's Graph:") print(gjohn.serialize()) print("===================") print("Mary's Graph:") print(gmary.serialize()) print("===================") print("Full Graph") print(g.serialize()) print("===================") print("Query the conjunction of all graphs:") xx = None for x in g[mary : LOVE.loves / LOVE.hasCuteName]: xx = x print("Q: Who does Mary love?") print("A: Mary loves {}".format(xx)) 
rdflib-6.1.1/examples/custom_datatype.py000066400000000000000000000027041415774155300203750ustar00rootroot00000000000000""" RDFLib can map between RDF data-typed literals and Python objects. Mapping for integers, floats, dateTimes, etc. are already added, but you can also add your own. This example shows how :meth:`rdflib.term.bind` lets you register new mappings between literal datatypes and Python objects """ from rdflib import Graph, Literal, Namespace, XSD from rdflib import term if __name__ == "__main__": # Complex numbers are not registered by default # No custom constructor/serializer needed since # complex('(2+3j)') works fine term.bind(XSD.complexNumber, complex) # Create a complex number RDFlib Literal EG = Namespace("http://example.com/") c = complex(2, 3) l = Literal(c) # Add it to a graph g = Graph() g.add((EG.mysubject, EG.myprop, l)) # Print the triple to see what it looks like print(list(g)[0]) # prints: ( # rdflib.term.URIRef('http://example.com/mysubject'), # rdflib.term.URIRef('http://example.com/myprop'), # rdflib.term.Literal( # '(2+3j)', # datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#complexNumber') # ) # ) # Round-trip through n3 serialize/parse g2 = Graph().parse(data=g.serialize()) l2 = list(g2)[0] print(l2) # Compare with the original python complex object (should be True) # l2[2] is the object of the triple print(l2[2].value == c) rdflib-6.1.1/examples/custom_eval.py000066400000000000000000000033501415774155300175070ustar00rootroot00000000000000""" This example shows how a custom evaluation function can be added to handle certain SPARQL Algebra elements. A custom function is added that adds ``rdfs:subClassOf`` "inference" when asking for ``rdf:type`` triples. Here the custom eval function is added manually, normally you would use setuptools and entry_points to do it: i.e. 
in your setup.py:: entry_points = { 'rdf.plugins.sparqleval': [ 'myfunc = mypackage:MyFunction', ], } """ import rdflib from rdflib.plugins.sparql.evaluate import evalBGP from rdflib.namespace import FOAF, RDF, RDFS inferredSubClass = RDFS.subClassOf * "*" # any number of rdfs.subClassOf def customEval(ctx, part): """ Rewrite triple patterns to get super-classes """ if part.name == "BGP": # rewrite triples triples = [] for t in part.triples: if t[1] == RDF.type: bnode = rdflib.BNode() triples.append((t[0], t[1], bnode)) triples.append((bnode, inferredSubClass, t[2])) else: triples.append(t) # delegate to normal evalBGP return evalBGP(ctx, triples) raise NotImplementedError() if __name__ == "__main__": # add function directly, normally we would use setuptools and entry_points rdflib.plugins.sparql.CUSTOM_EVALS["exampleEval"] = customEval g = rdflib.Graph() g.parse("foaf.n3") # Add the subClassStmt so that we can query for it! g.add((FOAF.Person, RDFS.subClassOf, FOAF.Agent)) # Find all FOAF Agents for x in g.query( f""" PREFIX foaf: <{FOAF}> SELECT * WHERE {{ ?s a foaf:Agent . }} """ ): print(x) rdflib-6.1.1/examples/datasets.py000066400000000000000000000070611415774155300170010ustar00rootroot00000000000000""" An RDFLib Dataset is a slight extension to ConjunctiveGraph: it uses simpler terminology and has a few additional convenience method extensions, for example add() can be used to add quads directly to a specific Graph within the Dataset. This example file shows how to decalre a Dataset, add content to it, serialise it, query it and remove things from it. 
""" from rdflib import Dataset, URIRef, Literal, Namespace # # Create & Add # # Create an empty Dataset d = Dataset() # Add a namespace prefix to it, just like for Graph d.bind("ex", Namespace("http://example.com/")) # Declare a Graph URI to be used to identify a Graph graph_1 = URIRef("http://example.com/graph-1") # Add an empty Graph, identified by graph_1, to the Dataset d.graph(identifier=graph_1) # Add two quads to Graph graph_1 in the Dataset d.add( ( URIRef("http://example.com/subject-x"), URIRef("http://example.com/predicate-x"), Literal("Triple X"), graph_1, ) ) d.add( ( URIRef("http://example.com/subject-z"), URIRef("http://example.com/predicate-z"), Literal("Triple Z"), graph_1, ) ) # Add another quad to the Dataset to a non-existent Graph: # the Graph is created automatically d.add( ( URIRef("http://example.com/subject-y"), URIRef("http://example.com/predicate-y"), Literal("Triple Y"), URIRef("http://example.com/graph-2"), ) ) # printing the Dataset like this: print(d.serialize(format="trig")) # produces a result like this: """ @prefix ex: . ex:graph-1 { ex:subject-x ex:predicate-x "Triple X" . ex:subject-z ex:predicate-z "Triple Z" . } ex:graph-2 { ex:subject-y ex:predicate-y "Triple Y" . } """ print("Printing Serialised Dataset:") print("---") print(d.serialize(format="trig")) print("---") print() print() # # Use & Query # # print the length of the Dataset, i.e. 
the count of all triples in all Graphs # we should get """ 3 """ print("Printing Dataset Length:") print("---") print(len(d)) print("---") print() print() # Query one graph in the Dataset for all it's triples # we should get """ (rdflib.term.URIRef('http://example.com/subject-z'), rdflib.term.URIRef('http://example.com/predicate-z'), rdflib.term.Literal('Triple Z')) (rdflib.term.URIRef('http://example.com/subject-x'), rdflib.term.URIRef('http://example.com/predicate-x'), rdflib.term.Literal('Triple X')) """ print("Printing all triple from one Graph in the Dataset:") print("---") for triple in d.triples((None, None, None, graph_1)): print(triple) print("---") print() print() # Query the union of all graphs in the dataset for all triples # we should get Nothing: """ """ # A Dataset's default union graph does not exist by default (default_union property is False) print("Attempt #1 to print all triples in the Dataset:") print("---") for triple in d.triples((None, None, None, None)): print(triple) print("---") print() print() # Set the Dataset's default_union property to True and re-query d.default_union = True print("Attempt #2 to print all triples in the Dataset:") print("---") for triple in d.triples((None, None, None, None)): print(triple) print("---") print() print() # # Remove # # Remove Graph graph_1 from the Dataset d.remove_graph(graph_1) # printing the Dataset like this: print(d.serialize(format="trig")) # now produces a result like this: """ ex:graph-2 { ex:subject-y ex:predicate-y "Triple Y" . 
} """ print("Printing Serialised Dataset after graph_1 removal:") print("---") print(d.serialize(format="trig").strip()) print("---") print() print() rdflib-6.1.1/examples/film.py000066400000000000000000000123161415774155300161170ustar00rootroot00000000000000#!/usr/bin/env python """ film.py: a simple tool to manage your movies reviews Simon Rozet, http://atonie.org/ - manage directors and writers - manage actors - handle non IMDB uri - markdown support in comment Requires download and import of Python imdb library from https://imdbpy.github.io/ - (warning: installation will trigger automatic installation of several other packages) Usage: film.py whoami "John Doe " Initialize the store and set your name and email. film.py whoami Tell you who you are film.py http://www.imdb.com/title/tt0105236/ Review the movie "Reservoir Dogs" """ import datetime import os import sys import re import time try: import imdb except ImportError: imdb = None from rdflib import BNode, ConjunctiveGraph, URIRef, Literal, Namespace from rdflib.namespace import FOAF, DC, RDF storefn = os.path.expanduser("~/movies.n3") # storefn = '/home/simon/codes/film.dev/movies.n3' storeuri = "file://" + storefn title = "Movies viewed by %s" r_who = re.compile( r"^(.*?) 
<([a-z0-9_-]+(\.[a-z0-9_-]+)*@[a-z0-9_-]+(\.[a-z0-9_-]+)+)>$" ) IMDB = Namespace("http://www.csd.abdn.ac.uk/~ggrimnes/dev/imdb/IMDB#") REV = Namespace("http://purl.org/stuff/rev#") class Store: def __init__(self): self.graph = ConjunctiveGraph() if os.path.exists(storefn): self.graph.parse(storeuri) self.graph.bind("dc", DC) self.graph.bind("foaf", FOAF) self.graph.bind("imdb", IMDB) self.graph.bind("rev", "http://purl.org/stuff/rev#") def save(self): self.graph.serialize(storeuri, format="n3") def who(self, who=None): if who is not None: name, email = (r_who.match(who).group(1), r_who.match(who).group(2)) self.graph.add((URIRef(storeuri), DC["title"], Literal(title % name))) self.graph.add((URIRef(storeuri + "#author"), RDF.type, FOAF["Person"])) self.graph.add((URIRef(storeuri + "#author"), FOAF["name"], Literal(name))) self.graph.add((URIRef(storeuri + "#author"), FOAF["mbox"], Literal(email))) self.save() else: return self.graph.objects(URIRef(storeuri + "#author"), FOAF["name"]) def new_movie(self, movie): movieuri = URIRef("http://www.imdb.com/title/tt%s/" % movie.movieID) self.graph.add((movieuri, RDF.type, IMDB["Movie"])) self.graph.add((movieuri, DC["title"], Literal(movie["title"]))) self.graph.add((movieuri, IMDB["year"], Literal(int(movie["year"])))) self.save() def new_review(self, movie, date, rating, comment=None): review = BNode() # @@ humanize the identifier (something like #rev-$date) movieuri = URIRef("http://www.imdb.com/title/tt%s/" % movie.movieID) self.graph.add( (movieuri, REV["hasReview"], URIRef("%s#%s" % (storeuri, review))) ) self.graph.add((review, RDF.type, REV["Review"])) self.graph.add((review, DC["date"], Literal(date))) self.graph.add((review, REV["maxRating"], Literal(5))) self.graph.add((review, REV["minRating"], Literal(0))) self.graph.add((review, REV["reviewer"], URIRef(storeuri + "#author"))) self.graph.add((review, REV["rating"], Literal(rating))) if comment is not None: self.graph.add((review, REV["text"], 
Literal(comment))) self.save() def movie_is_in(self, uri): return (URIRef(uri), RDF.type, IMDB["Movie"]) in self.graph def help(): print(__doc__.split("--")[1]) def main(argv=None): if not argv: argv = sys.argv s = Store() if argv[1] in ("help", "--help", "h", "-h"): help() elif argv[1] == "whoami": if os.path.exists(storefn): print(list(s.who())[0]) else: s.who(argv[2]) elif argv[1].startswith("http://www.imdb.com/title/tt"): if s.movie_is_in(argv[1]): raise else: i = imdb.IMDb() movie = i.get_movie(argv[1][len("http://www.imdb.com/title/tt") : -1]) print("%s (%s)" % (movie["title"].encode("utf-8"), movie["year"])) for director in movie["director"]: print("directed by: %s" % director["name"].encode("utf-8")) for writer in movie["writer"]: print("written by: %s" % writer["name"].encode("utf-8")) s.new_movie(movie) rating = None while not rating or (rating > 5 or rating <= 0): try: rating = int(eval(input("Rating (on five): "))) except ValueError: rating = None date = None while not date: try: i = eval('"{}"'.format(input("Review date (YYYY-MM-DD): "))) date = datetime.datetime(*time.strptime(i, "%Y-%m-%d")[:6]) except: date = None comment = eval('"{}"'.format(input("Comment: "))) s.new_review(movie, date, rating, comment) else: help() if __name__ == "__main__": if not imdb: raise Exception( 'This example requires the IMDB library! Install with "pip install imdbpy"' ) main() rdflib-6.1.1/examples/foaf.n3000066400000000000000000000312631415774155300157750ustar00rootroot00000000000000@prefix cc: . @prefix con: . @prefix dc: . @prefix foaf: . @prefix geo: . @prefix geo1: . @prefix owl: . @prefix rdf: . @prefix rdfs: . @prefix xml: . @prefix xsd: . foaf:maker . a foaf:PersonalProfileDocument ; cc:license ; dc:title "Tim Berners-Lee's FOAF file" ; foaf:maker ; foaf:primaryTopic . a foaf:Person ; rdfs:seeAlso ; foaf:homepage ; foaf:mbox , ; foaf:mbox_sha1sum "6de4ff27ef927b9ba21ccc88257e41a2d7e7d293" ; foaf:name "Dean Jackson" . 
dc:title "timbl's blog" ; rdfs:seeAlso ; foaf:maker . foaf:member . rdfs:seeAlso ; = , ; foaf:mbox_sha1sum "70c053d15de49ff03a1bcc374e4119b40798a66e" . dc:title "Identity, Reference and the Web workshop 2006" ; con:participant . rdfs:label "The Next Wave of the Web (Plenary Panel)" ; con:participant . rdfs:label "W3C" ; rdfs:seeAlso ; con:publicHomePage ; foaf:homepage ; foaf:logo ; foaf:name "World Wide Web Consortium" . dc:creator ; dc:title "Weaving the Web: The Original Design and Ultimate Destiny of the World Wide Web" . foaf:name "Henry Story" . a foaf:Person ; foaf:img ; foaf:name "John Gage" . a foaf:Person ; foaf:name "John Klensin" . a foaf:Person ; foaf:name "John Markoff" . a foaf:Person ; = ; foaf:homepage ; foaf:img ; foaf:name "John Seely Brown" . a foaf:Person ; foaf:name "Tim Bray" . foaf:givenName "Joe" ; foaf:name "Joe Lambda" . a foaf:Person ; foaf:name "Robert Hoffmann" . dc:title "W3C Standards and Technical Reports" . a foaf:Person ; rdfs:seeAlso ; foaf:name "Coralie Mercier" . a foaf:Person ; rdfs:seeAlso ; foaf:homepage ; foaf:mbox , , ; foaf:name "Edd Dumbill" ; foaf:nick "edd" . a foaf:Person ; = ; foaf:img ; foaf:mbox ; foaf:name "Libby Miller" . foaf:name "Susie Stephens" ; foaf:organization . foaf:name "Daniel Krech" . foaf:name "Christoph Bussler" . foaf:name "Nicholas Gibbins" . foaf:name "Wendy Hall" . foaf:name "Nigel Shadbolt" . foaf:name "Les Carr" . a foaf:Person ; foaf:name "Sean Palmer" . foaf:name "Charles McCathieNevile" . foaf:name "Håkon Wium Lie" . a foaf:Person ; foaf:name "Kingsley Idehen" . a foaf:Person ; foaf:name "Norman Walsh" . foaf:name "Oshani Seneviratne" . a foaf:Person ; foaf:mailbox ; foaf:name "Lalana Kagal" . foaf:name "Peter Szolovits" . a foaf:Person ; foaf:name "Simon J. Hernandez" . a foaf:Person ; foaf:name "Tom Ilube" . foaf:name "Henrik Nielsen" . a foaf:Person ; foaf:homepage ; foaf:img ; foaf:name "Ira Fuchs" . foaf:name "Philippe Le Hégaret" . foaf:name "mc schraefel" . 
foaf:name "Shinnyih Huang" . a foaf:Person ; rdfs:seeAlso ; foaf:mbox ; foaf:name "Aaron Swartz" . foaf:name "Lee Feigenbaum" . foaf:name "Jim Hendler" . a foaf:Person ; foaf:mailbox ; foaf:name "Dave Beckett" . a foaf:Person ; foaf:name "Yolanda Gill" . foaf:mbox_sha1sum "5ac8032d5f6012aa1775ea2f63e1676bafd5e80b", "c21b7ed00d78a35efcd8e567f8fd9cca71058c5", "eccd01ba8ce2391a439e9b052a9fbf37eae9f732" ; foaf:name "Ivan Herman" . foaf:name "Kjetil Kjernsmo" . foaf:name "Ora Lassila" . foaf:name "Bijan Parsia" . foaf:name "Jennifer Golbeck" . a foaf:Person ; rdfs:label "Amy van der Hiel" ; rdfs:seeAlso ; con:familyName "van der Hiel" ; con:givenName "Amy" ; foaf:mbox ; foaf:mbox_sha1sum "1839a1cc2e719a85ea7d9007f587b2899cd94064" ; foaf:name "Amy van der Hiel" . a foaf:Person ; rdfs:seeAlso ; foaf:mbox ; foaf:name "Dan Connolly" ; foaf:nick "DanCon" . a foaf:Person ; rdfs:seeAlso ; foaf:homepage ; foaf:img , ; foaf:mbox ; foaf:name "Eric Miller" . foaf:name "Ian Jacobs" . a foaf:Person ; foaf:mbox_sha1sum "032c319f439f63efba54f4fa51bfb3a3fafedfbe" ; foaf:name "Daniel J Weitzner" . rdfs:seeAlso ; foaf:mbox ; foaf:name "Karl Dubost" . a foaf:Person ; foaf:img ; foaf:knows [ a foaf:Person ; rdfs:seeAlso ; foaf:mbox_sha1sum "669fe353dbef63d12ba11f69ace8acbec1ac8b17" ; foaf:name "Danny Ayers" ], , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , , ; foaf:name "Tim Berners-Lee" . rdflib-6.1.1/examples/foafpaths.py000066400000000000000000000022621415774155300171420ustar00rootroot00000000000000""" SPARQL 1.1 defines path operators for combining/repeating predicates in triple-patterns. We overload some Python operators on URIRefs to allow creating path operators directly in Python. 
============ ========================================= Operator Path ============ ========================================= ``p1 / p2`` Path sequence ``p1 | p2`` Path alternative ``p1 * '*'`` chain of 0 or more p's ``p1 * '+'`` chain of 1 or more p's ``p1 * '?'`` 0 or 1 p ``~p1`` p1 inverted, i.e. (s p1 o) <=> (o ~p1 s) ``-p1`` NOT p1, i.e. any property but p1 ============ ========================================= These can then be used in property position for ``s,p,o`` triple queries for any graph method. See the docs for :mod:`rdflib.paths` for the details. This example shows how to get the name of friends (i.e values two steps away x knows y, y name z) with a single query. """ from rdflib import URIRef, Graph from rdflib.namespace import FOAF if __name__ == "__main__": g = Graph() g.parse("foaf.n3") tim = URIRef("http://www.w3.org/People/Berners-Lee/card#i") print("Timbl knows:") for o in g.objects(tim, FOAF.knows / FOAF.name): print(o) rdflib-6.1.1/examples/prepared_query.py000066400000000000000000000013331415774155300202140ustar00rootroot00000000000000""" SPARQL Queries be prepared (i.e parsed and translated to SPARQL algebra) by the :meth:`rdflib.plugins.sparql.prepareQuery` method. ``initNs`` can be used instead of PREFIX values. When executing, variables can be bound with the ``initBindings`` keyword parameter. """ import rdflib from rdflib.plugins.sparql import prepareQuery from rdflib.namespace import FOAF if __name__ == "__main__": q = prepareQuery( "SELECT ?name WHERE { ?person foaf:knows/foaf:name ?name . }", initNs={"foaf": FOAF}, ) g = rdflib.Graph() g.parse("foaf.n3") tim = rdflib.URIRef("http://www.w3.org/People/Berners-Lee/card#i") for row in g.query(q, initBindings={"person": tim}): print(row.name) rdflib-6.1.1/examples/resource_example.py000066400000000000000000000025421415774155300205320ustar00rootroot00000000000000""" RDFLib has a :class:`~rdflib.resource.Resource` class, for a resource-centric API. 
The :class:`~rdflib.Graph` class also has a ``resource`` function that can be used to create resources and manipulate them by quickly adding or querying for triples where this resource is the subject. This example shows g.resource() in action. """ from rdflib import Graph, RDF, RDFS, Literal from rdflib.namespace import FOAF if __name__ == "__main__": g = Graph() # Create a Resource within graph g bob = g.resource("http://example.com/bob") # .set replaces all other values bob.set(RDF.type, FOAF.Person) bob.set(FOAF.name, Literal("Bob")) bill = g.resource("http://example.com/bill") # .add adds to existing values bill.add(RDF.type, FOAF.Person) bill.add(RDF.type, FOAF.Agent) bill.set(RDFS.label, Literal("Bill")) bill.add(FOAF.knows, bob) # Resources returned when querying are 'auto-boxed' as resources: print(f"Bill knows: {bill.value(FOAF.knows).value(FOAF.name)}") # Slicing ([] syntax) can also be used: for friend in bill[FOAF.knows]: print(f"Bill knows: {next(friend[FOAF.name])}") # Or even quicker with paths: for friend in bill[FOAF.knows / FOAF.name]: print(f"Bill knows: {friend}") # Setting single properties is also possible: bill[RDFS.label] = Literal("William") rdflib-6.1.1/examples/simple_example.py000066400000000000000000000034721415774155300201770ustar00rootroot00000000000000from rdflib import Graph, Literal, BNode, RDF from rdflib.namespace import FOAF, DC if __name__ == "__main__": store = Graph() # Bind a few prefix, namespace pairs for pretty output store.bind("dc", DC) store.bind("foaf", FOAF) # Create an identifier to use as the subject for Donna. donna = BNode() # Add triples using store's add method. store.add((donna, RDF.type, FOAF.Person)) store.add((donna, FOAF.nick, Literal("donna", lang="foo"))) store.add((donna, FOAF.name, Literal("Donna Fales"))) # Iterate over triples in store and print them out. print("--- printing raw triples ---") for s, p, o in store: print(s, p, o) # For each foaf:Person in the store print out its mbox property. 
print() print("--- printing mboxes ---") for person in store.subjects(RDF.type, FOAF["Person"]): for mbox in store.objects(person, FOAF["mbox"]): print(mbox) print("--- saving RDF to a file (donna_foaf.rdf) ---") # Serialize the store as RDF/XML to the file donna_foaf.rdf. store.serialize("donna_foaf.rdf", format="pretty-xml", max_depth=3) # Let's show off the serializers print() print("RDF Serializations:") # Serialize as Turtle (default) print("--- start: turtle ---") print(store.serialize()) print("--- end: turtle ---\n") # Serialize as XML print("--- start: rdf-xml ---") print(store.serialize(format="pretty-xml")) print("--- end: rdf-xml ---\n") # Serialize as NTriples print("--- start: ntriples ---") print(store.serialize(format="nt")) print("--- end: ntriples ---\n") # Serialize as JSON-LD # only if you have the JSON-LD plugin installed! print("--- start: JSON-LD ---") print(store.serialize(format="json-ld")) print("--- end: JSON-LD ---\n") rdflib-6.1.1/examples/slice.py000066400000000000000000000012721415774155300162660ustar00rootroot00000000000000""" RDFLib Graphs (and Resources) can be "sliced" with [] syntax This is a short-hand for iterating over triples. Combined with SPARQL paths (see ``foafpaths.py``) - quite complex queries can be realised. See :meth:`rdflib.graph.Graph.__getitem__` for details """ from rdflib import Graph, RDF from rdflib.namespace import FOAF if __name__ == "__main__": graph = Graph() graph.load("foaf.n3", format="n3") for person in graph[: RDF.type : FOAF.Person]: friends = list(graph[person : FOAF.knows * "+" / FOAF.name]) if friends: print(f"{graph.value(person, FOAF.name)}'s circle of friends:") for name in friends: print(name) rdflib-6.1.1/examples/smushing.py000066400000000000000000000027721415774155300170320ustar00rootroot00000000000000""" A FOAF smushing example. Filter a graph by normalizing all ``foaf:Persons`` into URIs based on their ``mbox_sha1sum``. 
Suppose I get two `FOAF `_ documents each talking about the same person (according to ``mbox_sha1sum``) but they each used a :class:`rdflib.term.BNode` for the subject. For this demo I've combined those two documents into one file: This filters a graph by changing every subject with a ``foaf:mbox_sha1sum`` into a new subject whose URI is based on the ``sha1sum``. This new graph might be easier to do some operations on. An advantage of this approach over other methods for collapsing BNodes is that I can incrementally process new FOAF documents as they come in without having to access my ever-growing archive. Even if another ``65b983bb397fb71849da910996741752ace8369b`` document comes in next year, I would still give it the same stable subject URI that merges with my existing data. """ from rdflib import Graph, Namespace from rdflib.namespace import FOAF STABLE = Namespace("http://example.com/person/mbox_sha1sum/") if __name__ == "__main__": g = Graph() g.parse("smushingdemo.n3", format="n3") newURI = {} # old subject : stable uri for s, p, o in g.triples((None, FOAF["mbox_sha1sum"], None)): newURI[s] = STABLE[o] out = Graph() out.bind("foaf", FOAF) for s, p, o in g: s = newURI.get(s, s) o = newURI.get(o, o) # might be linked to another person out.add((s, p, o)) print(out.serialize()) rdflib-6.1.1/examples/smushingdemo.n3000066400000000000000000000007521415774155300175630ustar00rootroot00000000000000@prefix foaf: . ## from one document _:p1 a foaf:Person; foaf:mbox_sha1sum "65b983bb397fb71849da910996741752ace8369b"; foaf:nick "mortenf"; foaf:weblog . ## from another document _:p2 a foaf:Person; foaf:mbox_sha1sum "65b983bb397fb71849da910996741752ace8369b"; foaf:nick "mortenf"; foaf:homepage ; foaf:interest . 
rdflib-6.1.1/examples/sparql_query_example.py000066400000000000000000000014531415774155300214320ustar00rootroot00000000000000""" SPARQL Query using :meth:`rdflib.graph.Graph.query` The method returns a :class:`~rdflib.query.Result`, iterating over this yields :class:`~rdflib.query.ResultRow` objects The variable bindings can be accessed as attributes of the row objects For variable names that are not valid python identifiers, dict access (i.e. with ``row[var] / __getitem__``) is also possible. :attr:`~rdflib.query.ResultRow.vars` contains the variables """ import rdflib if __name__ == "__main__": g = rdflib.Graph() g.load("foaf.n3", format="n3") # The QueryProcessor knows the FOAF prefix from the graph # which in turn knows it from reading the N3 RDF file for row in g.query("SELECT ?s WHERE { [] foaf:knows ?s .}"): print(row.s) # or row["s"] # or row[rdflib.Variable("s")] rdflib-6.1.1/examples/sparql_update_example.py000066400000000000000000000011241415774155300215420ustar00rootroot00000000000000""" SPARQL Update statements can be applied with :meth:`rdflib.graph.Graph.update` """ import rdflib if __name__ == "__main__": g = rdflib.Graph() g.load("foaf.n3", format="n3") print(f"Initially there are {len(g)} triples in the graph") g.update( """ PREFIX foaf: PREFIX dbpedia: INSERT { ?s a dbpedia:Human . } WHERE { ?s a foaf:Person . 
} """ ) print(f"After the UPDATE, there are {len(g)} triples in the graph") rdflib-6.1.1/examples/sparqlstore_example.py000066400000000000000000000035541415774155300212660ustar00rootroot00000000000000""" Simple examples showing how to use the SPARQLStore """ from rdflib import Graph, URIRef, Namespace from rdflib.plugins.stores.sparqlstore import SPARQLStore if __name__ == "__main__": dbo = Namespace("http://dbpedia.org/ontology/") # EXAMPLE 1: using a Graph with the Store type string set to "SPARQLStore" graph = Graph("SPARQLStore", identifier="http://dbpedia.org") graph.open("http://dbpedia.org/sparql") pop = graph.value(URIRef("http://dbpedia.org/resource/Berlin"), dbo.populationTotal) print( "According to DBPedia, Berlin has a population of {0:,}".format( int(pop) ).replace(",", ".") ) print() # EXAMPLE 2: using a SPARQLStore object directly st = SPARQLStore(query_endpoint="http://dbpedia.org/sparql") for p in st.objects( URIRef("http://dbpedia.org/resource/Brisbane"), dbo.populationTotal ): print( "According to DBPedia, Brisbane has a population of " "{0:,}".format(int(p), ",d") ) print() # EXAMPLE 3: doing RDFlib triple navigation using SPARQLStore as a Graph() print("Triple navigation using SPARQLStore as a Graph():") graph = Graph("SPARQLStore", identifier="http://dbpedia.org") graph.open("http://dbpedia.org/sparql") # we are asking DBPedia for 3 skos:Concept instances count = 0 from rdflib.namespace import RDF, SKOS for s in graph.subjects(predicate=RDF.type, object=SKOS.Concept): count += 1 print(f"\t- {s}") if count >= 3: break # EXAMPLE 4: using a SPARQL endpoint that requires Basic HTTP authentication # NOTE: this example won't run since the endpoint isn't live (or real) s = SPARQLStore( query_endpoint="http://fake-sparql-endpoint.com/repository/x", auth=("my_username", "my_password"), ) # do normal Graph things rdflib-6.1.1/examples/swap_primer.py000066400000000000000000000074561415774155300175310ustar00rootroot00000000000000""" This is a simple 
primer using some of the example stuff in the Primer on N3: http://www.w3.org/2000/10/swap/Primer """ from rdflib import ConjunctiveGraph, Namespace, Literal from rdflib.namespace import OWL, DC if __name__ == "__main__": # Firstly, it doesn't have to be so complex. # Here we create a "Graph" of our work. # Think of it as a blank piece of graph paper! primer = ConjunctiveGraph() myNS = Namespace("https://example.com/") primer.add((myNS.pat, myNS.knows, myNS.jo)) # or: primer.add((myNS["pat"], myNS["age"], Literal(24))) # Now, with just that, lets see how the system # recorded *way* too many details about what # you just asserted as fact. from pprint import pprint print("All the things in the Graph:") pprint(list(primer)) # just think .whatever((s, p, o)) # here we report on what we know print("==================") print("Subjects:") pprint(list(primer.subjects())) print("Predicates:") pprint(list(primer.predicates())) print("Objects:") pprint(list(primer.objects())) print("==================") # and other things that make sense print("What we know about pat:") pprint(list(primer.predicate_objects(myNS.pat))) print("Who is what age?") pprint(list(primer.subject_objects(myNS.age))) print("==================") print("==================") # Okay, so lets now work with a bigger # dataset from the example, and start # with a fresh new graph. del primer primer = ConjunctiveGraph() # Lets start with a verbatim string straight from the primer text: mySource = """ @prefix : . @prefix rdf: . @prefix rdfs: . @prefix owl: . @prefix dc: . @prefix foo: . @prefix swap: . <> dc:title "Primer - Getting into the Semantic Web and RDF using N3". :pat :knows :jo . :pat :age 24 . :al :is_child_of :pat . :pat :child :al, :chaz, :mo ; :age 24 ; :eyecolor "blue" . :Person a rdfs:Class . :Pat a :Person . :Woman a rdfs:Class; rdfs:subClassOf :Person . :sister a rdf:Property . :sister rdfs:domain :Person ; rdfs:range :Woman . :Woman = foo:FemaleAdult . :Title a rdf:Property; = dc:title . 
""" # --- End of primer code # To make this go easier to spit back out... # technically, we already created a namespace # with the object init (and it added some namespaces as well) # By default, your main namespace is the URI of your # current working directory, so lets make that simpler: primer.bind("owl", OWL) primer.bind("dc", DC) primer.bind("swap", "http://www.w3.org/2000/10/swap/") # Lets load it up! primer.parse(data=mySource, format="n3") # Now you can query, either directly straight into a list: print() print("Printing bigger example's triples:") for i in [(x, y, z) for x, y, z in primer]: print(i) # or spit it back out (mostly) the way we created it: print() print("Printing bigger example as N3:") print(primer.serialize(format="n3")) # for more insight into things already done, lets see the namespaces print() print("Printing bigger example's namespaces:") for n in list(primer.namespaces()): print(n) # lets ask something about the data, using a SPARQL query print() print("Who are pat's children?") q = "SELECT ?child WHERE { :pat :child ?child }" for r in primer.query(q): print(r) rdflib-6.1.1/examples/transitive.py000066400000000000000000000051001415774155300173510ustar00rootroot00000000000000""" An example illustrating how to use the :meth:`~rdflib.graph.Graph.transitive_subjects` and :meth:`~rdflib.graph.Graph.transitive_objects` graph methods Formal definition ^^^^^^^^^^^^^^^^^^ The :meth:`~rdflib.graph.Graph.transitive_objects` method finds all nodes such that there is a path from subject to one of those nodes using only the predicate property in the triples. The :meth:`~rdflib.graph.Graph.transitive_subjects` method is similar; it finds all nodes such that there is a path from the node to the object using only the predicate property. 
Informal description, with an example ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ In brief, :meth:`~rdflib.graph.Graph.transitive_objects` walks forward in a graph using a particular property, and :meth:`~rdflib.graph.Graph.transitive_subjects` walks backward. A good example uses a property ``ex:parent``, the semantics of which are biological parentage. The :meth:`~rdflib.graph.Graph.transitive_objects` method would get all the ancestors of a particular person (all nodes such that there is a parent path between the person and the object). The :meth:`~rdflib.graph.Graph.transitive_subjects` method would get all the descendants of a particular person (all nodes such that there is a parent path between the node and the person). So, say that your URI is ``ex:person``. This example would get all of your (known) ancestors, and then get all the (known) descendants of your maternal grandmother. .. warning:: The :meth:`transitive_objects` method has the start node as the *first* argument, but the :meth:`transitive_subjects` method has the start node as the *second* argument. User-defined transitive closures ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The method :meth:`~rdflib.graph.Graph.transitiveClosure` returns transtive closures of user-defined functions. 
""" if __name__ == "__main__": from rdflib import ConjunctiveGraph, URIRef person = URIRef("ex:person") dad = URIRef("ex:d") mom = URIRef("ex:m") momOfDad = URIRef("ex:gm0") momOfMom = URIRef("ex:gm1") dadOfDad = URIRef("ex:gf0") dadOfMom = URIRef("ex:gf1") parent = URIRef("ex:parent") g = ConjunctiveGraph() g.add((person, parent, dad)) g.add((person, parent, mom)) g.add((dad, parent, momOfDad)) g.add((dad, parent, dadOfDad)) g.add((mom, parent, momOfMom)) g.add((mom, parent, dadOfMom)) print("Parents, forward from `ex:person`:") for i in g.transitive_objects(person, parent): print(i) print("Parents, *backward* from `ex:gm1`:") for i in g.transitive_subjects(parent, momOfMom): print(i) rdflib-6.1.1/pull_request_template.md000066400000000000000000000000521415774155300177330ustar00rootroot00000000000000Fixes # ## Proposed Changes - - - rdflib-6.1.1/rdflib/000077500000000000000000000000001415774155300142375ustar00rootroot00000000000000rdflib-6.1.1/rdflib/__init__.py000066400000000000000000000110521415774155300163470ustar00rootroot00000000000000"""A pure Python package providing the core RDF constructs. The packages is intended to provide the core RDF types and interfaces for working with RDF. The package defines a plugin interface for parsers, stores, and serializers that other packages can use to implement parsers, stores, and serializers that will plug into the rdflib package. The primary interface `rdflib` exposes to work with RDF is `rdflib.graph.Graph`. A tiny example: >>> from rdflib import Graph, URIRef, Literal >>> g = Graph() >>> result = g.parse("http://www.w3.org/2000/10/swap/test/meet/blue.rdf") >>> print("graph has %s statements." % len(g)) graph has 4 statements. >>> >>> for s, p, o in g: ... if (s, p, o) not in g: ... raise Exception("It better be!") >>> s = g.serialize(format='nt') >>> >>> sorted(g) == [ ... (URIRef("http://meetings.example.com/cal#m1"), ... URIRef("http://www.example.org/meeting_organization#homePage"), ... 
URIRef("http://meetings.example.com/m1/hp")), ... (URIRef("http://www.example.org/people#fred"), ... URIRef("http://www.example.org/meeting_organization#attending"), ... URIRef("http://meetings.example.com/cal#m1")), ... (URIRef("http://www.example.org/people#fred"), ... URIRef("http://www.example.org/personal_details#GivenName"), ... Literal("Fred")), ... (URIRef("http://www.example.org/people#fred"), ... URIRef("http://www.example.org/personal_details#hasEmail"), ... URIRef("mailto:fred@example.com")) ... ] True """ __docformat__ = "restructuredtext en" # The format of the __version__ line is matched by a regex in setup.py __version__ = "6.1.1" __date__ = "2021-12-20" __all__ = [ "URIRef", "BNode", "Literal", "Variable", "Namespace", "Dataset", "Graph", "ConjunctiveGraph", "BRICK", "CSVW", "DC", "DCAT", "DCMITYPE", "DCTERMS", "DOAP", "FOAF", "ODRL2", "ORG", "OWL", "PROF", "PROV", "QB", "RDF", "RDFS", "SDO", "SH", "SKOS", "SOSA", "SSN", "TIME", "VANN", "VOID", "XSD", "util", ] import sys import logging logger = logging.getLogger(__name__) try: import __main__ if ( not hasattr(__main__, "__file__") and sys.stdout is not None and sys.stderr.isatty() ): # show log messages in interactive mode logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler()) del __main__ except ImportError: # Main already imported from elsewhere import warnings warnings.warn("__main__ already imported", ImportWarning) del warnings del sys NORMALIZE_LITERALS = True """ If True - Literals lexical forms are normalized when created. I.e. the lexical forms is parsed according to data-type, then the stored lexical form is the re-serialized value that was parsed. Illegal values for a datatype are simply kept. The normalized keyword for Literal.__new__ can override this. 
For example: >>> from rdflib import Literal,XSD >>> Literal("01", datatype=XSD.int) rdflib.term.Literal("1", datatype=rdflib.term.URIRef("http://www.w3.org/2001/XMLSchema#integer")) This flag may be changed at any time, but will only affect literals created after that time, previously created literals will remain (un)normalized. """ DAWG_LITERAL_COLLATION = False """ DAWG_LITERAL_COLLATION determines how literals are ordered or compared to each other. In SPARQL, applying the >,<,>=,<= operators to literals of incompatible data-types is an error, i.e: Literal(2)>Literal('cake') is neither true nor false, but an error. This is a problem in PY3, where lists of Literals of incompatible types can no longer be sorted. Setting this flag to True gives you strict DAWG/SPARQL compliance, setting it to False will order Literals with incompatible datatypes by datatype URI In particular, this determines how the rich comparison operators for Literal work, eq, __neq__, __lt__, etc. """ from rdflib.term import URIRef, BNode, Literal, Variable from rdflib.graph import Dataset, Graph, ConjunctiveGraph from rdflib import plugin from rdflib import query from rdflib.namespace import ( BRICK, CSVW, DC, DCAT, DCMITYPE, DCTERMS, DOAP, FOAF, ODRL2, ORG, OWL, PROF, PROV, QB, RDF, RDFS, SDO, SH, SKOS, SOSA, SSN, TIME, VANN, VOID, XMLNS, XSD, Namespace, ) # tedious sop to flake8 assert plugin assert query from rdflib import util from rdflib.container import * rdflib-6.1.1/rdflib/collection.py000066400000000000000000000227131415774155300167510ustar00rootroot00000000000000from rdflib.namespace import RDF from rdflib.term import BNode from rdflib.term import Literal __all__ = ["Collection"] class Collection(object): __doc__ = """ See "Emulating container types": https://docs.python.org/reference/datamodel.html#emulating-container-types >>> from rdflib.graph import Graph >>> from pprint import pprint >>> listName = BNode() >>> g = Graph('Memory') >>> listItem1 = BNode() >>> listItem2 = BNode() 
>>> g.add((listName, RDF.first, Literal(1))) # doctest: +ELLIPSIS )> >>> g.add((listName, RDF.rest, listItem1)) # doctest: +ELLIPSIS )> >>> g.add((listItem1, RDF.first, Literal(2))) # doctest: +ELLIPSIS )> >>> g.add((listItem1, RDF.rest, listItem2)) # doctest: +ELLIPSIS )> >>> g.add((listItem2, RDF.rest, RDF.nil)) # doctest: +ELLIPSIS )> >>> g.add((listItem2, RDF.first, Literal(3))) # doctest: +ELLIPSIS )> >>> c = Collection(g,listName) >>> pprint([term.n3() for term in c]) [u'"1"^^', u'"2"^^', u'"3"^^'] >>> Literal(1) in c True >>> len(c) 3 >>> c._get_container(1) == listItem1 True >>> c.index(Literal(2)) == 1 True """ def __init__(self, graph, uri, seq=[]): self.graph = graph self.uri = uri or BNode() self += seq def n3(self): """ >>> from rdflib.graph import Graph >>> listName = BNode() >>> g = Graph('Memory') >>> listItem1 = BNode() >>> listItem2 = BNode() >>> g.add((listName, RDF.first, Literal(1))) # doctest: +ELLIPSIS )> >>> g.add((listName, RDF.rest, listItem1)) # doctest: +ELLIPSIS )> >>> g.add((listItem1, RDF.first, Literal(2))) # doctest: +ELLIPSIS )> >>> g.add((listItem1, RDF.rest, listItem2)) # doctest: +ELLIPSIS )> >>> g.add((listItem2, RDF.rest, RDF.nil)) # doctest: +ELLIPSIS )> >>> g.add((listItem2, RDF.first, Literal(3))) # doctest: +ELLIPSIS )> >>> c = Collection(g, listName) >>> print(c.n3()) #doctest: +NORMALIZE_WHITESPACE ( "1"^^ "2"^^ "3"^^ ) """ return "( %s )" % (" ".join([i.n3() for i in self])) def _get_container(self, index): """Gets the first, rest holding node at index.""" assert isinstance(index, int) graph = self.graph container = self.uri i = 0 while i < index: i += 1 container = graph.value(container, RDF.rest) if container is None: break return container def __len__(self): """length of items in collection.""" return len(list(self.graph.items(self.uri))) def index(self, item): """ Returns the 0-based numerical index of the item in the list """ listName = self.uri index = 0 while True: if (listName, RDF.first, item) in self.graph: 
return index else: newLink = list(self.graph.objects(listName, RDF.rest)) index += 1 if newLink == [RDF.nil]: raise ValueError("%s is not in %s" % (item, self.uri)) elif not newLink: raise Exception("Malformed RDF Collection: %s" % self.uri) else: assert len(newLink) == 1, "Malformed RDF Collection: %s" % self.uri listName = newLink[0] def __getitem__(self, key): """TODO""" c = self._get_container(key) if c: v = self.graph.value(c, RDF.first) if v: return v else: raise KeyError(key) else: raise IndexError(key) def __setitem__(self, key, value): """TODO""" c = self._get_container(key) if c: self.graph.set((c, RDF.first, value)) else: raise IndexError(key) def __delitem__(self, key): """ >>> from rdflib.namespace import RDF, RDFS >>> from rdflib import Graph >>> from pprint import pformat >>> g = Graph() >>> a = BNode('foo') >>> b = BNode('bar') >>> c = BNode('baz') >>> g.add((a, RDF.first, RDF.type)) # doctest: +ELLIPSIS )> >>> g.add((a, RDF.rest, b)) # doctest: +ELLIPSIS )> >>> g.add((b, RDF.first, RDFS.label)) # doctest: +ELLIPSIS )> >>> g.add((b, RDF.rest, c)) # doctest: +ELLIPSIS )> >>> g.add((c, RDF.first, RDFS.comment)) # doctest: +ELLIPSIS )> >>> g.add((c, RDF.rest, RDF.nil)) # doctest: +ELLIPSIS )> >>> len(g) 6 >>> def listAncestry(node, graph): ... for i in graph.subjects(RDF.rest, node): ... yield i >>> [str(node.n3()) ... 
for node in g.transitiveClosure(listAncestry, RDF.nil)] ['_:baz', '_:bar', '_:foo'] >>> lst = Collection(g, a) >>> len(lst) 3 >>> b == lst._get_container(1) True >>> c == lst._get_container(2) True >>> del lst[1] >>> len(lst) 2 >>> len(g) 4 """ self[key] # to raise any potential key exceptions graph = self.graph current = self._get_container(key) assert current if len(self) == 1 and key > 0: pass elif key == len(self) - 1: # the tail priorLink = self._get_container(key - 1) self.graph.set((priorLink, RDF.rest, RDF.nil)) graph.remove((current, None, None)) else: next = self._get_container(key + 1) prior = self._get_container(key - 1) assert next and prior graph.remove((current, None, None)) graph.set((prior, RDF.rest, next)) def __iter__(self): """Iterator over items in Collections""" return self.graph.items(self.uri) def _end(self): # find end of list container = self.uri while True: rest = self.graph.value(container, RDF.rest) if rest is None or rest == RDF.nil: return container else: container = rest def append(self, item): """ >>> from rdflib.graph import Graph >>> listName = BNode() >>> g = Graph() >>> c = Collection(g,listName,[Literal(1),Literal(2)]) >>> links = [ ... 
list(g.subjects(object=i, predicate=RDF.first))[0] for i in c] >>> len([i for i in links if (i, RDF.rest, RDF.nil) in g]) 1 """ end = self._end() if (end, RDF.first, None) in self.graph: # append new node to the end of the linked list node = BNode() self.graph.set((end, RDF.rest, node)) end = node self.graph.add((end, RDF.first, item)) self.graph.add((end, RDF.rest, RDF.nil)) return self def __iadd__(self, other): end = self._end() self.graph.remove((end, RDF.rest, None)) for item in other: if (end, RDF.first, None) in self.graph: nxt = BNode() self.graph.add((end, RDF.rest, nxt)) end = nxt self.graph.add((end, RDF.first, item)) self.graph.add((end, RDF.rest, RDF.nil)) return self def clear(self): container = self.uri graph = self.graph while container: rest = graph.value(container, RDF.rest) graph.remove((container, RDF.first, None)) graph.remove((container, RDF.rest, None)) container = rest return self def test(): import doctest doctest.testmod() if __name__ == "__main__": test() from rdflib import Graph g = Graph() c = Collection(g, BNode()) assert len(c) == 0 c = Collection(g, BNode(), [Literal("1"), Literal("2"), Literal("3"), Literal("4")]) assert len(c) == 4 assert c[1] == Literal("2"), c[1] del c[1] assert list(c) == [Literal("1"), Literal("3"), Literal("4")], list(c) try: del c[500] except IndexError: pass c.append(Literal("5")) print(list(c)) for i in c: print(i) del c[3] c.clear() assert len(c) == 0 rdflib-6.1.1/rdflib/compare.py000066400000000000000000000517301415774155300162450ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ A collection of utilities for canonicalizing and inspecting graphs. Among other things, they solve of the problem of deterministic bnode comparisons. Warning: the time to canonicalize bnodes may increase exponentially on degenerate larger graphs. Use with care! Example of comparing two graphs:: >>> g1 = Graph().parse(format='n3', data=''' ... @prefix : . ... :rel ... , ... [ :label "Same" ], ... , ... [ :label "A" ] . ... 
''') >>> g2 = Graph().parse(format='n3', data=''' ... @prefix : . ... :rel ... , ... [ :label "Same" ], ... , ... [ :label "B" ] . ... ''') >>> >>> iso1 = to_isomorphic(g1) >>> iso2 = to_isomorphic(g2) These are not isomorphic:: >>> iso1 == iso2 False Diff the two graphs:: >>> in_both, in_first, in_second = graph_diff(iso1, iso2) Present in both:: >>> def dump_nt_sorted(g): ... for l in sorted(g.serialize(format='nt').splitlines()): ... if l: print(l.decode('ascii')) >>> dump_nt_sorted(in_both) #doctest: +SKIP . _:cbcaabaaba17fecbc304a64f8edee4335e . _:cbcaabaaba17fecbc304a64f8edee4335e "Same" . Only in first:: >>> dump_nt_sorted(in_first) #doctest: +SKIP . _:cb124e4c6da0579f810c0ffe4eff485bd9 . _:cb124e4c6da0579f810c0ffe4eff485bd9 "A" . Only in second:: >>> dump_nt_sorted(in_second) #doctest: +SKIP . _:cb558f30e21ddfc05ca53108348338ade8 . _:cb558f30e21ddfc05ca53108348338ade8 "B" . """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # TODO: # - Doesn't handle quads. # - Add warning and/or safety mechanism before working on large graphs? # - use this in existing Graph.isomorphic? 
__all__ = [ "IsomorphicGraph", "to_isomorphic", "isomorphic", "to_canonical_graph", "graph_diff", "similar", ] from rdflib.graph import Graph, ConjunctiveGraph, ReadOnlyGraphAggregate from rdflib.term import BNode, Node, URIRef from hashlib import sha256 from datetime import datetime from collections import defaultdict from typing import ( Set, Dict, TYPE_CHECKING, Union, List, Tuple, Callable, Optional, Iterator, ) if TYPE_CHECKING: from _hashlib import HASH def _total_seconds(td): result = td.days * 24 * 60 * 60 result += td.seconds result += td.microseconds / 1000000.0 return result class _runtime(object): def __init__(self, label): self.label = label def __call__(self, f): if self.label is None: self.label = f.__name__ + "_runtime" def wrapped_f(*args, **kwargs): start = datetime.now() result = f(*args, **kwargs) if "stats" in kwargs and kwargs["stats"] is not None: stats = kwargs["stats"] stats[self.label] = _total_seconds(datetime.now() - start) return result return wrapped_f class _call_count(object): def __init__(self, label): self.label = label def __call__(self, f): if self.label is None: self.label = f.__name__ + "_runtime" def wrapped_f(*args, **kwargs): if "stats" in kwargs and kwargs["stats"] is not None: stats = kwargs["stats"] if self.label not in stats: stats[self.label] = 0 stats[self.label] += 1 return f(*args, **kwargs) return wrapped_f class IsomorphicGraph(ConjunctiveGraph): """An implementation of the RGDA1 graph digest algorithm. An implementation of RGDA1 (publication below), a combination of Sayers & Karp's graph digest algorithm using sum and SHA-256 and traces , an average case polynomial time algorithm for graph canonicalization. McCusker, J. P. (2015). WebSig: A Digital Signature Framework for the Web. Rensselaer Polytechnic Institute, Troy, NY. 
http://gradworks.umi.com/3727015.pdf """ def __init__(self, **kwargs): super(IsomorphicGraph, self).__init__(**kwargs) def __eq__(self, other): """Graph isomorphism testing.""" if not isinstance(other, IsomorphicGraph): return False elif len(self) != len(other): return False return self.internal_hash() == other.internal_hash() def __ne__(self, other): """Negative graph isomorphism testing.""" return not self.__eq__(other) def __hash__(self): return super(IsomorphicGraph, self).__hash__() def graph_digest(self, stats=None): """Synonym for IsomorphicGraph.internal_hash.""" return self.internal_hash(stats=stats) def internal_hash(self, stats=None): """ This is defined instead of __hash__ to avoid a circular recursion scenario with the Memory store for rdflib which requires a hash lookup in order to return a generator of triples. """ return _TripleCanonicalizer(self).to_hash(stats=stats) HashFunc = Callable[[str], int] ColorItem = Tuple[Union[int, str], URIRef, Union[int, str]] ColorItemTuple = Tuple[ColorItem, ...] 
HashCache = Optional[Dict[ColorItemTuple, str]] Stats = Dict[str, Union[int, str]] class Color: def __init__( self, nodes: List[Node], hashfunc: HashFunc, color: ColorItemTuple = (), hash_cache: HashCache = None, ): if hash_cache is None: hash_cache = {} self._hash_cache = hash_cache self.color = color self.nodes = nodes self.hashfunc = hashfunc self._hash_color = None def __str__(self): nodes, color = self.key() return "Color %s (%s nodes)" % (color, nodes) def key(self): return (len(self.nodes), self.hash_color()) def hash_color(self, color: Optional[Tuple[ColorItem, ...]] = None) -> str: if color is None: color = self.color if color in self._hash_cache: return self._hash_cache[color] def stringify(x): if isinstance(x, Node): return x.n3() else: return str(x) if isinstance(color, Node): return stringify(color) value = 0 for triple in color: value += self.hashfunc(" ".join([stringify(x) for x in triple])) val: str = "%x" % value self._hash_cache[color] = val return val def distinguish(self, W: "Color", graph: Graph): colors: Dict[str, Color] = {} for n in self.nodes: new_color: Tuple[ColorItem, ...] 
= list(self.color) # type: ignore[assignment] for node in W.nodes: new_color += [ # type: ignore[operator] (1, p, W.hash_color()) for s, p, o in graph.triples((n, None, node)) ] new_color += [ # type: ignore[operator] (W.hash_color(), p, 3) for s, p, o in graph.triples((node, None, n)) ] new_color = tuple(new_color) new_hash_color = self.hash_color(new_color) if new_hash_color not in colors: c = Color([], self.hashfunc, new_color, hash_cache=self._hash_cache) colors[new_hash_color] = c colors[new_hash_color].nodes.append(n) return colors.values() def discrete(self): return len(self.nodes) == 1 def copy(self): return Color( self.nodes[:], self.hashfunc, self.color, hash_cache=self._hash_cache ) _TripleT = List[Node] _HashT = Callable[[], "HASH"] class _TripleCanonicalizer(object): def __init__(self, graph: Graph, hashfunc: _HashT = sha256): self.graph = graph def _hashfunc(s: str): h = hashfunc() h.update(str(s).encode("utf8")) return int(h.hexdigest(), 16) self._hash_cache: HashCache = {} self.hashfunc = _hashfunc def _discrete(self, coloring: List[Color]) -> bool: return len([c for c in coloring if not c.discrete()]) == 0 def _initial_color(self) -> List[Color]: """Finds an initial color for the graph. Finds an initial color of the graph by finding all blank nodes and non-blank nodes that are adjacent. Nodes that are not adjacent to blank nodes are not included, as they are a) already colored (by URI or literal) and b) do not factor into the color of any blank node. 
""" bnodes: Set[BNode] = set() others = set() self._neighbors = defaultdict(set) for s, p, o in self.graph: nodes = set([s, p, o]) b = set([x for x in nodes if isinstance(x, BNode)]) if len(b) > 0: others |= nodes - b bnodes |= b if isinstance(s, BNode): self._neighbors[s].add(o) if isinstance(o, BNode): self._neighbors[o].add(s) if isinstance(p, BNode): self._neighbors[p].add(s) self._neighbors[p].add(p) if len(bnodes) > 0: return [Color(list(bnodes), self.hashfunc, hash_cache=self._hash_cache)] + [ Color([x], self.hashfunc, x, hash_cache=self._hash_cache) for x in others ] else: return [] def _individuate(self, color, individual): new_color = list(color.color) new_color.append((len(color.nodes),)) color.nodes.remove(individual) c = Color( [individual], self.hashfunc, tuple(new_color), hash_cache=self._hash_cache ) return c def _get_candidates(self, coloring: List[Color]) -> Iterator[Tuple[Node, Color]]: for c in [c for c in coloring if not c.discrete()]: for node in c.nodes: yield node, c def _refine(self, coloring: List[Color], sequence: List[Color]) -> List[Color]: sequence = sorted(sequence, key=lambda x: x.key(), reverse=True) coloring = coloring[:] while len(sequence) > 0 and not self._discrete(coloring): W = sequence.pop() for c in coloring[:]: if len(c.nodes) > 1 or isinstance(c.nodes[0], BNode): colors = sorted( c.distinguish(W, self.graph), key=lambda x: x.key(), reverse=True, ) coloring.remove(c) coloring.extend(colors) try: si = sequence.index(c) sequence = sequence[:si] + colors + sequence[si + 1 :] except ValueError: sequence = colors[1:] + sequence combined_colors: List[Color] = [] combined_color_map: Dict[str, Color] = dict() for color in coloring: color_hash = color.hash_color() # This is a hash collision, and be combined into a single color for individuation. 
if color_hash in combined_color_map: combined_color_map[color_hash].nodes.extend(color.nodes) else: combined_colors.append(color) combined_color_map[color_hash] = color return combined_colors @_runtime("to_hash_runtime") def to_hash(self, stats: Optional[Stats] = None): result = 0 for triple in self.canonical_triples(stats=stats): result += self.hashfunc(" ".join([x.n3() for x in triple])) if stats is not None: stats["graph_digest"] = "%x" % result return result def _experimental_path(self, coloring: List[Color]) -> List[Color]: coloring = [c.copy() for c in coloring] while not self._discrete(coloring): color = [x for x in coloring if not x.discrete()][0] node = color.nodes[0] new_color = self._individuate(color, node) coloring.append(new_color) coloring = self._refine(coloring, [new_color]) return coloring def _create_generator( self, colorings: List[List[Color]], groupings: Optional[Dict[Node, Set[Node]]] = None, ) -> Dict[Node, Set[Node]]: if not groupings: groupings = defaultdict(set) for group in zip(*colorings): g = set([c.nodes[0] for c in group]) for n in group: g |= groupings[n] for n in g: groupings[n] = g return groupings @_call_count("individuations") def _traces( self, coloring: List[Color], stats: Optional[Stats] = None, depth: List[int] = [0], ) -> List[Color]: if stats is not None and "prunings" not in stats: stats["prunings"] = 0 depth[0] += 1 candidates = self._get_candidates(coloring) best: List[List[Color]] = [] best_score = None best_experimental_score = None last_coloring = None generator: Dict[Node, Set[Node]] = defaultdict(set) visited: Set[Node] = set() for candidate, color in candidates: if candidate in generator: v = generator[candidate] & visited if len(v) > 0: visited.add(candidate) continue visited.add(candidate) coloring_copy: List[Color] = [] color_copy = None for c in coloring: c_copy = c.copy() coloring_copy.append(c_copy) if c == color: color_copy = c_copy new_color = self._individuate(color_copy, candidate) 
coloring_copy.append(new_color) refined_coloring = self._refine(coloring_copy, [new_color]) color_score = tuple([c.key() for c in refined_coloring]) experimental = self._experimental_path(coloring_copy) experimental_score = set([c.key() for c in experimental]) if last_coloring: generator = self._create_generator( # type: ignore[unreachable] [last_coloring, experimental], generator ) last_coloring = experimental if best_score is None or best_score < color_score: # type: ignore[unreachable] best = [refined_coloring] best_score = color_score best_experimental_score = experimental_score elif best_score > color_score: # type: ignore[unreachable] # prune this branch. if stats is not None: stats["prunings"] += 1 elif experimental_score != best_experimental_score: best.append(refined_coloring) else: # prune this branch. if stats is not None: stats["prunings"] += 1 discrete: List[List[Color]] = [x for x in best if self._discrete(x)] if len(discrete) == 0: best_score = None best_depth = None for coloring in best: d = [depth[0]] new_color = self._traces(coloring, stats=stats, depth=d) color_score = tuple([c.key() for c in refined_coloring]) if best_score is None or color_score > best_score: # type: ignore[unreachable] discrete = [new_color] best_score = color_score best_depth = d[0] depth[0] = best_depth # type: ignore[assignment] return discrete[0] def canonical_triples(self, stats: Optional[Stats] = None): if stats is not None: start_coloring = datetime.now() coloring = self._initial_color() if stats is not None: stats["triple_count"] = len(self.graph) stats["adjacent_nodes"] = max(0, len(coloring) - 1) coloring = self._refine(coloring, coloring[:]) if stats is not None: stats["initial_coloring_runtime"] = _total_seconds( datetime.now() - start_coloring ) stats["initial_color_count"] = len(coloring) if not self._discrete(coloring): depth = [0] coloring = self._traces(coloring, stats=stats, depth=depth) if stats is not None: stats["tree_depth"] = depth[0] elif stats is not 
None: stats["individuations"] = 0 stats["tree_depth"] = 0 if stats is not None: stats["color_count"] = len(coloring) bnode_labels = dict([(c.nodes[0], c.hash_color()) for c in coloring]) if stats is not None: stats["canonicalize_triples_runtime"] = _total_seconds( datetime.now() - start_coloring ) for triple in self.graph: result = tuple(self._canonicalize_bnodes(triple, bnode_labels)) yield result def _canonicalize_bnodes( self, triple: Tuple[Node, Node, Node], labels: Dict[Node, str] ): for term in triple: if isinstance(term, BNode): yield BNode(value="cb%s" % labels[term]) else: yield term def to_isomorphic(graph): if isinstance(graph, IsomorphicGraph): return graph result = IsomorphicGraph() if hasattr(graph, "identifier"): result = IsomorphicGraph(identifier=graph.identifier) result += graph return result def isomorphic(graph1, graph2): """Compare graph for equality. Uses an algorithm to compute unique hashes which takes bnodes into account. Examples:: >>> g1 = Graph().parse(format='n3', data=''' ... @prefix : . ... :rel . ... :rel . ... :rel [ :label "A bnode." ] . ... ''') >>> g2 = Graph().parse(format='n3', data=''' ... @prefix ns: . ... ns:rel [ ns:label "A bnode." ] . ... ns:rel , ... . ... ''') >>> isomorphic(g1, g2) True >>> g3 = Graph().parse(format='n3', data=''' ... @prefix : . ... :rel . ... :rel . ... :rel . ... ''') >>> isomorphic(g1, g3) False """ gd1 = _TripleCanonicalizer(graph1).to_hash() gd2 = _TripleCanonicalizer(graph2).to_hash() return gd1 == gd2 def to_canonical_graph(g1, stats=None): """Creates a canonical, read-only graph. Creates a canonical, read-only graph where all bnode id:s are based on deterministical SHA-256 checksums, correlated with the graph contents. 
""" graph = Graph() graph += _TripleCanonicalizer(g1).canonical_triples(stats=stats) return ReadOnlyGraphAggregate([graph]) def graph_diff(g1, g2): """Returns three sets of triples: "in both", "in first" and "in second".""" # bnodes have deterministic values in canonical graphs: cg1 = to_canonical_graph(g1) cg2 = to_canonical_graph(g2) in_both = cg1 * cg2 in_first = cg1 - cg2 in_second = cg2 - cg1 return (in_both, in_first, in_second) _MOCK_BNODE = BNode() def similar(g1, g2): """Checks if the two graphs are "similar". Checks if the two graphs are "similar", by comparing sorted triples where all bnodes have been replaced by a singular mock bnode (the ``_MOCK_BNODE``). This is a much cheaper, but less reliable, alternative to the comparison algorithm in ``isomorphic``. """ return all(t1 == t2 for (t1, t2) in _squashed_graphs_triples(g1, g2)) def _squashed_graphs_triples(g1, g2): for (t1, t2) in zip(sorted(_squash_graph(g1)), sorted(_squash_graph(g2))): yield t1, t2 def _squash_graph(graph): return (_squash_bnodes(triple) for triple in graph) def _squash_bnodes(triple): return tuple((isinstance(t, BNode) and _MOCK_BNODE) or t for t in triple) rdflib-6.1.1/rdflib/compat.py000066400000000000000000000056531415774155300161050ustar00rootroot00000000000000""" Utility functions and objects to ease Python 2/3 compatibility, and different versions of support libraries. 
""" import re import codecs import warnings import typing as t if t.TYPE_CHECKING: import xml.etree.ElementTree as etree else: try: from lxml import etree except ImportError: import xml.etree.ElementTree as etree try: etree_register_namespace = etree.register_namespace except AttributeError: import xml.etree.ElementTree as etreenative def etree_register_namespace(prefix, uri): etreenative._namespace_map[uri] = prefix def cast_bytes(s, enc="utf-8"): if isinstance(s, str): return s.encode(enc) return s def ascii(stream): return codecs.getreader("ascii")(stream) def bopen(*args, **kwargs): return open(*args, mode="rb", **kwargs) long_type = int def sign(n): if n < 0: return -1 if n > 0: return 1 return 0 r_unicodeEscape = re.compile(r"(\\u[0-9A-Fa-f]{4}|\\U[0-9A-Fa-f]{8})") def _unicodeExpand(s): return r_unicodeEscape.sub(lambda m: chr(int(m.group(0)[2:], 16)), s) narrow_build = False try: chr(0x10FFFF) except ValueError: narrow_build = True if narrow_build: def _unicodeExpand(s): try: return r_unicodeEscape.sub(lambda m: chr(int(m.group(0)[2:], 16)), s) except ValueError: warnings.warn( "Encountered a unicode char > 0xFFFF in a narrow python build. " "Trying to degrade gracefully, but this can cause problems " "later when working with the string:\n%s" % s ) return r_unicodeEscape.sub( lambda m: codecs.decode(m.group(0), "unicode_escape"), s ) def decodeStringEscape(s): r""" s is byte-string - replace \ escapes in string """ s = s.replace("\\t", "\t") s = s.replace("\\n", "\n") s = s.replace("\\r", "\r") s = s.replace("\\b", "\b") s = s.replace("\\f", "\f") s = s.replace('\\"', '"') s = s.replace("\\'", "'") s = s.replace("\\\\", "\\") return s # return _unicodeExpand(s) # hmm - string escape doesn't do unicode escaping def decodeUnicodeEscape(s): """ s is a unicode string replace ``\\n`` and ``\\u00AC`` unicode escapes """ if "\\" not in s: # Most of times, there are no backslashes in strings. # In the general case, it could use maketrans and translate. 
return s s = s.replace("\\t", "\t") s = s.replace("\\n", "\n") s = s.replace("\\r", "\r") s = s.replace("\\b", "\b") s = s.replace("\\f", "\f") s = s.replace('\\"', '"') s = s.replace("\\'", "'") s = s.replace("\\\\", "\\") s = _unicodeExpand(s) # hmm - string escape doesn't do unicode escaping return s # Migration to abc in Python 3.8 try: from collections.abc import Mapping, MutableMapping except: from collections import Mapping, MutableMapping rdflib-6.1.1/rdflib/container.py000066400000000000000000000173701415774155300166030ustar00rootroot00000000000000from rdflib.namespace import RDF from rdflib.term import BNode, URIRef from random import randint __all__ = ["Container", "Bag", "Seq", "Alt", "NoElementException"] class Container(object): """A class for constructing RDF containers, as per https://www.w3.org/TR/rdf11-mt/#rdf-containers Basic usage, creating a ``Bag`` and adding to it:: >>> from rdflib import Graph, BNode, Literal, Bag >>> g = Graph() >>> b = Bag(g, BNode(), [Literal("One"), Literal("Two"), Literal("Three")]) >>> print(g.serialize(format="turtle")) @prefix rdf: . [] a rdf:Bag ; rdf:_1 "One" ; rdf:_2 "Two" ; rdf:_3 "Three" . >>> # print out an item using an index reference >>> print(b[2]) Two >>> # add a new item >>> b.append(Literal("Hello")) # doctest: +ELLIPSIS >>> print(g.serialize(format="turtle")) @prefix rdf: . [] a rdf:Bag ; rdf:_1 "One" ; rdf:_2 "Two" ; rdf:_3 "Three" ; rdf:_4 "Hello" . 
""" def __init__(self, graph, uri, seq=[], rtype="Bag"): """Creates a Container :param graph: a Graph instance :param uri: URI or Blank Node of the Container :param seq: the elements of the Container :param rtype: the type of Container, one of "Bag", "Seq" or "Alt" """ self.graph = graph self.uri = uri or BNode() self._len = 0 self._rtype = rtype # rdf:Bag or rdf:Seq or rdf:Alt self.append_multiple(seq) # adding triple corresponding to container type self.graph.add((self.uri, RDF.type, RDF[self._rtype])) def n3(self): items = [] for i in range(len(self)): v = self[i + 1] items.append(v) return "( %s )" % " ".join([a.n3() for a in items]) def _get_container(self): """Returns the URI of the container""" return self.uri def __len__(self): """Number of items in container""" return self._len def type_of_conatiner(self): return self._rtype def index(self, item): """Returns the 1-based numerical index of the item in the container""" pred = self.graph.predicates(self.uri, item) if not pred: raise ValueError("%s is not in %s" % (item, "container")) LI_INDEX = URIRef(str(RDF) + "_") i = None for p in pred: i = int(p.replace(LI_INDEX, "")) return i def __getitem__(self, key): """Returns item of the container at index key""" c = self._get_container() assert isinstance(key, int) elem_uri = str(RDF) + "_" + str(key) if key <= 0 or key > len(self): raise KeyError(key) v = self.graph.value(c, URIRef(elem_uri)) if v: return v else: raise KeyError(key) def __setitem__(self, key, value): """Sets the item at index key or predicate rdf:_key of the container to value""" assert isinstance(key, int) c = self._get_container() elem_uri = str(RDF) + "_" + str(key) if key <= 0 or key > len(self): raise KeyError(key) self.graph.set((c, URIRef(elem_uri), value)) def __delitem__(self, key): """Removing the item with index key or predicate rdf:_key""" assert isinstance(key, int) if key <= 0 or key > len(self): raise KeyError(key) graph = self.graph container = self.uri elem_uri = str(RDF) + "_" + 
str(key) graph.remove((container, URIRef(elem_uri), None)) for j in range(key + 1, len(self) + 1): elem_uri = str(RDF) + "_" + str(j) v = graph.value(container, URIRef(elem_uri)) graph.remove((container, URIRef(elem_uri), v)) elem_uri = str(RDF) + "_" + str(j - 1) graph.add((container, URIRef(elem_uri), v)) self._len -= 1 def items(self): """Returns a list of all items in the container""" l_ = [] container = self.uri i = 1 while True: elem_uri = str(RDF) + "_" + str(i) if (container, URIRef(elem_uri), None) in self.graph: i += 1 l_.append(self.graph.value(container, URIRef(elem_uri))) else: break return l_ def end(self): # # find end index (1-based) of container container = self.uri i = 1 while True: elem_uri = str(RDF) + "_" + str(i) if (container, URIRef(elem_uri), None) in self.graph: i += 1 else: return i - 1 def append(self, item): """Adding item to the end of the container""" end = self.end() elem_uri = str(RDF) + "_" + str(end + 1) container = self.uri self.graph.add((container, URIRef(elem_uri), item)) self._len += 1 return self def append_multiple(self, other): """Adding multiple elements to the container to the end which are in python list other""" end = self.end() # it should return the last index container = self.uri for item in other: end += 1 self._len += 1 elem_uri = str(RDF) + "_" + str(end) self.graph.add((container, URIRef(elem_uri), item)) return self def clear(self): """Removing all elements from the container""" container = self.uri graph = self.graph i = 1 while True: elem_uri = str(RDF) + "_" + str(i) if (container, URIRef(elem_uri), None) in self.graph: graph.remove((container, URIRef(elem_uri), None)) i += 1 else: break self._len = 0 return self class Bag(Container): """Unordered container (no preference order of elements)""" def __init__(self, graph, uri, seq=[]): Container.__init__(self, graph, uri, seq, "Bag") class Alt(Container): def __init__(self, graph, uri, seq=[]): Container.__init__(self, graph, uri, seq, "Alt") def anyone(self): 
if len(self) == 0: raise NoElementException() else: p = randint(1, len(self)) item = self.__getitem__(p) return item class Seq(Container): def __init__(self, graph, uri, seq=[]): Container.__init__(self, graph, uri, seq, "Seq") def add_at_position(self, pos, item): assert isinstance(pos, int) if pos <= 0 or pos > len(self) + 1: raise ValueError("Invalid Position for inserting element in rdf:Seq") if pos == len(self) + 1: self.append(item) else: for j in range(len(self), pos - 1, -1): container = self._get_container() elem_uri = str(RDF) + "_" + str(j) v = self.graph.value(container, URIRef(elem_uri)) self.graph.remove((container, URIRef(elem_uri), v)) elem_uri = str(RDF) + "_" + str(j + 1) self.graph.add((container, URIRef(elem_uri), v)) elem_uri_pos = str(RDF) + "_" + str(pos) self.graph.add((container, URIRef(elem_uri_pos), item)) self._len += 1 return self class NoElementException(Exception): def __init__(self, message="rdf:Alt Container is empty"): self.message = message def __str__(self): return self.message rdflib-6.1.1/rdflib/events.py000066400000000000000000000052271415774155300161230ustar00rootroot00000000000000__doc__ = """ Dirt Simple Events A Dispatcher (or a subclass of Dispatcher) stores event handlers that are 'fired' simple event objects when interesting things happen. Create a dispatcher: >>> d = Dispatcher() Now create a handler for the event and subscribe it to the dispatcher to handle Event events. A handler is a simple function or method that accepts the event as an argument: >>> def handler1(event): print(repr(event)) >>> d.subscribe(Event, handler1) # doctest: +ELLIPSIS Now dispatch a new event into the dispatcher, and see handler1 get fired: >>> d.dispatch(Event(foo='bar', data='yours', used_by='the event handlers')) """ __all__ = ["Event", "Dispatcher"] class Event(object): """ An event is a container for attributes. 
The source of an event creates this object, or a subclass, gives it any kind of data that the events handlers need to handle the event, and then calls notify(event). The target of an event registers a function to handle the event it is interested with subscribe(). When a sources calls notify(event), each subscriber to that event will be called in no particular order. """ def __init__(self, **kw): self.__dict__.update(kw) def __repr__(self): attrs = sorted(self.__dict__.keys()) return "" % ([a for a in attrs],) class Dispatcher(object): """ An object that can dispatch events to a privately managed group of subscribers. """ _dispatch_map = None def set_map(self, amap): self._dispatch_map = amap return self def get_map(self): return self._dispatch_map def subscribe(self, event_type, handler): """Subscribe the given handler to an event_type. Handlers are called in the order they are subscribed. """ if self._dispatch_map is None: self.set_map({}) lst = self._dispatch_map.get(event_type, None) if lst is None: lst = [handler] else: lst.append(handler) self._dispatch_map[event_type] = lst return self def dispatch(self, event): """Dispatch the given event to the subscribed handlers for the event's type""" if self._dispatch_map is not None: lst = self._dispatch_map.get(type(event), None) if lst is None: raise ValueError("unknown event type: %s" % type(event)) for l_ in lst: l_(event) def test(): import doctest doctest.testmod() if __name__ == "__main__": test() rdflib-6.1.1/rdflib/exceptions.py000066400000000000000000000044401415774155300167740ustar00rootroot00000000000000""" TODO: """ __all__ = [ "Error", "TypeCheckError", "SubjectTypeError", "PredicateTypeError", "ObjectTypeError", "ContextTypeError", "ParserError", ] class Error(Exception): """Base class for rdflib exceptions.""" def __init__(self, msg=None): Exception.__init__(self, msg) self.msg = msg class TypeCheckError(Error): """Parts of assertions are subject to type checks.""" def __init__(self, node): 
Error.__init__(self, node) self.type = type(node) self.node = node class SubjectTypeError(TypeCheckError): """Subject of an assertion must be an instance of URIRef.""" def __init__(self, node): TypeCheckError.__init__(self, node) self.msg = "Subject must be instance of URIRef or BNode: %s(%s)" % ( self.node, self.type, ) class PredicateTypeError(TypeCheckError): """Predicate of an assertion must be an instance of URIRef.""" def __init__(self, node): TypeCheckError.__init__(self, node) self.msg = "Predicate must be a URIRef instance: %s(%s)" % ( self.node, self.type, ) class ObjectTypeError(TypeCheckError): """Object of an assertion must be an instance of URIRef, Literal, or BNode.""" def __init__(self, node): TypeCheckError.__init__(self, node) self.msg = ( "\ Object must be instance of URIRef, Literal, or BNode: %s(%s)" % (self.node, self.type) ) class ContextTypeError(TypeCheckError): """Context of an assertion must be an instance of URIRef.""" def __init__(self, node): TypeCheckError.__init__(self, node) self.msg = "Context must be instance of URIRef or BNode: %s(%s)" % ( self.node, self.type, ) class ParserError(Error): """RDF Parser error.""" def __init__(self, msg): Error.__init__(self, msg) self.msg = msg def __str__(self): return self.msg class UniquenessError(Error): """A uniqueness assumption was made in the context, and that is not true""" def __init__(self, values): Error.__init__( self, "\ Uniqueness assumption is not fulfilled. 
Multiple values are: %s" % values, ) rdflib-6.1.1/rdflib/extras/000077500000000000000000000000001415774155300155455ustar00rootroot00000000000000rdflib-6.1.1/rdflib/extras/__init__.py000066400000000000000000000000301415774155300176470ustar00rootroot00000000000000# -*- coding: utf-8 -*- rdflib-6.1.1/rdflib/extras/cmdlineutils.py000066400000000000000000000034431415774155300206170ustar00rootroot00000000000000import sys import time import getopt import rdflib import codecs from rdflib.util import guess_format def _help(): sys.stderr.write( """ program.py [-f ] [-o ] [files...] Read RDF files given on STDOUT - does something to the resulting graph If no files are given, read from stdin -o specifies file for output, if not given stdout is used -f specifies parser to use, if not given it is guessed from extension """ ) def main(target, _help=_help, options="", stdin=True): """ A main function for tools that read RDF from files given on commandline or from STDIN (if stdin parameter is true) """ args, files = getopt.getopt(sys.argv[1:], "hf:o:" + options) dargs = dict(args) if "-h" in dargs: _help() sys.exit(-1) g = rdflib.Graph() if "-f" in dargs: f = dargs["-f"] else: f = None if "-o" in dargs: sys.stderr.write("Output to %s\n" % dargs["-o"]) out = codecs.open(dargs["-o"], "w", "utf-8") else: out = sys.stdout start = time.time() if len(files) == 0 and stdin: sys.stderr.write("Reading from stdin as %s..." % f) g.load(sys.stdin, format=f) sys.stderr.write("[done]\n") else: size = 0 for x in files: if f is None: f = guess_format(x) start1 = time.time() sys.stderr.write("Loading %s as %s... 
" % (x, f)) g.load(x, format=f) sys.stderr.write( "done.\t(%d triples\t%.2f seconds)\n" % (len(g) - size, time.time() - start1) ) size = len(g) sys.stderr.write( "Loaded a total of %d triples in %.2f seconds.\n" % (len(g), time.time() - start) ) target(g, out, args) rdflib-6.1.1/rdflib/extras/describer.py000066400000000000000000000222741415774155300200700ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- __doc__ = """ A Describer is a stateful utility for creating RDF statements in a semi-declarative manner. It has methods for creating literal values, rel and rev resource relations (somewhat resembling RDFa). The `rel` and ``rev`` methods return a context manager which sets the current about to the referenced resource for the context scope (for use with the ``with`` statement). Full example in the ``to_rdf`` method below:: >>> import datetime >>> from rdflib.graph import Graph >>> from rdflib.namespace import Namespace, RDFS, FOAF >>> >>> ORG_URI = "http://example.org/" >>> >>> CV = Namespace("http://purl.org/captsolo/resume-rdf/0.2/cv#") >>> >>> class Person(object): ... def __init__(self): ... self.first_name = u"Some" ... self.last_name = u"Body" ... self.username = "some1" ... self.presentation = u"Just a Python & RDF hacker." ... self.image = "/images/persons/" + self.username + ".jpg" ... self.site = "http://example.net/" ... self.start_date = datetime.date(2009, 9, 4) ... def get_full_name(self): ... return u" ".join([self.first_name, self.last_name]) ... def get_absolute_url(self): ... return "/persons/" + self.username ... def get_thumbnail_url(self): ... return self.image.replace('.jpg', '-thumb.jpg') ... ... def to_rdf(self): ... graph = Graph() ... graph.bind('foaf', FOAF) ... graph.bind('cv', CV) ... lang = 'en' ... d = Describer(graph, base=ORG_URI) ... d.about(self.get_absolute_url()+'#person') ... d.rdftype(FOAF.Person) ... d.value(FOAF.name, self.get_full_name()) ... d.value(FOAF.givenName, self.first_name) ... 
d.value(FOAF.familyName, self.last_name) ... d.rel(FOAF.homepage, self.site) ... d.value(RDFS.comment, self.presentation, lang=lang) ... with d.rel(FOAF.depiction, self.image): ... d.rdftype(FOAF.Image) ... d.rel(FOAF.thumbnail, self.get_thumbnail_url()) ... with d.rev(CV.aboutPerson): ... d.rdftype(CV.CV) ... with d.rel(CV.hasWorkHistory): ... d.value(CV.startDate, self.start_date) ... d.rel(CV.employedIn, ORG_URI+"#company") ... return graph ... >>> person_graph = Person().to_rdf() >>> expected = Graph().parse(data=''' ... ... ... Some Body ... Some ... Body ... ... ... ... ... ... ... Just a Python & RDF hacker. ... ... ... ... ... ... ... ... ... 2009-09-04 ... ... ... ... ... ... ''', format="xml") >>> >>> from rdflib.compare import isomorphic >>> isomorphic(person_graph, expected) #doctest: +SKIP True """ from contextlib import contextmanager from rdflib.graph import Graph from rdflib.namespace import RDF from rdflib.term import BNode from rdflib.term import Identifier from rdflib.term import Literal from rdflib.term import URIRef class Describer(object): def __init__(self, graph=None, about=None, base=None): if graph is None: graph = Graph() self.graph = graph self.base = base self._subjects = [] self.about(about or None) def about(self, subject, **kws): """ Sets the current subject. Will convert the given object into an ``URIRef`` if it's not an ``Identifier``. Usage:: >>> d = Describer() >>> d._current() #doctest: +ELLIPSIS rdflib.term.BNode(...) >>> d.about("http://example.org/") >>> d._current() rdflib.term.URIRef(u'http://example.org/') """ kws.setdefault("base", self.base) subject = cast_identifier(subject, **kws) if self._subjects: self._subjects[-1] = subject else: self._subjects.append(subject) def value(self, p, v, **kws): """ Set a literal value for the given property. Will cast the value to an ``Literal`` if a plain literal is given. 
Usage:: >>> from rdflib import URIRef >>> from rdflib.namespace import RDF, RDFS >>> d = Describer(about="http://example.org/") >>> d.value(RDFS.label, "Example") >>> d.graph.value(URIRef('http://example.org/'), RDFS.label) rdflib.term.Literal(u'Example') """ v = cast_value(v, **kws) self.graph.add((self._current(), p, v)) def rel(self, p, o=None, **kws): """Set an object for the given property. Will convert the given object into an ``URIRef`` if it's not an ``Identifier``. If none is given, a new ``BNode`` is used. Returns a context manager for use in a ``with`` block, within which the given object is used as current subject. Usage:: >>> from rdflib import URIRef >>> from rdflib.namespace import RDF, RDFS >>> d = Describer(about="/", base="http://example.org/") >>> _ctxt = d.rel(RDFS.seeAlso, "/about") >>> d.graph.value(URIRef('http://example.org/'), RDFS.seeAlso) rdflib.term.URIRef(u'http://example.org/about') >>> with d.rel(RDFS.seeAlso, "/more"): ... d.value(RDFS.label, "More") >>> (URIRef('http://example.org/'), RDFS.seeAlso, ... URIRef('http://example.org/more')) in d.graph True >>> d.graph.value(URIRef('http://example.org/more'), RDFS.label) rdflib.term.Literal(u'More') """ kws.setdefault("base", self.base) p = cast_identifier(p) o = cast_identifier(o, **kws) self.graph.add((self._current(), p, o)) return self._subject_stack(o) def rev(self, p, s=None, **kws): """ Same as ``rel``, but uses current subject as *object* of the relation. The given resource is still used as subject in the returned context manager. Usage:: >>> from rdflib import URIRef >>> from rdflib.namespace import RDF, RDFS >>> d = Describer(about="http://example.org/") >>> with d.rev(RDFS.seeAlso, "http://example.net/"): ... d.value(RDFS.label, "Net") >>> (URIRef('http://example.net/'), RDFS.seeAlso, ... 
URIRef('http://example.org/')) in d.graph True >>> d.graph.value(URIRef('http://example.net/'), RDFS.label) rdflib.term.Literal(u'Net') """ kws.setdefault("base", self.base) p = cast_identifier(p) s = cast_identifier(s, **kws) self.graph.add((s, p, self._current())) return self._subject_stack(s) def rdftype(self, t): """ Shorthand for setting rdf:type of the current subject. Usage:: >>> from rdflib import URIRef >>> from rdflib.namespace import RDF, RDFS >>> d = Describer(about="http://example.org/") >>> d.rdftype(RDFS.Resource) >>> (URIRef('http://example.org/'), ... RDF.type, RDFS.Resource) in d.graph True """ self.graph.add((self._current(), RDF.type, t)) def _current(self): return self._subjects[-1] @contextmanager def _subject_stack(self, subject): self._subjects.append(subject) yield None self._subjects.pop() def cast_value(v, **kws): if not isinstance(v, Literal): v = Literal(v, **kws) return v def cast_identifier(ref, **kws): ref = ref or BNode() if not isinstance(ref, Identifier): ref = URIRef(ref, **kws) return ref rdflib-6.1.1/rdflib/extras/external_graph_libs.py000066400000000000000000000266051415774155300221440ustar00rootroot00000000000000#!/usr/bin/env python # encoding: utf-8 """Convert (to and) from rdflib graphs to other well known graph libraries. Currently the following libraries are supported: - networkx: MultiDiGraph, DiGraph, Graph - graph_tool: Graph Doctests in this file are all skipped, as we can't run them conditionally if networkx or graph_tool are available and they would err otherwise. see ../../test/test_extras_external_graph_libs.py for conditional tests """ import logging logger = logging.getLogger(__name__) def _identity(x): return x def _rdflib_to_networkx_graph( graph, nxgraph, calc_weights, edge_attrs, transform_s=_identity, transform_o=_identity, ): """Helper method for multidigraph, digraph and graph. Modifies nxgraph in-place! Arguments: graph: an rdflib.Graph. nxgraph: a networkx.Graph/DiGraph/MultiDigraph. 
calc_weights: If True adds a 'weight' attribute to each edge according to the count of s,p,o triples between s and o, which is meaningful for Graph/DiGraph. edge_attrs: Callable to construct edge data from s, p, o. 'triples' attribute is handled specially to be merged. 'weight' should not be generated if calc_weights==True. (see invokers below!) transform_s: Callable to transform node generated from s. transform_o: Callable to transform node generated from o. """ assert callable(edge_attrs) assert callable(transform_s) assert callable(transform_o) import networkx as nx for s, p, o in graph: ts, to = transform_s(s), transform_o(o) # apply possible transformations data = nxgraph.get_edge_data(ts, to) if data is None or isinstance(nxgraph, nx.MultiDiGraph): # no edge yet, set defaults data = edge_attrs(s, p, o) if calc_weights: data["weight"] = 1 nxgraph.add_edge(ts, to, **data) else: # already have an edge, just update attributes if calc_weights: data["weight"] += 1 if "triples" in data: d = edge_attrs(s, p, o) data["triples"].extend(d["triples"]) def rdflib_to_networkx_multidigraph( graph, edge_attrs=lambda s, p, o: {"key": p}, **kwds ): """Converts the given graph into a networkx.MultiDiGraph. The subjects and objects are the later nodes of the MultiDiGraph. The predicates are used as edge keys (to identify multi-edges). :Parameters: - graph: a rdflib.Graph. - edge_attrs: Callable to construct later edge_attributes. It receives 3 variables (s, p, o) and should construct a dictionary that is passed to networkx's add_edge(s, o, \*\*attrs) function. By default this will include setting the MultiDiGraph key=p here. If you don't want to be able to re-identify the edge later on, you can set this to `lambda s, p, o: {}`. In this case MultiDiGraph's default (increasing ints) will be used. 
Returns: networkx.MultiDiGraph >>> from rdflib import Graph, URIRef, Literal >>> g = Graph() >>> a, b, l = URIRef('a'), URIRef('b'), Literal('l') >>> p, q = URIRef('p'), URIRef('q') >>> edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] >>> for t in edges: ... g.add(t) ... >>> mdg = rdflib_to_networkx_multidigraph(g) >>> len(mdg.edges()) 4 >>> mdg.has_edge(a, b) True >>> mdg.has_edge(a, b, key=p) True >>> mdg.has_edge(a, b, key=q) True >>> mdg = rdflib_to_networkx_multidigraph(g, edge_attrs=lambda s,p,o: {}) >>> mdg.has_edge(a, b, key=0) True >>> mdg.has_edge(a, b, key=1) True """ import networkx as nx mdg = nx.MultiDiGraph() _rdflib_to_networkx_graph(graph, mdg, False, edge_attrs, **kwds) return mdg def rdflib_to_networkx_digraph( graph, calc_weights=True, edge_attrs=lambda s, p, o: {"triples": [(s, p, o)]}, **kwds, ): """Converts the given graph into a networkx.DiGraph. As an rdflib.Graph() can contain multiple edges between nodes, by default adds the a 'triples' attribute to the single DiGraph edge with a list of all triples between s and o. Also by default calculates the edge weight as the length of triples. :Parameters: - `graph`: a rdflib.Graph. - `calc_weights`: If true calculate multi-graph edge-count as edge 'weight' - `edge_attrs`: Callable to construct later edge_attributes. It receives 3 variables (s, p, o) and should construct a dictionary that is passed to networkx's add_edge(s, o, \*\*attrs) function. By default this will include setting the 'triples' attribute here, which is treated specially by us to be merged. Other attributes of multi-edges will only contain the attributes of the first edge. If you don't want the 'triples' attribute for tracking, set this to `lambda s, p, o: {}`. Returns: networkx.DiGraph >>> from rdflib import Graph, URIRef, Literal >>> g = Graph() >>> a, b, l = URIRef('a'), URIRef('b'), Literal('l') >>> p, q = URIRef('p'), URIRef('q') >>> edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] >>> for t in edges: ... 
g.add(t) ... >>> dg = rdflib_to_networkx_digraph(g) >>> dg[a][b]['weight'] 2 >>> sorted(dg[a][b]['triples']) == [(a, p, b), (a, q, b)] True >>> len(dg.edges()) 3 >>> dg.size() 3 >>> dg.size(weight='weight') 4.0 >>> dg = rdflib_to_networkx_graph(g, False, edge_attrs=lambda s,p,o:{}) >>> 'weight' in dg[a][b] False >>> 'triples' in dg[a][b] False """ import networkx as nx dg = nx.DiGraph() _rdflib_to_networkx_graph(graph, dg, calc_weights, edge_attrs, **kwds) return dg def rdflib_to_networkx_graph( graph, calc_weights=True, edge_attrs=lambda s, p, o: {"triples": [(s, p, o)]}, **kwds, ): """Converts the given graph into a networkx.Graph. As an rdflib.Graph() can contain multiple directed edges between nodes, by default adds the a 'triples' attribute to the single DiGraph edge with a list of triples between s and o in graph. Also by default calculates the edge weight as the len(triples). :Parameters: - graph: a rdflib.Graph. - calc_weights: If true calculate multi-graph edge-count as edge 'weight' - edge_attrs: Callable to construct later edge_attributes. It receives 3 variables (s, p, o) and should construct a dictionary that is passed to networkx's add_edge(s, o, \*\*attrs) function. By default this will include setting the 'triples' attribute here, which is treated specially by us to be merged. Other attributes of multi-edges will only contain the attributes of the first edge. If you don't want the 'triples' attribute for tracking, set this to `lambda s, p, o: {}`. Returns: networkx.Graph >>> from rdflib import Graph, URIRef, Literal >>> g = Graph() >>> a, b, l = URIRef('a'), URIRef('b'), Literal('l') >>> p, q = URIRef('p'), URIRef('q') >>> edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] >>> for t in edges: ... g.add(t) ... 
>>> ug = rdflib_to_networkx_graph(g) >>> ug[a][b]['weight'] 3 >>> sorted(ug[a][b]['triples']) == [(a, p, b), (a, q, b), (b, p, a)] True >>> len(ug.edges()) 2 >>> ug.size() 2 >>> ug.size(weight='weight') 4.0 >>> ug = rdflib_to_networkx_graph(g, False, edge_attrs=lambda s,p,o:{}) >>> 'weight' in ug[a][b] False >>> 'triples' in ug[a][b] False """ import networkx as nx g = nx.Graph() _rdflib_to_networkx_graph(graph, g, calc_weights, edge_attrs, **kwds) return g def rdflib_to_graphtool( graph, v_prop_names=[str("term")], e_prop_names=[str("term")], transform_s=lambda s, p, o: {str("term"): s}, transform_p=lambda s, p, o: {str("term"): p}, transform_o=lambda s, p, o: {str("term"): o}, ): """Converts the given graph into a graph_tool.Graph(). The subjects and objects are the later vertices of the Graph. The predicates become edges. :Parameters: - graph: a rdflib.Graph. - v_prop_names: a list of names for the vertex properties. The default is set to ['term'] (see transform_s, transform_o below). - e_prop_names: a list of names for the edge properties. - transform_s: callable with s, p, o input. Should return a dictionary containing a value for each name in v_prop_names. By default is set to {'term': s} which in combination with v_prop_names = ['term'] adds s as 'term' property to the generated vertex for s. - transform_p: similar to transform_s, but wrt. e_prop_names. By default returns {'term': p} which adds p as a property to the generated edge between the vertex for s and the vertex for o. - transform_o: similar to transform_s. Returns: graph_tool.Graph() >>> from rdflib import Graph, URIRef, Literal >>> g = Graph() >>> a, b, l = URIRef('a'), URIRef('b'), Literal('l') >>> p, q = URIRef('p'), URIRef('q') >>> edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] >>> for t in edges: ... g.add(t) ... 
>>> mdg = rdflib_to_graphtool(g) >>> len(list(mdg.edges())) 4 >>> from graph_tool import util as gt_util >>> vpterm = mdg.vertex_properties['term'] >>> va = gt_util.find_vertex(mdg, vpterm, a)[0] >>> vb = gt_util.find_vertex(mdg, vpterm, b)[0] >>> vl = gt_util.find_vertex(mdg, vpterm, l)[0] >>> (va, vb) in [(e.source(), e.target()) for e in list(mdg.edges())] True >>> epterm = mdg.edge_properties['term'] >>> len(list(gt_util.find_edge(mdg, epterm, p))) == 3 True >>> len(list(gt_util.find_edge(mdg, epterm, q))) == 1 True >>> mdg = rdflib_to_graphtool( ... g, ... e_prop_names=[str('name')], ... transform_p=lambda s, p, o: {str('name'): unicode(p)}) >>> epterm = mdg.edge_properties['name'] >>> len(list(gt_util.find_edge(mdg, epterm, unicode(p)))) == 3 True >>> len(list(gt_util.find_edge(mdg, epterm, unicode(q)))) == 1 True """ import graph_tool as gt g = gt.Graph() vprops = [(vpn, g.new_vertex_property("object")) for vpn in v_prop_names] for vpn, vprop in vprops: g.vertex_properties[vpn] = vprop eprops = [(epn, g.new_edge_property("object")) for epn in e_prop_names] for epn, eprop in eprops: g.edge_properties[epn] = eprop node_to_vertex = {} for s, p, o in graph: sv = node_to_vertex.get(s) if sv is None: v = g.add_vertex() node_to_vertex[s] = v tmp_props = transform_s(s, p, o) for vpn, vprop in vprops: vprop[v] = tmp_props[vpn] sv = v ov = node_to_vertex.get(o) if ov is None: v = g.add_vertex() node_to_vertex[o] = v tmp_props = transform_o(s, p, o) for vpn, vprop in vprops: vprop[v] = tmp_props[vpn] ov = v e = g.add_edge(sv, ov) tmp_props = transform_p(s, p, o) for epn, eprop in eprops: eprop[e] = tmp_props[epn] return g rdflib-6.1.1/rdflib/extras/infixowl.py000066400000000000000000002141251415774155300177630ustar00rootroot00000000000000#!/usr/bin/env python # -*- coding: utf-8 -*- __doc__ = """RDFLib Python binding for OWL Abstract Syntax see: http://www.w3.org/TR/owl-semantics/syntax.html http://owl-workshop.man.ac.uk/acceptedLong/submission_9.pdf 3.2.3 Axioms for 
complete classes without using owl:equivalentClass Named class description of type 2 (with owl:oneOf) or type 4-6 (with owl:intersectionOf, owl:unionOf or owl:complementOf Uses Manchester Syntax for __repr__ >>> exNs = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', exNs, override=False) >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager Now we have an empty graph, we can construct OWL classes in it using the Python classes defined in this module >>> a = Class(exNs.Opera, graph=g) Now we can assert rdfs:subClassOf and owl:equivalentClass relationships (in the underlying graph) with other classes using the 'subClassOf' and 'equivalentClass' descriptors which can be set to a list of objects for the corresponding predicates. >>> a.subClassOf = [exNs.MusicalWork] We can then access the rdfs:subClassOf relationships >>> print(list(a.subClassOf)) [Class: ex:MusicalWork ] This can also be used against already populated graphs: >>> owlGraph = Graph().parse(str(OWL)) #doctest: +SKIP >>> namespace_manager.bind('owl', OWL, override=False) #doctest: +SKIP >>> owlGraph.namespace_manager = namespace_manager #doctest: +SKIP >>> list(Class(OWL.Class, graph=owlGraph).subClassOf) #doctest: +SKIP [Class: rdfs:Class ] Operators are also available. For instance we can add ex:Opera to the extension of the ex:CreativeWork class via the '+=' operator >>> a #doctest: +SKIP Class: ex:Opera SubClassOf: ex:MusicalWork >>> b = Class(exNs.CreativeWork, graph=g) >>> b += a >>> print(sorted(a.subClassOf, key=lambda c:c.identifier)) #doctest: +SKIP [Class: ex:CreativeWork , Class: ex:MusicalWork ] And we can then remove it from the extension as well >>> b -= a >>> a #doctest: +SKIP Class: ex:Opera SubClassOf: ex:MusicalWork Boolean class constructions can also be created with Python operators. 
For example, The | operator can be used to construct a class consisting of a owl:unionOf the operands: >>> c = a | b | Class(exNs.Work, graph=g) >>> c #doctest: +SKIP ( ex:Opera OR ex:CreativeWork OR ex:Work ) Boolean class expressions can also be operated as lists (using python list operators) >>> del c[c.index(Class(exNs.Work, graph=g))] >>> c #doctest: +SKIP ( ex:Opera OR ex:CreativeWork ) The '&' operator can be used to construct class intersection: >>> woman = Class(exNs.Female, graph=g) & Class(exNs.Human, graph=g) >>> woman.identifier = exNs.Woman >>> woman #doctest: +SKIP ( ex:Female AND ex:Human ) >>> len(woman) 2 Enumerated classes can also be manipulated >>> contList = [Class(exNs.Africa, graph=g), Class(exNs.NorthAmerica, graph=g)] >>> EnumeratedClass(members=contList, graph=g) #doctest: +SKIP { ex:Africa ex:NorthAmerica } owl:Restrictions can also be instantiated: >>> Restriction(exNs.hasParent, graph=g, allValuesFrom=exNs.Human) #doctest: +SKIP ( ex:hasParent ONLY ex:Human ) Restrictions can also be created using Manchester OWL syntax in 'colloquial' Python >>> exNs.hasParent | some | Class(exNs.Physician, graph=g) #doctest: +SKIP ( ex:hasParent SOME ex:Physician ) >>> Property(exNs.hasParent,graph=g) | max | Literal(1) #doctest: +SKIP ( ex:hasParent MAX 1 ) >>> print(g.serialize(format='pretty-xml')) #doctest: +SKIP """ import itertools from rdflib import BNode, Literal, Namespace, RDF, RDFS, URIRef, Variable from rdflib.graph import Graph from rdflib.collection import Collection from rdflib.namespace import OWL, XSD from rdflib.namespace import NamespaceManager from rdflib.term import Identifier from rdflib.util import first import logging logger = logging.getLogger(__name__) """ From: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/384122 Python has the wonderful "in" operator and it would be nice to have additional infix operator like this. This recipe shows how (almost) arbitrary infix operators can be defined. 
""" __all__ = [ "nsBinds", "ACE_NS", "CLASS_RELATIONS", "some", "only", "max", "min", "exactly", "value", "PropertyAbstractSyntax", "AllClasses", "AllDifferent", "AllProperties", "AnnotatableTerms", "BooleanClass", "Callable", "CastClass", "Class", "ClassNamespaceFactory", "classOrIdentifier", "classOrTerm", "CommonNSBindings", "ComponentTerms", "DeepClassClear", "EnumeratedClass", "generateQName", "GetIdentifiedClasses", "Individual", "MalformedClass", "manchesterSyntax", "Ontology", "OWLRDFListProxy", "Property", "propertyOrIdentifier", "Restriction", "termDeletionDecorator", ] # definition of an Infix operator class # this recipe also works in jython # calling sequence for the infix is either: # x |op| y # or: # x <> y class Infix: def __init__(self, function): self.function = function def __ror__(self, other): return Infix(lambda x, self=self, other=other: self.function(other, x)) def __or__(self, other): return self.function(other) def __rlshift__(self, other): return Infix(lambda x, self=self, other=other: self.function(other, x)) def __rshift__(self, other): return self.function(other) def __call__(self, value1, value2): return self.function(value1, value2) nsBinds = { "skos": "http://www.w3.org/2004/02/skos/core#", "rdf": RDF, "rdfs": RDFS, "owl": OWL, "list": URIRef("http://www.w3.org/2000/10/swap/list#"), "dc": "http://purl.org/dc/elements/1.1/", } def generateQName(graph, uri): prefix, uri, localName = graph.compute_qname(classOrIdentifier(uri)) return ":".join([prefix, localName]) def classOrTerm(thing): if isinstance(thing, Class): return thing.identifier else: assert isinstance(thing, (URIRef, BNode, Literal)) return thing def classOrIdentifier(thing): if isinstance(thing, (Property, Class)): return thing.identifier else: assert isinstance(thing, (URIRef, BNode)), ( "Expecting a Class, Property, URIRef, or BNode.. 
not a %s" % thing ) return thing def propertyOrIdentifier(thing): if isinstance(thing, Property): return thing.identifier else: assert isinstance(thing, URIRef) return thing def manchesterSyntax(thing, store, boolean=None, transientList=False): """ Core serialization """ assert thing is not None if boolean: if transientList: liveChildren = iter(thing) children = [manchesterSyntax(child, store) for child in thing] else: liveChildren = iter(Collection(store, thing)) children = [ manchesterSyntax(child, store) for child in Collection(store, thing) ] if boolean == OWL.intersectionOf: childList = [] named = [] for child in liveChildren: if isinstance(child, URIRef): named.append(child) else: childList.append(child) if named: def castToQName(x): prefix, uri, localName = store.compute_qname(x) return ":".join([prefix, localName]) if len(named) > 1: prefix = "( " + " AND ".join(map(castToQName, named)) + " )" else: prefix = manchesterSyntax(named[0], store) if childList: return ( str(prefix) + " THAT " + " AND ".join( [str(manchesterSyntax(x, store)) for x in childList] ) ) else: return prefix else: return "( " + " AND ".join([str(c) for c in children]) + " )" elif boolean == OWL.unionOf: return "( " + " OR ".join([str(c) for c in children]) + " )" elif boolean == OWL.oneOf: return "{ " + " ".join([str(c) for c in children]) + " }" else: assert boolean == OWL.complementOf elif OWL.Restriction in store.objects(subject=thing, predicate=RDF.type): prop = list(store.objects(subject=thing, predicate=OWL.onProperty))[0] prefix, uri, localName = store.compute_qname(prop) propString = ":".join([prefix, localName]) label = first(store.objects(subject=prop, predicate=RDFS.label)) if label: propString = "'%s'" % label for onlyClass in store.objects(subject=thing, predicate=OWL.allValuesFrom): return "( %s ONLY %s )" % (propString, manchesterSyntax(onlyClass, store)) for val in store.objects(subject=thing, predicate=OWL.hasValue): return "( %s VALUE %s )" % (propString, 
manchesterSyntax(val, store)) for someClass in store.objects(subject=thing, predicate=OWL.someValuesFrom): return "( %s SOME %s )" % (propString, manchesterSyntax(someClass, store)) cardLookup = { OWL.maxCardinality: "MAX", OWL.minCardinality: "MIN", OWL.cardinality: "EQUALS", } for s, p, o in store.triples_choices((thing, list(cardLookup.keys()), None)): return "( %s %s %s )" % (propString, cardLookup[p], o) compl = list(store.objects(subject=thing, predicate=OWL.complementOf)) if compl: return "( NOT %s )" % (manchesterSyntax(compl[0], store)) else: prolog = "\n".join(["PREFIX %s: <%s>" % (k, nsBinds[k]) for k in nsBinds]) qstr = ( prolog + "\nSELECT ?p ?bool WHERE {?class a owl:Class; ?p ?bool ." + "?bool rdf:first ?foo }" ) initb = {Variable("?class"): thing} for boolProp, col in store.query(qstr, processor="sparql", initBindings=initb): if not isinstance(thing, URIRef): return manchesterSyntax(col, store, boolean=boolProp) try: prefix, uri, localName = store.compute_qname(thing) qname = ":".join([prefix, localName]) except Exception: if isinstance(thing, BNode): return thing.n3() return "<" + thing + ">" label = first(Class(thing, graph=store).label) if label: return label else: return qname def GetIdentifiedClasses(graph): for c in graph.subjects(predicate=RDF.type, object=OWL.Class): if isinstance(c, URIRef): yield Class(c) def termDeletionDecorator(prop): def someFunc(func): func.property = prop return func return someFunc class TermDeletionHelper: def __init__(self, prop): self.prop = prop def __call__(self, f): def _remover(inst): inst.graph.remove((inst.identifier, self.prop, None)) return _remover class Individual(object): """ A typed individual """ factoryGraph = Graph() def serialize(self, graph): for fact in self.factoryGraph.triples((self.identifier, None, None)): graph.add(fact) def __init__(self, identifier=None, graph=None): self.__identifier = identifier is not None and identifier or BNode() if graph is None: self.graph = self.factoryGraph else: 
self.graph = graph self.qname = None if not isinstance(self.identifier, BNode): try: prefix, uri, localName = self.graph.compute_qname(self.identifier) self.qname = ":".join([prefix, localName]) except: pass def clearInDegree(self): self.graph.remove((None, None, self.identifier)) def clearOutDegree(self): self.graph.remove((self.identifier, None, None)) def delete(self): self.clearInDegree() self.clearOutDegree() def replace(self, other): for s, p, o in self.graph.triples((None, None, self.identifier)): self.graph.add((s, p, classOrIdentifier(other))) self.delete() def _get_type(self): for _t in self.graph.objects(subject=self.identifier, predicate=RDF.type): yield _t def _set_type(self, kind): if not kind: return if isinstance(kind, (Individual, Identifier)): self.graph.add((self.identifier, RDF.type, classOrIdentifier(kind))) else: for c in kind: assert isinstance(c, (Individual, Identifier)) self.graph.add((self.identifier, RDF.type, classOrIdentifier(c))) @TermDeletionHelper(RDF.type) def _delete_type(self): """ >>> g = Graph() >>> b=Individual(OWL.Restriction,g) >>> b.type = RDF.Resource >>> len(list(b.type)) 1 >>> del b.type >>> len(list(b.type)) 0 """ pass type = property(_get_type, _set_type, _delete_type) def _get_identifier(self): return self.__identifier def _set_identifier(self, i): assert i if i != self.__identifier: oldStmtsOut = [ (p, o) for s, p, o in self.graph.triples((self.__identifier, None, None)) ] oldStmtsIn = [ (s, p) for s, p, o in self.graph.triples((None, None, self.__identifier)) ] for p1, o1 in oldStmtsOut: self.graph.remove((self.__identifier, p1, o1)) for s1, p1 in oldStmtsIn: self.graph.remove((s1, p1, self.__identifier)) self.__identifier = i self.graph.addN([(i, p1, o1, self.graph) for p1, o1 in oldStmtsOut]) self.graph.addN([(s1, p1, i, self.graph) for s1, p1 in oldStmtsIn]) if not isinstance(i, BNode): try: prefix, uri, localName = self.graph.compute_qname(i) self.qname = ":".join([prefix, localName]) except: pass identifier = 
property(_get_identifier, _set_identifier) def _get_sameAs(self): for _t in self.graph.objects(subject=self.identifier, predicate=OWL.sameAs): yield _t def _set_sameAs(self, term): # if not kind: # return if isinstance(term, (Individual, Identifier)): self.graph.add((self.identifier, OWL.sameAs, classOrIdentifier(term))) else: for c in term: assert isinstance(c, (Individual, Identifier)) self.graph.add((self.identifier, OWL.sameAs, classOrIdentifier(c))) @TermDeletionHelper(OWL.sameAs) def _delete_sameAs(self): pass sameAs = property(_get_sameAs, _set_sameAs, _delete_sameAs) ACE_NS = Namespace("http://attempto.ifi.uzh.ch/ace_lexicon#") class AnnotatableTerms(Individual): """ Terms in an OWL ontology with rdfs:label and rdfs:comment """ def __init__(self, identifier, graph=None, nameAnnotation=None, nameIsLabel=False): super(AnnotatableTerms, self).__init__(identifier, graph) if nameAnnotation: self.setupACEAnnotations() self.PN_sgProp.extent = [ (self.identifier, self.handleAnnotation(nameAnnotation)) ] if nameIsLabel: self.label = [nameAnnotation] def handleAnnotation(self, val): return val if isinstance(val, Literal) else Literal(val) def setupACEAnnotations(self): self.graph.bind("ace", ACE_NS, override=False) # PN_sg singular form of a proper name () self.PN_sgProp = Property( ACE_NS.PN_sg, baseType=OWL.AnnotationProperty, graph=self.graph ) # CN_sg singular form of a common noun self.CN_sgProp = Property( ACE_NS.CN_sg, baseType=OWL.AnnotationProperty, graph=self.graph ) # CN_pl plural form of a common noun self.CN_plProp = Property( ACE_NS.CN_pl, baseType=OWL.AnnotationProperty, graph=self.graph ) # singular form of a transitive verb self.TV_sgProp = Property( ACE_NS.TV_sg, baseType=OWL.AnnotationProperty, graph=self.graph ) # plural form of a transitive verb self.TV_plProp = Property( ACE_NS.TV_pl, baseType=OWL.AnnotationProperty, graph=self.graph ) # past participle form a transitive verb self.TV_vbgProp = Property( ACE_NS.TV_vbg, 
baseType=OWL.AnnotationProperty, graph=self.graph ) def _get_comment(self): for comment in self.graph.objects( subject=self.identifier, predicate=RDFS.comment ): yield comment def _set_comment(self, comment): if not comment: return if isinstance(comment, Identifier): self.graph.add((self.identifier, RDFS.comment, comment)) else: for c in comment: self.graph.add((self.identifier, RDFS.comment, c)) @TermDeletionHelper(RDFS.comment) def _del_comment(self): pass comment = property(_get_comment, _set_comment, _del_comment) def _get_seeAlso(self): for sA in self.graph.objects(subject=self.identifier, predicate=RDFS.seeAlso): yield sA def _set_seeAlso(self, seeAlsos): if not seeAlsos: return for s in seeAlsos: self.graph.add((self.identifier, RDFS.seeAlso, s)) @TermDeletionHelper(RDFS.seeAlso) def _del_seeAlso(self): pass seeAlso = property(_get_seeAlso, _set_seeAlso, _del_seeAlso) def _get_label(self): for label in self.graph.objects(subject=self.identifier, predicate=RDFS.label): yield label def _set_label(self, label): if not label: return if isinstance(label, Identifier): self.graph.add((self.identifier, RDFS.label, label)) else: for l_ in label: self.graph.add((self.identifier, RDFS.label, l_)) @TermDeletionHelper(RDFS.label) def _delete_label(self): """ >>> g=Graph() >>> b=Individual(OWL.Restriction,g) >>> b.label = Literal('boo') >>> len(list(b.label)) 1 >>> del b.label >>> len(list(b.label)) 0 """ pass label = property(_get_label, _set_label, _delete_label) class Ontology(AnnotatableTerms): """The owl ontology metadata""" def __init__(self, identifier=None, imports=None, comment=None, graph=None): super(Ontology, self).__init__(identifier, graph) self.imports = imports and imports or [] self.comment = comment and comment or [] if (self.identifier, RDF.type, OWL.Ontology) not in self.graph: self.graph.add((self.identifier, RDF.type, OWL.Ontology)) def setVersion(self, version): self.graph.set((self.identifier, OWL.versionInfo, version)) def _get_imports(self): for 
owl in self.graph.objects( subject=self.identifier, predicate=OWL["imports"] ): yield owl def _set_imports(self, other): if not other: return for o in other: self.graph.add((self.identifier, OWL["imports"], o)) @TermDeletionHelper(OWL["imports"]) def _del_imports(self): pass imports = property(_get_imports, _set_imports, _del_imports) def AllClasses(graph): prevClasses = set() for c in graph.subjects(predicate=RDF.type, object=OWL.Class): if c not in prevClasses: prevClasses.add(c) yield Class(c) def AllProperties(graph): prevProps = set() for s, p, o in graph.triples_choices( ( None, RDF.type, [ OWL.SymmetricProperty, OWL.FunctionalProperty, OWL.InverseFunctionalProperty, OWL.TransitiveProperty, OWL.DatatypeProperty, OWL.ObjectProperty, OWL.AnnotationProperty, ], ) ): if o in [ OWL.SymmetricProperty, OWL.InverseFunctionalProperty, OWL.TransitiveProperty, OWL.ObjectProperty, ]: bType = OWL.ObjectProperty else: bType = OWL.DatatypeProperty if s not in prevProps: prevProps.add(s) yield Property(s, graph=graph, baseType=bType) class ClassNamespaceFactory(Namespace): def term(self, name): return Class(URIRef(self + name)) def __getitem__(self, key, default=None): return self.term(key) def __getattr__(self, name): if name.startswith("__"): # ignore any special Python names! 
raise AttributeError else: return self.term(name) CLASS_RELATIONS = set( Namespace("http://www.w3.org/2002/07/owl#resourceProperties") ).difference( [ OWL.onProperty, OWL.allValuesFrom, OWL.hasValue, OWL.someValuesFrom, OWL.inverseOf, OWL.imports, OWL.versionInfo, OWL.backwardCompatibleWith, OWL.incompatibleWith, OWL.unionOf, OWL.intersectionOf, OWL.oneOf, ] ) def ComponentTerms(cls): """ Takes a Class instance and returns a generator over the classes that are involved in its definition, ignoring unnamed classes """ if OWL.Restriction in cls.type: try: cls = CastClass(cls, Individual.factoryGraph) for s, p, innerClsId in cls.factoryGraph.triples_choices( (cls.identifier, [OWL.allValuesFrom, OWL.someValuesFrom], None) ): innerCls = Class(innerClsId, skipOWLClassMembership=True) if isinstance(innerClsId, BNode): for _c in ComponentTerms(innerCls): yield _c else: yield innerCls except: pass else: cls = CastClass(cls, Individual.factoryGraph) if isinstance(cls, BooleanClass): for _cls in cls: _cls = Class(_cls, skipOWLClassMembership=True) if isinstance(_cls.identifier, BNode): for _c in ComponentTerms(_cls): yield _c else: yield _cls else: for innerCls in cls.subClassOf: if isinstance(innerCls.identifier, BNode): for _c in ComponentTerms(innerCls): yield _c else: yield innerCls for s, p, o in cls.factoryGraph.triples_choices( (classOrIdentifier(cls), CLASS_RELATIONS, None) ): if isinstance(o, BNode): for _c in ComponentTerms(CastClass(o, Individual.factoryGraph)): yield _c else: yield innerCls def DeepClassClear(classToPrune): """ Recursively clear the given class, continuing where any related class is an anonymous class >>> EX = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', EX, override=False) >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager >>> Individual.factoryGraph = g >>> classB = Class(EX.B) >>> classC = Class(EX.C) >>> classD 
= Class(EX.D) >>> classE = Class(EX.E) >>> classF = Class(EX.F) >>> anonClass = EX.someProp | some | classD #doctest: +SKIP >>> classF += anonClass #doctest: +SKIP >>> list(anonClass.subClassOf) #doctest: +SKIP [Class: ex:F ] >>> classA = classE | classF | anonClass #doctest: +SKIP >>> classB += classA #doctest: +SKIP >>> classA.equivalentClass = [Class()] #doctest: +SKIP >>> classB.subClassOf = [EX.someProp | some | classC] #doctest: +SKIP >>> classA #doctest: +SKIP ( ex:E OR ex:F OR ( ex:someProp SOME ex:D ) ) >>> DeepClassClear(classA) #doctest: +SKIP >>> classA #doctest: +SKIP ( ) >>> list(anonClass.subClassOf) #doctest: +SKIP [] >>> classB #doctest: +SKIP Class: ex:B SubClassOf: ( ex:someProp SOME ex:C ) >>> otherClass = classD | anonClass #doctest: +SKIP >>> otherClass #doctest: +SKIP ( ex:D OR ( ex:someProp SOME ex:D ) ) >>> DeepClassClear(otherClass) #doctest: +SKIP >>> otherClass #doctest: +SKIP ( ) >>> otherClass.delete() #doctest: +SKIP >>> list(g.triples((otherClass.identifier, None, None))) #doctest: +SKIP [] """ def deepClearIfBNode(_class): if isinstance(classOrIdentifier(_class), BNode): DeepClassClear(_class) classToPrune = CastClass(classToPrune, Individual.factoryGraph) for c in classToPrune.subClassOf: deepClearIfBNode(c) classToPrune.graph.remove((classToPrune.identifier, RDFS.subClassOf, None)) for c in classToPrune.equivalentClass: deepClearIfBNode(c) classToPrune.graph.remove((classToPrune.identifier, OWL.equivalentClass, None)) inverseClass = classToPrune.complementOf if inverseClass: classToPrune.graph.remove((classToPrune.identifier, OWL.complementOf, None)) deepClearIfBNode(inverseClass) if isinstance(classToPrune, BooleanClass): for c in classToPrune: deepClearIfBNode(c) classToPrune.clear() classToPrune.graph.remove( (classToPrune.identifier, classToPrune._operator, None) ) class MalformedClass(Exception): def __init__(self, msg): self.msg = msg def __repr__(self): return self.msg def CastClass(c, graph=None): graph = graph is None and 
c.factoryGraph or graph for kind in graph.objects(subject=classOrIdentifier(c), predicate=RDF.type): if kind == OWL.Restriction: kwArgs = {"identifier": classOrIdentifier(c), "graph": graph} for s, p, o in graph.triples((classOrIdentifier(c), None, None)): if p != RDF.type: if p == OWL.onProperty: kwArgs["onProperty"] = o else: if p not in Restriction.restrictionKinds: continue kwArgs[str(p.split(str(OWL))[-1])] = o if not set( [str(i.split(str(OWL))[-1]) for i in Restriction.restrictionKinds] ).intersection(kwArgs): raise MalformedClass("Malformed owl:Restriction") return Restriction(**kwArgs) else: for s, p, o in graph.triples_choices( ( classOrIdentifier(c), [OWL.intersectionOf, OWL.unionOf, OWL.oneOf], None, ) ): if p == OWL.oneOf: return EnumeratedClass(classOrIdentifier(c), graph=graph) else: return BooleanClass(classOrIdentifier(c), operator=p, graph=graph) # assert (classOrIdentifier(c),RDF.type,OWL.Class) in graph return Class(classOrIdentifier(c), graph=graph, skipOWLClassMembership=True) class Class(AnnotatableTerms): """ 'General form' for classes: The Manchester Syntax (supported in Protege) is used as the basis for the form of this class See: http://owl-workshop.man.ac.uk/acceptedLong/submission_9.pdf: [Annotation] ‘Class:’ classID {Annotation ( (‘SubClassOf:’ ClassExpression) | (‘EquivalentTo’ ClassExpression) | (’DisjointWith’ ClassExpression)) } Appropriate excerpts from OWL Reference: ".. Subclass axioms provide us with partial definitions: they represent necessary but not sufficient conditions for establishing class membership of an individual." ".. A class axiom may contain (multiple) owl:equivalentClass statements" "..A class axiom may also contain (multiple) owl:disjointWith statements.." "..An owl:complementOf property links a class to precisely one class description." 
""" def _serialize(self, graph): for cl in self.subClassOf: CastClass(cl, self.graph).serialize(graph) for cl in self.equivalentClass: CastClass(cl, self.graph).serialize(graph) for cl in self.disjointWith: CastClass(cl, self.graph).serialize(graph) if self.complementOf: CastClass(self.complementOf, self.graph).serialize(graph) def serialize(self, graph): for fact in self.graph.triples((self.identifier, None, None)): graph.add(fact) self._serialize(graph) def setupNounAnnotations(self, nounAnnotations): if isinstance(nounAnnotations, tuple): CN_sgProp, CN_plProp = nounAnnotations else: CN_sgProp = nounAnnotations CN_plProp = nounAnnotations if CN_sgProp: self.CN_sgProp.extent = [ (self.identifier, self.handleAnnotation(CN_sgProp)) ] if CN_plProp: self.CN_plProp.extent = [ (self.identifier, self.handleAnnotation(CN_plProp)) ] def __init__( self, identifier=None, subClassOf=None, equivalentClass=None, disjointWith=None, complementOf=None, graph=None, skipOWLClassMembership=False, comment=None, nounAnnotations=None, nameAnnotation=None, nameIsLabel=False, ): super(Class, self).__init__(identifier, graph, nameAnnotation, nameIsLabel) if nounAnnotations: self.setupNounAnnotations(nounAnnotations) if ( not skipOWLClassMembership and (self.identifier, RDF.type, OWL.Class) not in self.graph and (self.identifier, RDF.type, OWL.Restriction) not in self.graph ): self.graph.add((self.identifier, RDF.type, OWL.Class)) self.subClassOf = subClassOf and subClassOf or [] self.equivalentClass = equivalentClass and equivalentClass or [] self.disjointWith = disjointWith and disjointWith or [] if complementOf: self.complementOf = complementOf self.comment = comment and comment or [] def _get_extent(self, graph=None): for member in (graph is None and self.graph or graph).subjects( predicate=RDF.type, object=self.identifier ): yield member def _set_extent(self, other): if not other: return for m in other: self.graph.add((classOrIdentifier(m), RDF.type, self.identifier)) 
@TermDeletionHelper(RDF.type) def _del_type(self): pass extent = property(_get_extent, _set_extent, _del_type) def _get_annotation(self, term=RDFS.label): for annotation in self.graph.objects(subject=self, predicate=term): yield annotation annotation = property(_get_annotation, lambda x: x) # type: ignore[arg-type,misc] def _get_extentQuery(self): return (Variable("CLASS"), RDF.type, self.identifier) def _set_extentQuery(self, other): pass extentQuery = property(_get_extentQuery, _set_extentQuery) def __hash__(self): """ >>> b=Class(OWL.Restriction) >>> c=Class(OWL.Restriction) >>> len(set([b,c])) 1 """ return hash(self.identifier) def __eq__(self, other): assert isinstance(other, Class), repr(other) return self.identifier == other.identifier def __iadd__(self, other): assert isinstance(other, Class) other.subClassOf = [self] return self def __isub__(self, other): assert isinstance(other, Class) self.graph.remove((classOrIdentifier(other), RDFS.subClassOf, self.identifier)) return self def __invert__(self): """ Shorthand for Manchester syntax's not operator """ return Class(complementOf=self) def __or__(self, other): """ Construct an anonymous class description consisting of the union of this class and 'other' and return it """ return BooleanClass( operator=OWL.unionOf, members=[self, other], graph=self.graph ) def __and__(self, other): """ Construct an anonymous class description consisting of the intersection of this class and 'other' and return it >>> exNs = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', exNs, override=False) >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager Chaining 3 intersections >>> female = Class(exNs.Female, graph=g) >>> human = Class(exNs.Human, graph=g) >>> youngPerson = Class(exNs.YoungPerson, graph=g) >>> youngWoman = female & human & youngPerson >>> youngWoman #doctest: +SKIP ex:YoungPerson THAT ( 
ex:Female AND ex:Human ) >>> isinstance(youngWoman, BooleanClass) True >>> isinstance(youngWoman.identifier, BNode) True """ return BooleanClass( operator=OWL.intersectionOf, members=[self, other], graph=self.graph ) def _get_subClassOf(self): for anc in self.graph.objects( subject=self.identifier, predicate=RDFS.subClassOf ): yield Class(anc, graph=self.graph, skipOWLClassMembership=True) def _set_subClassOf(self, other): if not other: return for sc in other: self.graph.add((self.identifier, RDFS.subClassOf, classOrIdentifier(sc))) @TermDeletionHelper(RDFS.subClassOf) def _del_subClassOf(self): pass subClassOf = property(_get_subClassOf, _set_subClassOf, _del_subClassOf) def _get_equivalentClass(self): for ec in self.graph.objects( subject=self.identifier, predicate=OWL.equivalentClass ): yield Class(ec, graph=self.graph) def _set_equivalentClass(self, other): if not other: return for sc in other: self.graph.add( (self.identifier, OWL.equivalentClass, classOrIdentifier(sc)) ) @TermDeletionHelper(OWL.equivalentClass) def _del_equivalentClass(self): pass equivalentClass = property( _get_equivalentClass, _set_equivalentClass, _del_equivalentClass ) def _get_disjointWith(self): for dc in self.graph.objects( subject=self.identifier, predicate=OWL.disjointWith ): yield Class(dc, graph=self.graph) def _set_disjointWith(self, other): if not other: return for c in other: self.graph.add((self.identifier, OWL.disjointWith, classOrIdentifier(c))) @TermDeletionHelper(OWL.disjointWith) def _del_disjointWith(self): pass disjointWith = property(_get_disjointWith, _set_disjointWith, _del_disjointWith) def _get_complementOf(self): comp = list( self.graph.objects(subject=self.identifier, predicate=OWL.complementOf) ) if not comp: return None elif len(comp) == 1: return Class(comp[0], graph=self.graph) else: raise Exception(len(comp)) def _set_complementOf(self, other): if not other: return self.graph.add((self.identifier, OWL.complementOf, classOrIdentifier(other))) 
@TermDeletionHelper(OWL.complementOf) def _del_complementOf(self): pass complementOf = property(_get_complementOf, _set_complementOf, _del_complementOf) def _get_parents(self): """ computed attributes that returns a generator over taxonomic 'parents' by disjunction, conjunction, and subsumption >>> from rdflib.util import first >>> exNs = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', exNs, override=False) >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager >>> Individual.factoryGraph = g >>> brother = Class(exNs.Brother) >>> sister = Class(exNs.Sister) >>> sibling = brother | sister >>> sibling.identifier = exNs.Sibling >>> sibling #doctest: +SKIP ( ex:Brother OR ex:Sister ) >>> first(brother.parents) #doctest: +SKIP Class: ex:Sibling EquivalentTo: ( ex:Brother OR ex:Sister ) >>> parent = Class(exNs.Parent) >>> male = Class(exNs.Male) >>> father = parent & male >>> father.identifier = exNs.Father >>> list(father.parents) #doctest: +SKIP [Class: ex:Parent , Class: ex:Male ] """ for parent in itertools.chain(self.subClassOf, self.equivalentClass): yield parent link = first(self.factoryGraph.subjects(RDF.first, self.identifier)) if link: listSiblings = list(self.factoryGraph.transitive_subjects(RDF.rest, link)) if listSiblings: collectionHead = listSiblings[-1] else: collectionHead = link for disjCls in self.factoryGraph.subjects(OWL.unionOf, collectionHead): if isinstance(disjCls, URIRef): yield Class(disjCls, skipOWLClassMembership=True) for rdfList in self.factoryGraph.objects(self.identifier, OWL.intersectionOf): for member in OWLRDFListProxy([rdfList], graph=self.factoryGraph): if isinstance(member, URIRef): yield Class(member, skipOWLClassMembership=True) parents = property(_get_parents) def isPrimitive(self): if (self.identifier, RDF.type, OWL.Restriction) in self.graph: return False # sc = list(self.subClassOf) ec = 
list(self.equivalentClass) for boolClass, p, rdfList in self.graph.triples_choices( (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) ): ec.append(manchesterSyntax(rdfList, self.graph, boolean=p)) for e in ec: return False if self.complementOf: return False return True def subSumpteeIds(self): for s in self.graph.subjects(predicate=RDFS.subClassOf, object=self.identifier): yield s # def __iter__(self): # for s in self.graph.subjects( # predicate=RDFS.subClassOf,object=self.identifier): # yield Class(s,skipOWLClassMembership=True) def __repr__(self, full=False, normalization=True): """ Returns the Manchester Syntax equivalent for this class """ exprs = [] sc = list(self.subClassOf) ec = list(self.equivalentClass) for boolClass, p, rdfList in self.graph.triples_choices( (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) ): ec.append(manchesterSyntax(rdfList, self.graph, boolean=p)) dc = list(self.disjointWith) c = self.complementOf if c: dc.append(c) klassKind = "" label = list(self.graph.objects(self.identifier, RDFS.label)) label = label and "(" + label[0] + ")" or "" if sc: if full: scJoin = "\n " else: scJoin = ", " necStatements = [ isinstance(s, Class) and isinstance(self.identifier, BNode) and repr(CastClass(s, self.graph)) or # repr(BooleanClass(classOrIdentifier(s), # operator=None, # graph=self.graph)) or manchesterSyntax(classOrIdentifier(s), self.graph) for s in sc ] if necStatements: klassKind = "Primitive Type %s" % label exprs.append( "SubClassOf: %s" % scJoin.join([str(n) for n in necStatements]) ) if full: exprs[-1] = "\n " + exprs[-1] if ec: nec_SuffStatements = [ isinstance(s, str) and s or manchesterSyntax(classOrIdentifier(s), self.graph) for s in ec ] if nec_SuffStatements: klassKind = "A Defined Class %s" % label exprs.append("EquivalentTo: %s" % ", ".join(nec_SuffStatements)) if full: exprs[-1] = "\n " + exprs[-1] if dc: exprs.append( "DisjointWith %s\n" % "\n ".join( [manchesterSyntax(classOrIdentifier(s), self.graph) for s 
in dc] ) ) if full: exprs[-1] = "\n " + exprs[-1] descr = list(self.graph.objects(self.identifier, RDFS.comment)) if full and normalization: klassDescr = ( klassKind and "\n ## %s ##" % klassKind + (descr and "\n %s" % descr[0] or "") + " . ".join(exprs) or " . ".join(exprs) ) else: klassDescr = ( full and (descr and "\n %s" % descr[0] or "") or "" + " . ".join(exprs) ) return ( isinstance(self.identifier, BNode) and "Some Class " or "Class: %s " % self.qname ) + klassDescr class OWLRDFListProxy(object): def __init__(self, rdfList, members=None, graph=None): if graph: self.graph = graph members = members and members or [] if rdfList: self._rdfList = Collection(self.graph, rdfList[0]) for member in members: if member not in self._rdfList: self._rdfList.append(classOrIdentifier(member)) else: self._rdfList = Collection( self.graph, BNode(), [classOrIdentifier(m) for m in members] ) self.graph.add((self.identifier, self._operator, self._rdfList.uri)) def __eq__(self, other): """ Equivalence of boolean class constructors is determined by equivalence of its members """ assert isinstance(other, Class), repr(other) + repr(type(other)) if isinstance(other, BooleanClass): length = len(self) if length != len(other): return False else: for idx in range(length): if self[idx] != other[idx]: return False return True else: return self.identifier == other.identifier # Redirect python list accessors to the underlying Collection instance def __len__(self): return len(self._rdfList) def index(self, item): return self._rdfList.index(classOrIdentifier(item)) def __getitem__(self, key): return self._rdfList[key] def __setitem__(self, key, value): self._rdfList[key] = classOrIdentifier(value) def __delitem__(self, key): del self._rdfList[key] def clear(self): self._rdfList.clear() def __iter__(self): for item in self._rdfList: yield item def __contains__(self, item): for i in self._rdfList: if i == classOrIdentifier(item): return 1 return 0 def append(self, item): 
self._rdfList.append(item) def __iadd__(self, other): self._rdfList.append(classOrIdentifier(other)) return self class EnumeratedClass(OWLRDFListProxy, Class): """ Class for owl:oneOf forms: OWL Abstract Syntax is used axiom ::= 'EnumeratedClass(' classID ['Deprecated'] { annotation } { individualID } ')' >>> exNs = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', exNs, override=False) >>> namespace_manager.bind('owl', OWL, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager >>> Individual.factoryGraph = g >>> ogbujiBros = EnumeratedClass(exNs.ogbujicBros, ... members=[exNs.chime, ... exNs.uche, ... exNs.ejike]) >>> ogbujiBros #doctest: +SKIP { ex:chime ex:uche ex:ejike } >>> col = Collection(g, first( ... g.objects(predicate=OWL.oneOf, subject=ogbujiBros.identifier))) >>> [g.qname(item) for item in col] [u'ex:chime', u'ex:uche', u'ex:ejike'] >>> print(g.serialize(format='n3')) #doctest: +SKIP @prefix ex: . @prefix owl: . @prefix rdf: . ex:ogbujicBros a owl:Class; owl:oneOf ( ex:chime ex:uche ex:ejike ) . 
""" _operator = OWL.oneOf def isPrimitive(self): return False def __init__(self, identifier=None, members=None, graph=None): Class.__init__(self, identifier, graph=graph) members = members and members or [] rdfList = list(self.graph.objects(predicate=OWL.oneOf, subject=self.identifier)) OWLRDFListProxy.__init__(self, rdfList, members) def __repr__(self): """ Returns the Manchester Syntax equivalent for this class """ return manchesterSyntax(self._rdfList.uri, self.graph, boolean=self._operator) def serialize(self, graph): clonedList = Collection(graph, BNode()) for cl in self._rdfList: clonedList.append(cl) CastClass(cl, self.graph).serialize(graph) graph.add((self.identifier, self._operator, clonedList.uri)) for s, p, o in self.graph.triples((self.identifier, None, None)): if p != self._operator: graph.add((s, p, o)) self._serialize(graph) BooleanPredicates = [OWL.intersectionOf, OWL.unionOf] class BooleanClassExtentHelper: """ >>> testGraph = Graph() >>> Individual.factoryGraph = testGraph >>> EX = Namespace("http://example.com/") >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', EX, override=False) >>> testGraph.namespace_manager = namespace_manager >>> fire = Class(EX.Fire) >>> water = Class(EX.Water) >>> testClass = BooleanClass(members=[fire, water]) >>> testClass2 = BooleanClass( ... operator=OWL.unionOf, members=[fire, water]) >>> for c in BooleanClass.getIntersections(): ... print(c) #doctest: +SKIP ( ex:Fire AND ex:Water ) >>> for c in BooleanClass.getUnions(): ... 
print(c) #doctest: +SKIP ( ex:Fire OR ex:Water ) """ def __init__(self, operator): self.operator = operator def __call__(self, f): def _getExtent(): for c in Individual.factoryGraph.subjects(self.operator): yield BooleanClass(c, operator=self.operator) return _getExtent class Callable: def __init__(self, anycallable): self.__call__ = anycallable class BooleanClass(OWLRDFListProxy, Class): """ See: http://www.w3.org/TR/owl-ref/#Boolean owl:complementOf is an attribute of Class, however """ @BooleanClassExtentHelper(OWL.intersectionOf) @Callable def getIntersections(): # type: ignore[misc] pass getIntersections = Callable(getIntersections) @BooleanClassExtentHelper(OWL.unionOf) @Callable def getUnions(): # type: ignore[misc] pass getUnions = Callable(getUnions) def __init__( self, identifier=None, operator=OWL.intersectionOf, members=None, graph=None ): if operator is None: props = [] for s, p, o in graph.triples_choices( (identifier, [OWL.intersectionOf, OWL.unionOf], None) ): props.append(p) operator = p assert len(props) == 1, repr(props) Class.__init__(self, identifier, graph=graph) assert operator in [OWL.intersectionOf, OWL.unionOf], str(operator) self._operator = operator rdfList = list(self.graph.objects(predicate=operator, subject=self.identifier)) assert ( not members or not rdfList ), "This is a previous boolean class description!" 
+ repr( Collection(self.graph, rdfList[0]).n3() ) OWLRDFListProxy.__init__(self, rdfList, members) def copy(self): """ Create a copy of this class """ copyOfClass = BooleanClass( operator=self._operator, members=list(self), graph=self.graph ) return copyOfClass def serialize(self, graph): clonedList = Collection(graph, BNode()) for cl in self._rdfList: clonedList.append(cl) CastClass(cl, self.graph).serialize(graph) graph.add((self.identifier, self._operator, clonedList.uri)) for s, p, o in self.graph.triples((self.identifier, None, None)): if p != self._operator: graph.add((s, p, o)) self._serialize(graph) def isPrimitive(self): return False def changeOperator(self, newOperator): """ Converts a unionOf / intersectionOf class expression into one that instead uses the given operator >>> testGraph = Graph() >>> Individual.factoryGraph = testGraph >>> EX = Namespace("http://example.com/") >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', EX, override=False) >>> testGraph.namespace_manager = namespace_manager >>> fire = Class(EX.Fire) >>> water = Class(EX.Water) >>> testClass = BooleanClass(members=[fire,water]) >>> testClass #doctest: +SKIP ( ex:Fire AND ex:Water ) >>> testClass.changeOperator(OWL.unionOf) >>> testClass #doctest: +SKIP ( ex:Fire OR ex:Water ) >>> try: testClass.changeOperator(OWL.unionOf) ... except Exception as e: print(e) The new operator is already being used! """ assert newOperator != self._operator, "The new operator is already being used!" 
self.graph.remove((self.identifier, self._operator, self._rdfList.uri)) self.graph.add((self.identifier, newOperator, self._rdfList.uri)) self._operator = newOperator def __repr__(self): """ Returns the Manchester Syntax equivalent for this class """ return manchesterSyntax(self._rdfList.uri, self.graph, boolean=self._operator) def __or__(self, other): """ Adds other to the list and returns self """ assert self._operator == OWL.unionOf self._rdfList.append(classOrIdentifier(other)) return self def AllDifferent(members): """ DisjointClasses(' description description { description } ')' """ pass class Restriction(Class): """ restriction ::= 'restriction(' datavaluedPropertyID dataRestrictionComponent { dataRestrictionComponent } ')' | 'restriction(' individualvaluedPropertyID individualRestrictionComponent { individualRestrictionComponent } ')' """ restrictionKinds = [ OWL.allValuesFrom, OWL.someValuesFrom, OWL.hasValue, OWL.maxCardinality, OWL.minCardinality, ] def __init__( self, onProperty, graph=Graph(), allValuesFrom=None, someValuesFrom=None, value=None, cardinality=None, maxCardinality=None, minCardinality=None, identifier=None, ): super(Restriction, self).__init__( identifier, graph=graph, skipOWLClassMembership=True ) if ( self.identifier, OWL.onProperty, propertyOrIdentifier(onProperty), ) not in graph: graph.add( (self.identifier, OWL.onProperty, propertyOrIdentifier(onProperty)) ) self.onProperty = onProperty restrTypes = [ (allValuesFrom, OWL.allValuesFrom), (someValuesFrom, OWL.someValuesFrom), (value, OWL.hasValue), (cardinality, OWL.cardinality), (maxCardinality, OWL.maxCardinality), (minCardinality, OWL.minCardinality), ] validRestrProps = [(i, oTerm) for (i, oTerm) in restrTypes if i] assert len(validRestrProps) restrictionRange, restrictionType = validRestrProps.pop() self.restrictionType = restrictionType if isinstance(restrictionRange, Identifier): self.restrictionRange = restrictionRange elif isinstance(restrictionRange, Class): 
self.restrictionRange = classOrIdentifier(restrictionRange) else: self.restrictionRange = first( self.graph.objects(self.identifier, restrictionType) ) if (self.identifier, restrictionType, self.restrictionRange) not in self.graph: self.graph.add((self.identifier, restrictionType, self.restrictionRange)) assert self.restrictionRange is not None, Class(self.identifier) if (self.identifier, RDF.type, OWL.Restriction) not in self.graph: self.graph.add((self.identifier, RDF.type, OWL.Restriction)) self.graph.remove((self.identifier, RDF.type, OWL.Class)) def serialize(self, graph): """ >>> g1 = Graph() >>> g2 = Graph() >>> EX = Namespace("http://example.com/") >>> namespace_manager = NamespaceManager(g1) >>> namespace_manager.bind('ex', EX, override=False) >>> namespace_manager = NamespaceManager(g2) >>> namespace_manager.bind('ex', EX, override=False) >>> Individual.factoryGraph = g1 >>> prop = Property(EX.someProp, baseType=OWL.DatatypeProperty) >>> restr1 = (Property( ... EX.someProp, ... baseType=OWL.DatatypeProperty)) | some | (Class(EX.Foo)) >>> restr1 #doctest: +SKIP ( ex:someProp SOME ex:Foo ) >>> restr1.serialize(g2) >>> Individual.factoryGraph = g2 >>> list(Property( ... EX.someProp,baseType=None).type ... 
) #doctest: +NORMALIZE_WHITESPACE +SKIP [rdflib.term.URIRef( u'http://www.w3.org/2002/07/owl#DatatypeProperty')] """ Property(self.onProperty, graph=self.graph, baseType=None).serialize(graph) for s, p, o in self.graph.triples((self.identifier, None, None)): graph.add((s, p, o)) if p in [OWL.allValuesFrom, OWL.someValuesFrom]: CastClass(o, self.graph).serialize(graph) def isPrimitive(self): return False def __hash__(self): return hash((self.onProperty, self.restrictionRange)) def __eq__(self, other): """ Equivalence of restrictions is determined by equivalence of the property in question and the restriction 'range' """ assert isinstance(other, Class), repr(other) + repr(type(other)) if isinstance(other, Restriction): return ( other.onProperty == self.onProperty and other.restrictionRange == self.restrictionRange ) else: return False def _get_onProperty(self): return list( self.graph.objects(subject=self.identifier, predicate=OWL.onProperty) )[0] def _set_onProperty(self, prop): triple = (self.identifier, OWL.onProperty, propertyOrIdentifier(prop)) if not prop: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.onProperty) def _del_onProperty(self): pass onProperty = property(_get_onProperty, _set_onProperty, _del_onProperty) def _get_allValuesFrom(self): for i in self.graph.objects( subject=self.identifier, predicate=OWL.allValuesFrom ): return Class(i, graph=self.graph) return None def _set_allValuesFrom(self, other): triple = (self.identifier, OWL.allValuesFrom, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.allValuesFrom) def _del_allValuesFrom(self): pass allValuesFrom = property(_get_allValuesFrom, _set_allValuesFrom, _del_allValuesFrom) def _get_someValuesFrom(self): for i in self.graph.objects( subject=self.identifier, predicate=OWL.someValuesFrom ): return Class(i, graph=self.graph) return None def _set_someValuesFrom(self, 
other): triple = (self.identifier, OWL.someValuesFrom, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.someValuesFrom) def _del_someValuesFrom(self): pass someValuesFrom = property( _get_someValuesFrom, _set_someValuesFrom, _del_someValuesFrom ) def _get_hasValue(self): for i in self.graph.objects(subject=self.identifier, predicate=OWL.hasValue): return Class(i, graph=self.graph) return None def _set_hasValue(self, other): triple = (self.identifier, OWL.hasValue, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.hasValue) def _del_hasValue(self): pass hasValue = property(_get_hasValue, _set_hasValue, _del_hasValue) def _get_cardinality(self): for i in self.graph.objects(subject=self.identifier, predicate=OWL.cardinality): return Class(i, graph=self.graph) return None def _set_cardinality(self, other): triple = (self.identifier, OWL.cardinality, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.cardinality) def _del_cardinality(self): pass cardinality = property(_get_cardinality, _set_cardinality, _del_cardinality) def _get_maxCardinality(self): for i in self.graph.objects( subject=self.identifier, predicate=OWL.maxCardinality ): return Class(i, graph=self.graph) return None def _set_maxCardinality(self, other): triple = (self.identifier, OWL.maxCardinality, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.maxCardinality) def _del_maxCardinality(self): pass maxCardinality = property( _get_maxCardinality, _set_maxCardinality, _del_maxCardinality ) def _get_minCardinality(self): for i in self.graph.objects( subject=self.identifier, predicate=OWL.minCardinality ): return Class(i, graph=self.graph) return None def 
_set_minCardinality(self, other): triple = (self.identifier, OWL.minCardinality, classOrIdentifier(other)) if not other: return elif triple in self.graph: return else: self.graph.set(triple) @TermDeletionHelper(OWL.minCardinality) def _del_minCardinality(self): pass minCardinality = property( _get_minCardinality, _set_minCardinality, _del_minCardinality ) def restrictionKind(self): for p in self.graph.triple_choices( (self.identifier, self.restrictionKinds, None) ): return p.split(OWL)[-1] raise def __repr__(self): """ Returns the Manchester Syntax equivalent for this restriction """ return manchesterSyntax(self.identifier, self.graph) # Infix Operators # some = Infix( lambda prop, _class: Restriction(prop, graph=_class.graph, someValuesFrom=_class) ) only = Infix( lambda prop, _class: Restriction(prop, graph=_class.graph, allValuesFrom=_class) ) max = Infix( lambda prop, _class: Restriction(prop, graph=prop.graph, maxCardinality=_class) ) min = Infix( lambda prop, _class: Restriction(prop, graph=prop.graph, minCardinality=_class) ) exactly = Infix( lambda prop, _class: Restriction(prop, graph=prop.graph, cardinality=_class) ) value = Infix(lambda prop, _class: Restriction(prop, graph=prop.graph, value=_class)) PropertyAbstractSyntax = """ %s( %s { %s } %s { 'super(' datavaluedPropertyID ')'} ['Functional'] { domain( %s ) } { range( %s ) } )""" class Property(AnnotatableTerms): """ axiom ::= 'DatatypeProperty(' datavaluedPropertyID ['Deprecated'] { annotation } { 'super(' datavaluedPropertyID ')'} ['Functional'] { 'domain(' description ')' } { 'range(' dataRange ')' } ')' | 'ObjectProperty(' individualvaluedPropertyID ['Deprecated'] { annotation } { 'super(' individualvaluedPropertyID ')' } [ 'inverseOf(' individualvaluedPropertyID ')' ] [ 'Symmetric' ] [ 'Functional' | 'InverseFunctional' | 'Functional' 'InverseFunctional' | 'Transitive' ] { 'domain(' description ')' } { 'range(' description ')' } ') """ def setupVerbAnnotations(self, verbAnnotations): if 
isinstance(verbAnnotations, tuple): TV_sgProp, TV_plProp, TV_vbg = verbAnnotations else: TV_sgProp = verbAnnotations TV_plProp = verbAnnotations TV_vbg = verbAnnotations if TV_sgProp: self.TV_sgProp.extent = [ (self.identifier, self.handleAnnotation(TV_sgProp)) ] if TV_plProp: self.TV_plProp.extent = [ (self.identifier, self.handleAnnotation(TV_plProp)) ] if TV_vbg: self.TV_vbgProp.extent = [(self.identifier, self.handleAnnotation(TV_vbg))] def __init__( self, identifier=None, graph=None, baseType=OWL.ObjectProperty, subPropertyOf=None, domain=None, range=None, inverseOf=None, otherType=None, equivalentProperty=None, comment=None, verbAnnotations=None, nameAnnotation=None, nameIsLabel=False, ): super(Property, self).__init__(identifier, graph, nameAnnotation, nameIsLabel) if verbAnnotations: self.setupVerbAnnotations(verbAnnotations) assert not isinstance(self.identifier, BNode) if baseType is None: # None give, determine via introspection self._baseType = first(Individual(self.identifier, graph=self.graph).type) else: if (self.identifier, RDF.type, baseType) not in self.graph: self.graph.add((self.identifier, RDF.type, baseType)) self._baseType = baseType self.subPropertyOf = subPropertyOf self.inverseOf = inverseOf self.domain = domain self.range = range self.comment = comment and comment or [] def serialize(self, graph): for fact in self.graph.triples((self.identifier, None, None)): graph.add(fact) for p in itertools.chain(self.subPropertyOf, self.inverseOf): p.serialize(graph) for c in itertools.chain(self.domain, self.range): CastClass(c, self.graph).serialize(graph) def _get_extent(self, graph=None): for triple in (graph is None and self.graph or graph).triples( (None, self.identifier, None) ): yield triple def _set_extent(self, other): if not other: return for subj, obj in other: self.graph.add((subj, self.identifier, obj)) extent = property(_get_extent, _set_extent) def __repr__(self): rt = [] if OWL.ObjectProperty in self.type: rt.append( "ObjectProperty( 
%s annotation(%s)" % (self.qname, first(self.comment) and first(self.comment) or "") ) if first(self.inverseOf): twoLinkInverse = first(first(self.inverseOf).inverseOf) if twoLinkInverse and twoLinkInverse.identifier == self.identifier: inverseRepr = first(self.inverseOf).qname else: inverseRepr = repr(first(self.inverseOf)) rt.append( " inverseOf( %s )%s" % ( inverseRepr, OWL.SymmetricProperty in self.type and " Symmetric" or "", ) ) for s, p, roleType in self.graph.triples_choices( ( self.identifier, RDF.type, [ OWL.FunctionalProperty, OWL.InverseFunctionalProperty, OWL.TransitiveProperty, ], ) ): rt.append(str(roleType.split(OWL)[-1])) else: rt.append( "DatatypeProperty( %s %s" % (self.qname, first(self.comment) and first(self.comment) or "") ) for s, p, roleType in self.graph.triples( (self.identifier, RDF.type, OWL.FunctionalProperty) ): rt.append(" Functional") def canonicalName(term, g): normalizedName = classOrIdentifier(term) if isinstance(normalizedName, BNode): return term elif normalizedName.startswith(XSD): return str(term) elif first( g.triples_choices( (normalizedName, [OWL.unionOf, OWL.intersectionOf], None) ) ): return repr(term) else: return str(term.qname) rt.append( " ".join( [ " super( %s )" % canonicalName(superP, self.graph) for superP in self.subPropertyOf ] ) ) rt.append( " ".join( [ " domain( %s )" % canonicalName(domain, self.graph) for domain in self.domain ] ) ) rt.append( " ".join( [ " range( %s )" % canonicalName(range, self.graph) for range in self.range ] ) ) rt = "\n".join([expr for expr in rt if expr]) rt += "\n)" return str(rt).encode("utf-8") def _get_subPropertyOf(self): for anc in self.graph.objects( subject=self.identifier, predicate=RDFS.subPropertyOf ): yield Property(anc, graph=self.graph, baseType=None) def _set_subPropertyOf(self, other): if not other: return for sP in other: self.graph.add((self.identifier, RDFS.subPropertyOf, classOrIdentifier(sP))) @TermDeletionHelper(RDFS.subPropertyOf) def _del_subPropertyOf(self): 
pass subPropertyOf = property(_get_subPropertyOf, _set_subPropertyOf, _del_subPropertyOf) def _get_inverseOf(self): for anc in self.graph.objects(subject=self.identifier, predicate=OWL.inverseOf): yield Property(anc, graph=self.graph, baseType=None) def _set_inverseOf(self, other): if not other: return self.graph.add((self.identifier, OWL.inverseOf, classOrIdentifier(other))) @TermDeletionHelper(OWL.inverseOf) def _del_inverseOf(self): pass inverseOf = property(_get_inverseOf, _set_inverseOf, _del_inverseOf) def _get_domain(self): for dom in self.graph.objects(subject=self.identifier, predicate=RDFS.domain): yield Class(dom, graph=self.graph) def _set_domain(self, other): if not other: return if isinstance(other, (Individual, Identifier)): self.graph.add((self.identifier, RDFS.domain, classOrIdentifier(other))) else: for dom in other: self.graph.add((self.identifier, RDFS.domain, classOrIdentifier(dom))) @TermDeletionHelper(RDFS.domain) def _del_domain(self): pass domain = property(_get_domain, _set_domain, _del_domain) def _get_range(self): for ran in self.graph.objects(subject=self.identifier, predicate=RDFS.range): yield Class(ran, graph=self.graph) def _set_range(self, ranges): if not ranges: return if isinstance(ranges, (Individual, Identifier)): self.graph.add((self.identifier, RDFS.range, classOrIdentifier(ranges))) else: for range in ranges: self.graph.add((self.identifier, RDFS.range, classOrIdentifier(range))) @TermDeletionHelper(RDFS.range) def _del_range(self): pass range = property(_get_range, _set_range, _del_range) def replace(self, other): # extension = [] for s, p, o in self.extent: self.graph.add((s, propertyOrIdentifier(other), o)) self.graph.remove((None, self.identifier, None)) def CommonNSBindings(graph, additionalNS={}): """ Takes a graph and binds the common namespaces (rdf,rdfs, & owl) """ namespace_manager = NamespaceManager(graph) namespace_manager.bind("rdfs", RDFS) namespace_manager.bind("rdf", RDF) namespace_manager.bind("owl", OWL) 
for prefix, uri in list(additionalNS.items()): namespace_manager.bind(prefix, uri, override=False) graph.namespace_manager = namespace_manager def test(): import doctest doctest.testmod() if __name__ == "__main__": test() rdflib-6.1.1/rdflib/graph.py000066400000000000000000002425571415774155300157310ustar00rootroot00000000000000from typing import ( IO, Any, Iterable, Optional, Union, Type, cast, overload, Generator, Tuple, ) import logging from warnings import warn import random from rdflib.namespace import Namespace, RDF from rdflib import plugin, exceptions, query, namespace import rdflib.term from rdflib.term import BNode, Node, URIRef, Literal, Genid from rdflib.paths import Path from rdflib.store import Store from rdflib.serializer import Serializer from rdflib.parser import Parser, create_input_source from rdflib.namespace import NamespaceManager from rdflib.resource import Resource from rdflib.collection import Collection import rdflib.util # avoid circular dependency from rdflib.exceptions import ParserError import os import shutil import tempfile import pathlib from io import BytesIO from urllib.parse import urlparse from urllib.request import url2pathname assert Literal # avoid warning assert Namespace # avoid warning logger = logging.getLogger(__name__) # Type aliases to make unpacking what's going on a little more human friendly ContextNode = Union[BNode, URIRef] DatasetQuad = Tuple[Node, URIRef, Node, Optional[ContextNode]] __doc__ = """\ RDFLib defines the following kinds of Graphs: * :class:`~rdflib.graph.Graph` * :class:`~rdflib.graph.QuotedGraph` * :class:`~rdflib.graph.ConjunctiveGraph` * :class:`~rdflib.graph.Dataset` Graph ----- An RDF graph is a set of RDF triples. Graphs support the python ``in`` operator, as well as iteration and some operations like union, difference and intersection. 
see :class:`~rdflib.graph.Graph` Conjunctive Graph ----------------- A Conjunctive Graph is the most relevant collection of graphs that are considered to be the boundary for closed world assumptions. This boundary is equivalent to that of the store instance (which is itself uniquely identified and distinct from other instances of :class:`Store` that signify other Conjunctive Graphs). It is equivalent to all the named graphs within it and associated with a ``_default_`` graph which is automatically assigned a :class:`BNode` for an identifier - if one isn't given. see :class:`~rdflib.graph.ConjunctiveGraph` Quoted graph ------------ The notion of an RDF graph [14] is extended to include the concept of a formula node. A formula node may occur wherever any other kind of node can appear. Associated with a formula node is an RDF graph that is completely disjoint from all other graphs; i.e. has no nodes in common with any other graph. (It may contain the same labels as other RDF graphs; because this is, by definition, a separate graph, considerations of tidiness do not apply between the graph at a formula node and any other graph.) This is intended to map the idea of "{ N3-expression }" that is used by N3 into an RDF graph upon which RDF semantics is defined. see :class:`~rdflib.graph.QuotedGraph` Dataset ------- The RDF 1.1 Dataset, a small extension to the Conjunctive Graph. The primary term is "graphs in the datasets" and not "contexts with quads" so there is a separate method to set/retrieve a graph in a dataset and to operate with dataset graphs. As a consequence of this approach, dataset graphs cannot be identified with blank nodes, a name is always required (RDFLib will automatically add a name if one is not provided at creation time). This implementation includes a convenience method to directly add a single quad to a dataset graph. 
see :class:`~rdflib.graph.Dataset` Working with graphs =================== Instantiating Graphs with default store (Memory) and default identifier (a BNode): >>> g = Graph() >>> g.store.__class__ >>> g.identifier.__class__ Instantiating Graphs with a Memory store and an identifier - : >>> g = Graph('Memory', URIRef("http://rdflib.net")) >>> g.identifier rdflib.term.URIRef('http://rdflib.net') >>> str(g) # doctest: +NORMALIZE_WHITESPACE " a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label 'Memory']." Creating a ConjunctiveGraph - The top level container for all named Graphs in a "database": >>> g = ConjunctiveGraph() >>> str(g.default_context) "[a rdfg:Graph;rdflib:storage [a rdflib:Store;rdfs:label 'Memory']]." Adding / removing reified triples to Graph and iterating over it directly or via triple pattern: >>> g = Graph() >>> statementId = BNode() >>> print(len(g)) 0 >>> g.add((statementId, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS )> >>> g.add((statementId, RDF.subject, ... URIRef("http://rdflib.net/store/ConjunctiveGraph"))) # doctest: +ELLIPSIS )> >>> g.add((statementId, RDF.predicate, namespace.RDFS.label)) # doctest: +ELLIPSIS )> >>> g.add((statementId, RDF.object, Literal("Conjunctive Graph"))) # doctest: +ELLIPSIS )> >>> print(len(g)) 4 >>> for s, p, o in g: ... print(type(s)) ... >>> for s, p, o in g.triples((None, RDF.object, None)): ... print(o) ... Conjunctive Graph >>> g.remove((statementId, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS )> >>> print(len(g)) 3 ``None`` terms in calls to :meth:`~rdflib.graph.Graph.triples` can be thought of as "open variables". Graph support set-theoretic operators, you can add/subtract graphs, as well as intersection (with multiplication operator g1*g2) and xor (g1 ^ g2). Note that BNode IDs are kept when doing set-theoretic operations, this may or may not be what you want. Two named graphs within the same application probably want share BNode IDs, two graphs with data from different sources probably not. 
If your BNode IDs are all generated by RDFLib they are UUIDs and unique. >>> g1 = Graph() >>> g2 = Graph() >>> u = URIRef("http://example.com/foo") >>> g1.add([u, namespace.RDFS.label, Literal("foo")]) # doctest: +ELLIPSIS )> >>> g1.add([u, namespace.RDFS.label, Literal("bar")]) # doctest: +ELLIPSIS )> >>> g2.add([u, namespace.RDFS.label, Literal("foo")]) # doctest: +ELLIPSIS )> >>> g2.add([u, namespace.RDFS.label, Literal("bing")]) # doctest: +ELLIPSIS )> >>> len(g1 + g2) # adds bing as label 3 >>> len(g1 - g2) # removes foo 1 >>> len(g1 * g2) # only foo 1 >>> g1 += g2 # now g1 contains everything Graph Aggregation - ConjunctiveGraphs and ReadOnlyGraphAggregate within the same store: >>> store = plugin.get("Memory", Store)() >>> g1 = Graph(store) >>> g2 = Graph(store) >>> g3 = Graph(store) >>> stmt1 = BNode() >>> stmt2 = BNode() >>> stmt3 = BNode() >>> g1.add((stmt1, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS )> >>> g1.add((stmt1, RDF.subject, ... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS )> >>> g1.add((stmt1, RDF.predicate, namespace.RDFS.label)) # doctest: +ELLIPSIS )> >>> g1.add((stmt1, RDF.object, Literal('Conjunctive Graph'))) # doctest: +ELLIPSIS )> >>> g2.add((stmt2, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS )> >>> g2.add((stmt2, RDF.subject, ... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS )> >>> g2.add((stmt2, RDF.predicate, RDF.type)) # doctest: +ELLIPSIS )> >>> g2.add((stmt2, RDF.object, namespace.RDFS.Class)) # doctest: +ELLIPSIS )> >>> g3.add((stmt3, RDF.type, RDF.Statement)) # doctest: +ELLIPSIS )> >>> g3.add((stmt3, RDF.subject, ... URIRef('http://rdflib.net/store/ConjunctiveGraph'))) # doctest: +ELLIPSIS )> >>> g3.add((stmt3, RDF.predicate, namespace.RDFS.comment)) # doctest: +ELLIPSIS )> >>> g3.add((stmt3, RDF.object, Literal( ... 'The top-level aggregate graph - The sum ' + ... 
'of all named graphs within a Store'))) # doctest: +ELLIPSIS )> >>> len(list(ConjunctiveGraph(store).subjects(RDF.type, RDF.Statement))) 3 >>> len(list(ReadOnlyGraphAggregate([g1,g2]).subjects( ... RDF.type, RDF.Statement))) 2 ConjunctiveGraphs have a :meth:`~rdflib.graph.ConjunctiveGraph.quads` method which returns quads instead of triples, where the fourth item is the Graph (or subclass thereof) instance in which the triple was asserted: >>> uniqueGraphNames = set( ... [graph.identifier for s, p, o, graph in ConjunctiveGraph(store ... ).quads((None, RDF.predicate, None))]) >>> len(uniqueGraphNames) 3 >>> unionGraph = ReadOnlyGraphAggregate([g1, g2]) >>> uniqueGraphNames = set( ... [graph.identifier for s, p, o, graph in unionGraph.quads( ... (None, RDF.predicate, None))]) >>> len(uniqueGraphNames) 2 Parsing N3 from a string >>> g2 = Graph() >>> src = ''' ... @prefix rdf: . ... @prefix rdfs: . ... [ a rdf:Statement ; ... rdf:subject ; ... rdf:predicate rdfs:label; ... rdf:object "Conjunctive Graph" ] . ... ''' >>> g2 = g2.parse(data=src, format="n3") >>> print(len(g2)) 4 Using Namespace class: >>> RDFLib = Namespace("http://rdflib.net/") >>> RDFLib.ConjunctiveGraph rdflib.term.URIRef('http://rdflib.net/ConjunctiveGraph') >>> RDFLib["Graph"] rdflib.term.URIRef('http://rdflib.net/Graph') """ __all__ = [ "Graph", "ConjunctiveGraph", "QuotedGraph", "Seq", "ModificationException", "Dataset", "UnSupportedAggregateOperation", "ReadOnlyGraphAggregate", "BatchAddGraph", ] class Graph(Node): """An RDF Graph The constructor accepts one argument, the "store" that will be used to store the graph data (see the "store" package for stores currently shipped with rdflib). Stores can be context-aware or unaware. Unaware stores take up (some) less space but cannot support features that require context, such as true merging/demerging of sub-graphs and provenance. The Graph constructor can take an identifier which identifies the Graph by name. 
If none is given, the graph is assigned a BNode for its identifier. For more on named graphs, see: http://www.w3.org/2004/03/trix/ """ def __init__( self, store: Union[Store, str] = "default", identifier: Optional[Union[Node, str]] = None, namespace_manager: Optional[NamespaceManager] = None, base: Optional[str] = None, ): super(Graph, self).__init__() self.base = base self.__identifier: Node self.__identifier = identifier or BNode() # type: ignore[assignment] if not isinstance(self.__identifier, Node): self.__identifier = URIRef(self.__identifier) # type: ignore[unreachable] self.__store: Store if not isinstance(store, Store): # TODO: error handling self.__store = store = plugin.get(store, Store)() else: self.__store = store self.__namespace_manager = namespace_manager self.context_aware = False self.formula_aware = False self.default_union = False def __get_store(self): return self.__store store = property(__get_store) # read-only attr def __get_identifier(self): return self.__identifier identifier = property(__get_identifier) # read-only attr def _get_namespace_manager(self): if self.__namespace_manager is None: self.__namespace_manager = NamespaceManager(self) return self.__namespace_manager def _set_namespace_manager(self, nm): self.__namespace_manager = nm namespace_manager = property( _get_namespace_manager, _set_namespace_manager, doc="this graph's namespace-manager", ) def __repr__(self): return "" % (self.identifier, type(self)) def __str__(self): if isinstance(self.identifier, URIRef): return ( "%s a rdfg:Graph;rdflib:storage " + "[a rdflib:Store;rdfs:label '%s']." ) % (self.identifier.n3(), self.store.__class__.__name__) else: return ( "[a rdfg:Graph;rdflib:storage " + "[a rdflib:Store;rdfs:label '%s']]." 
) % self.store.__class__.__name__ def toPython(self): return self def destroy(self, configuration): """Destroy the store identified by `configuration` if supported""" self.__store.destroy(configuration) return self # Transactional interfaces (optional) def commit(self): """Commits active transactions""" self.__store.commit() return self def rollback(self): """Rollback active transactions""" self.__store.rollback() return self def open(self, configuration, create=False): """Open the graph store Might be necessary for stores that require opening a connection to a database or acquiring some resource. """ return self.__store.open(configuration, create) def close(self, commit_pending_transaction=False): """Close the graph store Might be necessary for stores that require closing a connection to a database or releasing some resource. """ return self.__store.close(commit_pending_transaction=commit_pending_transaction) def add(self, triple: Tuple[Node, Node, Node]): """Add a triple with self as context""" s, p, o = triple assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,) assert isinstance(p, Node), "Predicate %s must be an rdflib term" % (p,) assert isinstance(o, Node), "Object %s must be an rdflib term" % (o,) self.__store.add((s, p, o), self, quoted=False) return self def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): """Add a sequence of triple with context""" self.__store.addN( (s, p, o, c) for s, p, o, c in quads if isinstance(c, Graph) and c.identifier is self.identifier and _assertnode(s, p, o) ) return self def remove(self, triple): """Remove a triple from the graph If the triple does not provide a context attribute, removes the triple from all contexts. """ self.__store.remove(triple, context=self) return self def triples( self, triple: Tuple[Optional[Node], Union[None, Path, Node], Optional[Node]] ): """Generator over the triple store Returns triples that match the given triple pattern. 
If triple pattern does not provide a context, all contexts will be searched. """ s, p, o = triple if isinstance(p, Path): for _s, _o in p.eval(self, s, o): yield _s, p, _o else: for (s, p, o), cg in self.__store.triples((s, p, o), context=self): yield s, p, o def __getitem__(self, item): """ A graph can be "sliced" as a shortcut for the triples method The python slice syntax is (ab)used for specifying triples. A generator over matches is returned, the returned tuples include only the parts not given >>> import rdflib >>> g = rdflib.Graph() >>> g.add((rdflib.URIRef("urn:bob"), namespace.RDFS.label, rdflib.Literal("Bob"))) # doctest: +ELLIPSIS )> >>> list(g[rdflib.URIRef("urn:bob")]) # all triples about bob [(rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'), rdflib.term.Literal('Bob'))] >>> list(g[:namespace.RDFS.label]) # all label triples [(rdflib.term.URIRef('urn:bob'), rdflib.term.Literal('Bob'))] >>> list(g[::rdflib.Literal("Bob")]) # all triples with bob as object [(rdflib.term.URIRef('urn:bob'), rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'))] Combined with SPARQL paths, more complex queries can be written concisely: Name of all Bobs friends: g[bob : FOAF.knows/FOAF.name ] Some label for Bob: g[bob : DC.title|FOAF.name|RDFS.label] All friends and friends of friends of Bob g[bob : FOAF.knows * "+"] etc. .. 
versionadded:: 4.0 """ if isinstance(item, slice): s, p, o = item.start, item.stop, item.step if s is None and p is None and o is None: return self.triples((s, p, o)) elif s is None and p is None: return self.subject_predicates(o) elif s is None and o is None: return self.subject_objects(p) elif p is None and o is None: return self.predicate_objects(s) elif s is None: return self.subjects(p, o) elif p is None: return self.predicates(s, o) elif o is None: return self.objects(s, p) else: # all given return (s, p, o) in self elif isinstance(item, (Path, Node)): return self.predicate_objects(item) else: raise TypeError( "You can only index a graph by a single rdflib term or path, or a slice of rdflib terms." ) def __len__(self): """Returns the number of triples in the graph If context is specified then the number of triples in the context is returned instead. """ return self.__store.__len__(context=self) def __iter__(self): """Iterates over all triples in the store""" return self.triples((None, None, None)) def __contains__(self, triple): """Support for 'triple in graph' syntax""" for triple in self.triples(triple): return True return False def __hash__(self): return hash(self.identifier) def __cmp__(self, other): if other is None: return -1 elif isinstance(other, Graph): return (self.identifier > other.identifier) - ( self.identifier < other.identifier ) else: # Note if None is considered equivalent to owl:Nothing # Then perhaps a graph with length 0 should be considered # equivalent to None (if compared to it)? 
return 1 def __eq__(self, other): return isinstance(other, Graph) and self.identifier == other.identifier def __lt__(self, other): return (other is None) or ( isinstance(other, Graph) and self.identifier < other.identifier ) def __le__(self, other): return self < other or self == other def __gt__(self, other): return (isinstance(other, Graph) and self.identifier > other.identifier) or ( other is not None ) def __ge__(self, other): return self > other or self == other def __iadd__(self, other): """Add all triples in Graph other to Graph. BNode IDs are not changed.""" self.addN((s, p, o, self) for s, p, o in other) return self def __isub__(self, other): """Subtract all triples in Graph other from Graph. BNode IDs are not changed.""" for triple in other: self.remove(triple) return self def __add__(self, other): """Set-theoretic union BNode IDs are not changed.""" try: retval = type(self)() except TypeError: retval = Graph() for (prefix, uri) in set(list(self.namespaces()) + list(other.namespaces())): retval.bind(prefix, uri) for x in self: retval.add(x) for y in other: retval.add(y) return retval def __mul__(self, other): """Set-theoretic intersection. BNode IDs are not changed.""" try: retval = type(self)() except TypeError: retval = Graph() for x in other: if x in self: retval.add(x) return retval def __sub__(self, other): """Set-theoretic difference. BNode IDs are not changed.""" try: retval = type(self)() except TypeError: retval = Graph() for x in self: if x not in other: retval.add(x) return retval def __xor__(self, other): """Set-theoretic XOR. BNode IDs are not changed.""" return (self - other) + (other - self) __or__ = __add__ __and__ = __mul__ # Conv. methods def set(self, triple): """Convenience method to update the value of object Remove any existing triples for subject and predicate before adding (subject, predicate, object). 
""" (subject, predicate, object_) = triple assert ( subject is not None ), "s can't be None in .set([s,p,o]), as it would remove (*, p, *)" assert ( predicate is not None ), "p can't be None in .set([s,p,o]), as it would remove (s, *, *)" self.remove((subject, predicate, None)) self.add((subject, predicate, object_)) return self def subjects(self, predicate=None, object=None) -> Iterable[Node]: """A generator of subjects with the given predicate and object""" for s, p, o in self.triples((None, predicate, object)): yield s def predicates(self, subject=None, object=None) -> Iterable[Node]: """A generator of predicates with the given subject and object""" for s, p, o in self.triples((subject, None, object)): yield p def objects(self, subject=None, predicate=None) -> Iterable[Node]: """A generator of objects with the given subject and predicate""" for s, p, o in self.triples((subject, predicate, None)): yield o def subject_predicates(self, object=None): """A generator of (subject, predicate) tuples for the given object""" for s, p, o in self.triples((None, None, object)): yield s, p def subject_objects(self, predicate=None): """A generator of (subject, object) tuples for the given predicate""" for s, p, o in self.triples((None, predicate, None)): yield s, o def predicate_objects(self, subject=None): """A generator of (predicate, object) tuples for the given subject""" for s, p, o in self.triples((subject, None, None)): yield p, o def triples_choices(self, triple, context=None): subject, predicate, object_ = triple for (s, p, o), cg in self.store.triples_choices( (subject, predicate, object_), context=self ): yield s, p, o def value( self, subject=None, predicate=RDF.value, object=None, default=None, any=True ): """Get a value for a pair of two criteria Exactly one of subject, predicate, object must be None. Useful if one knows that there may only be one value. 
It is one of those situations that occur a lot, hence this 'macro' like utility Parameters: subject, predicate, object -- exactly one must be None default -- value to be returned if no values found any -- if True, return any value in the case there is more than one, else, raise UniquenessError """ retval = default if ( (subject is None and predicate is None) or (subject is None and object is None) or (predicate is None and object is None) ): return None if object is None: values = self.objects(subject, predicate) if subject is None: values = self.subjects(predicate, object) if predicate is None: values = self.predicates(subject, object) try: retval = next(values) except StopIteration: retval = default else: if any is False: try: next(values) msg = ( "While trying to find a value for (%s, %s, %s) the" " following multiple values where found:\n" % (subject, predicate, object) ) triples = self.store.triples((subject, predicate, object), None) for (s, p, o), contexts in triples: msg += "(%s, %s, %s)\n (contexts: %s)\n" % ( s, p, o, list(contexts), ) raise exceptions.UniquenessError(msg) except StopIteration: pass return retval def label(self, subject, default=""): """Query for the RDFS.label of the subject Return default if no label exists or any label if multiple exist. """ warn( DeprecationWarning( "graph.label() is deprecated and will be removed in rdflib 6.0.0." ) ) if subject is None: return default return self.value(subject, namespace.RDFS.label, default=default, any=True) def preferredLabel( self, subject, lang=None, default=None, labelProperties=(namespace.SKOS.prefLabel, namespace.RDFS.label), ): """ Find the preferred label for subject. By default prefers skos:prefLabels over rdfs:labels. In case at least one prefLabel is found returns those, else returns labels. In case a language string (e.g., "en", "de" or even "" for no lang-tagged literals) is given, only such labels will be considered. 
Return a list of (labelProp, label) pairs, where labelProp is either skos:prefLabel or rdfs:label. >>> from rdflib import ConjunctiveGraph, URIRef, Literal, namespace >>> from pprint import pprint >>> g = ConjunctiveGraph() >>> u = URIRef("http://example.com/foo") >>> g.add([u, namespace.RDFS.label, Literal("foo")]) # doctest: +ELLIPSIS )> >>> g.add([u, namespace.RDFS.label, Literal("bar")]) # doctest: +ELLIPSIS )> >>> pprint(sorted(g.preferredLabel(u))) [(rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'), rdflib.term.Literal('bar')), (rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#label'), rdflib.term.Literal('foo'))] >>> g.add([u, namespace.SKOS.prefLabel, Literal("bla")]) # doctest: +ELLIPSIS )> >>> pprint(g.preferredLabel(u)) [(rdflib.term.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'), rdflib.term.Literal('bla'))] >>> g.add([u, namespace.SKOS.prefLabel, Literal("blubb", lang="en")]) # doctest: +ELLIPSIS )> >>> sorted(g.preferredLabel(u)) #doctest: +NORMALIZE_WHITESPACE [(rdflib.term.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'), rdflib.term.Literal('bla')), (rdflib.term.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'), rdflib.term.Literal('blubb', lang='en'))] >>> g.preferredLabel(u, lang="") #doctest: +NORMALIZE_WHITESPACE [(rdflib.term.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'), rdflib.term.Literal('bla'))] >>> pprint(g.preferredLabel(u, lang="en")) [(rdflib.term.URIRef('http://www.w3.org/2004/02/skos/core#prefLabel'), rdflib.term.Literal('blubb', lang='en'))] """ warn( DeprecationWarning( "graph.preferredLabel() is deprecated and will be removed in rdflib 6.0.0." 
) ) if default is None: default = [] # setup the language filtering if lang is not None: if lang == "": # we only want not language-tagged literals def langfilter(l_): return l_.language is None else: def langfilter(l_): return l_.language == lang else: # we don't care about language tags def langfilter(l_): return True for labelProp in labelProperties: labels = list(filter(langfilter, self.objects(subject, labelProp))) if len(labels) == 0: continue else: return [(labelProp, l_) for l_ in labels] return default def comment(self, subject, default=""): """Query for the RDFS.comment of the subject Return default if no comment exists """ warn( DeprecationWarning( "graph.comment() is deprecated and will be removed in rdflib 6.0.0." ) ) if subject is None: return default return self.value(subject, namespace.RDFS.comment, default=default, any=True) def items(self, list): """Generator over all items in the resource specified by list list is an RDF collection. """ chain = set([list]) while list: item = self.value(list, RDF.first) if item is not None: yield item list = self.value(list, RDF.rest) if list in chain: raise ValueError("List contains a recursive rdf:rest reference") chain.add(list) def transitiveClosure(self, func, arg, seen=None): """ Generates transitive closure of a user-defined function against the graph >>> from rdflib.collection import Collection >>> g=Graph() >>> a=BNode("foo") >>> b=BNode("bar") >>> c=BNode("baz") >>> g.add((a,RDF.first,RDF.type)) # doctest: +ELLIPSIS )> >>> g.add((a,RDF.rest,b)) # doctest: +ELLIPSIS )> >>> g.add((b,RDF.first,namespace.RDFS.label)) # doctest: +ELLIPSIS )> >>> g.add((b,RDF.rest,c)) # doctest: +ELLIPSIS )> >>> g.add((c,RDF.first,namespace.RDFS.comment)) # doctest: +ELLIPSIS )> >>> g.add((c,RDF.rest,RDF.nil)) # doctest: +ELLIPSIS )> >>> def topList(node,g): ... for s in g.subjects(RDF.rest, node): ... yield s >>> def reverseList(node,g): ... for f in g.objects(node, RDF.first): ... print(f) ... 
for s in g.subjects(RDF.rest, node): ... yield s >>> [rt for rt in g.transitiveClosure( ... topList,RDF.nil)] # doctest: +NORMALIZE_WHITESPACE [rdflib.term.BNode('baz'), rdflib.term.BNode('bar'), rdflib.term.BNode('foo')] >>> [rt for rt in g.transitiveClosure( ... reverseList,RDF.nil)] # doctest: +NORMALIZE_WHITESPACE http://www.w3.org/2000/01/rdf-schema#comment http://www.w3.org/2000/01/rdf-schema#label http://www.w3.org/1999/02/22-rdf-syntax-ns#type [rdflib.term.BNode('baz'), rdflib.term.BNode('bar'), rdflib.term.BNode('foo')] """ if seen is None: seen = {} elif arg in seen: return seen[arg] = 1 for rt in func(arg, self): yield rt for rt_2 in self.transitiveClosure(func, rt, seen): yield rt_2 def transitive_objects(self, subject, predicate, remember=None): """Transitively generate objects for the ``predicate`` relationship Generated objects belong to the depth first transitive closure of the ``predicate`` relationship starting at ``subject``. """ if remember is None: remember = {} if subject in remember: return remember[subject] = 1 yield subject for object in self.objects(subject, predicate): for o in self.transitive_objects(object, predicate, remember): yield o def transitive_subjects(self, predicate, object, remember=None): """Transitively generate subjects for the ``predicate`` relationship Generated subjects belong to the depth first transitive closure of the ``predicate`` relationship starting at ``object``. """ if remember is None: remember = {} if object in remember: return remember[object] = 1 yield object for subject in self.subjects(predicate, object): for s in self.transitive_subjects(predicate, subject, remember): yield s def seq(self, subject): """Check if subject is an rdf:Seq If yes, it returns a Seq class instance, None otherwise. """ warn( DeprecationWarning( "graph.seq() is deprecated and will be removed in rdflib 6.0.0." 
) ) if (subject, RDF.type, RDF.Seq) in self: return Seq(self, subject) else: return None def qname(self, uri): return self.namespace_manager.qname(uri) def compute_qname(self, uri, generate=True): return self.namespace_manager.compute_qname(uri, generate) def bind(self, prefix, namespace, override=True, replace=False): """Bind prefix to namespace If override is True will bind namespace to given prefix even if namespace was already bound to a different prefix. if replace, replace any existing prefix with the new namespace for example: graph.bind("foaf", "http://xmlns.com/foaf/0.1/") """ return self.namespace_manager.bind( prefix, namespace, override=override, replace=replace ) def namespaces(self): """Generator over all the prefix, namespace tuples""" for prefix, namespace in self.namespace_manager.namespaces(): yield prefix, namespace def absolutize(self, uri, defrag=1): """Turn uri into an absolute URI if it's not one already""" return self.namespace_manager.absolutize(uri, defrag) # no destination and non-None positional encoding @overload def serialize( self, destination: None, format: str, base: Optional[str], encoding: str, **args ) -> bytes: ... # no destination and non-None keyword encoding @overload def serialize( self, destination: None = ..., format: str = ..., base: Optional[str] = ..., *, encoding: str, **args, ) -> bytes: ... # no destination and None encoding @overload def serialize( self, destination: None = ..., format: str = ..., base: Optional[str] = ..., encoding: None = ..., **args, ) -> str: ... # non-None destination @overload def serialize( self, destination: Union[str, pathlib.PurePath, IO[bytes]], format: str = ..., base: Optional[str] = ..., encoding: Optional[str] = ..., **args, ) -> "Graph": ... # fallback @overload def serialize( self, destination: Optional[Union[str, pathlib.PurePath, IO[bytes]]] = ..., format: str = ..., base: Optional[str] = ..., encoding: Optional[str] = ..., **args, ) -> Union[bytes, str, "Graph"]: ... 
def serialize( self, destination: Optional[Union[str, pathlib.PurePath, IO[bytes]]] = None, format: str = "turtle", base: Optional[str] = None, encoding: Optional[str] = None, **args: Any, ) -> Union[bytes, str, "Graph"]: """Serialize the Graph to destination If destination is None serialize method returns the serialization as bytes or string. If encoding is None and destination is None, returns a string If encoding is set, and Destination is None, returns bytes Format defaults to turtle. Format support can be extended with plugins, but "xml", "n3", "turtle", "nt", "pretty-xml", "trix", "trig" and "nquads" are built in. """ # if base is not given as attribute use the base set for the graph if base is None: base = self.base serializer = plugin.get(format, Serializer)(self) stream: IO[bytes] if destination is None: stream = BytesIO() if encoding is None: serializer.serialize(stream, base=base, encoding="utf-8", **args) return stream.getvalue().decode("utf-8") else: serializer.serialize(stream, base=base, encoding=encoding, **args) return stream.getvalue() if hasattr(destination, "write"): stream = cast(IO[bytes], destination) serializer.serialize(stream, base=base, encoding=encoding, **args) else: if isinstance(destination, pathlib.PurePath): location = str(destination) else: location = cast(str, destination) scheme, netloc, path, params, _query, fragment = urlparse(location) if netloc != "": raise ValueError( f"destination {destination} is not a local file reference" ) fd, name = tempfile.mkstemp() stream = os.fdopen(fd, "wb") serializer.serialize(stream, base=base, encoding=encoding, **args) stream.close() dest = url2pathname(path) if scheme == "file" else location if hasattr(shutil, "move"): shutil.move(name, dest) else: shutil.copy(name, dest) os.remove(name) return self def print(self, format="turtle", encoding="utf-8", out=None): print( self.serialize(None, format=format, encoding=encoding).decode(encoding), file=out, flush=True, ) def parse( self, source=None, 
publicID=None, format: Optional[str] = None, location=None, file=None, data: Optional[Union[str, bytes, bytearray]] = None, **args, ): """ Parse an RDF source adding the resulting triples to the Graph. The source is specified using one of source, location, file or data. :Parameters: - `source`: An InputSource, file-like object, or string. In the case of a string the string is the location of the source. - `location`: A string indicating the relative or absolute URL of the source. Graph's absolutize method is used if a relative location is specified. - `file`: A file-like object. - `data`: A string containing the data to be parsed. - `format`: Used if format can not be determined from source, e.g. file extension or Media Type. Defaults to text/turtle. Format support can be extended with plugins, but "xml", "n3" (use for turtle), "nt" & "trix" are built in. - `publicID`: the logical URI to use as the document base. If None specified the document location is used (at least in the case where there is a document location). :Returns: - self, the graph instance. Examples: >>> my_data = ''' ... ... ... Example ... This is really just an example. ... ... ... ''' >>> import tempfile >>> fd, file_name = tempfile.mkstemp() >>> f = os.fdopen(fd, "w") >>> dummy = f.write(my_data) # Returns num bytes written >>> f.close() >>> g = Graph() >>> result = g.parse(data=my_data, format="application/rdf+xml") >>> len(g) 2 >>> g = Graph() >>> result = g.parse(location=file_name, format="application/rdf+xml") >>> len(g) 2 >>> g = Graph() >>> with open(file_name, "r") as f: ... 
result = g.parse(f, format="application/rdf+xml") >>> len(g) 2 >>> os.remove(file_name) >>> # default turtle parsing >>> result = g.parse(data=" .") >>> len(g) 3 """ source = create_input_source( source=source, publicID=publicID, location=location, file=file, data=data, format=format, ) if format is None: format = source.content_type could_not_guess_format = False if format is None: if ( hasattr(source, "file") and getattr(source.file, "name", None) and isinstance(source.file.name, str) ): format = rdflib.util.guess_format(source.file.name) if format is None: format = "turtle" could_not_guess_format = True parser = plugin.get(format, Parser)() try: # TODO FIXME: Parser.parse should have **kwargs argument. parser.parse(source, self, **args) # type: ignore[call-arg] except SyntaxError as se: if could_not_guess_format: raise ParserError( "Could not guess RDF format for %r from file extension so tried Turtle but failed." "You can explicitly specify format using the format argument." % source ) else: raise se finally: if source.auto_close: source.close() return self def load(self, source, publicID=None, format="xml"): warn( DeprecationWarning( "graph.load() is deprecated, it will be removed in rdflib 6.0.0. " "Please use graph.parse() instead." ) ) return self.parse(source, publicID, format) def query( self, query_object, processor: Union[str, query.Processor] = "sparql", result: Union[str, Type[query.Result]] = "sparql", initNs=None, initBindings=None, use_store_provided: bool = True, **kwargs, ) -> query.Result: """ Query this graph. A type of 'prepared queries' can be realised by providing initial variable bindings with initBindings Initial namespaces are used to resolve prefixes used in the query, if none are given, the namespaces from the graph's namespace manager are used. 
:returntype: rdflib.query.Result """ initBindings = initBindings or {} initNs = initNs or dict(self.namespaces()) if hasattr(self.store, "query") and use_store_provided: try: return self.store.query( query_object, initNs, initBindings, self.default_union and "__UNION__" or self.identifier, **kwargs, ) except NotImplementedError: pass # store has no own implementation if not isinstance(result, query.Result): result = plugin.get(cast(str, result), query.Result) if not isinstance(processor, query.Processor): processor = plugin.get(processor, query.Processor)(self) return result(processor.query(query_object, initBindings, initNs, **kwargs)) def update( self, update_object, processor="sparql", initNs=None, initBindings=None, use_store_provided=True, **kwargs, ): """Update this graph with the given update query.""" initBindings = initBindings or {} initNs = initNs or dict(self.namespaces()) if hasattr(self.store, "update") and use_store_provided: try: return self.store.update( update_object, initNs, initBindings, self.default_union and "__UNION__" or self.identifier, **kwargs, ) except NotImplementedError: pass # store has no own implementation if not isinstance(processor, query.UpdateProcessor): processor = plugin.get(processor, query.UpdateProcessor)(self) return processor.update(update_object, initBindings, initNs, **kwargs) def n3(self): """Return an n3 identifier for the Graph""" return "[%s]" % self.identifier.n3() def __reduce__(self): return ( Graph, ( self.store, self.identifier, ), ) def isomorphic(self, other): """ does a very basic check if these graphs are the same If no BNodes are involved, this is accurate. See rdflib.compare for a correct implementation of isomorphism checks """ # TODO: this is only an approximation. 
if len(self) != len(other): return False for s, p, o in self: if not isinstance(s, BNode) and not isinstance(o, BNode): if not (s, p, o) in other: return False for s, p, o in other: if not isinstance(s, BNode) and not isinstance(o, BNode): if not (s, p, o) in self: return False # TODO: very well could be a false positive at this point yet. return True def connected(self): """Check if the Graph is connected The Graph is considered undirectional. Performs a search on the Graph, starting from a random node. Then iteratively goes depth-first through the triplets where the node is subject and object. Return True if all nodes have been visited and False if it cannot continue and there are still unvisited nodes left. """ all_nodes = list(self.all_nodes()) discovered = [] # take a random one, could also always take the first one, doesn't # really matter. if not all_nodes: return False visiting = [all_nodes[random.randrange(len(all_nodes))]] while visiting: x = visiting.pop() if x not in discovered: discovered.append(x) for new_x in self.objects(subject=x): if new_x not in discovered and new_x not in visiting: visiting.append(new_x) for new_x in self.subjects(object=x): if new_x not in discovered and new_x not in visiting: visiting.append(new_x) # optimisation by only considering length, since no new objects can # be introduced anywhere. if len(all_nodes) == len(discovered): return True else: return False def all_nodes(self): res = set(self.objects()) res.update(self.subjects()) return res def collection(self, identifier): """Create a new ``Collection`` instance. Parameters: - ``identifier``: a URIRef or BNode instance. 
Example:: >>> graph = Graph() >>> uri = URIRef("http://example.org/resource") >>> collection = graph.collection(uri) >>> assert isinstance(collection, Collection) >>> assert collection.uri is uri >>> assert collection.graph is graph >>> collection += [ Literal(1), Literal(2) ] """ return Collection(self, identifier) def resource(self, identifier): """Create a new ``Resource`` instance. Parameters: - ``identifier``: a URIRef or BNode instance. Example:: >>> graph = Graph() >>> uri = URIRef("http://example.org/resource") >>> resource = graph.resource(uri) >>> assert isinstance(resource, Resource) >>> assert resource.identifier is uri >>> assert resource.graph is graph """ if not isinstance(identifier, Node): identifier = URIRef(identifier) return Resource(self, identifier) def _process_skolem_tuples(self, target, func): for t in self.triples((None, None, None)): target.add(func(t)) def skolemize(self, new_graph=None, bnode=None, authority=None, basepath=None): def do_skolemize(bnode, t): (s, p, o) = t if s == bnode: s = s.skolemize(authority=authority, basepath=basepath) if o == bnode: o = o.skolemize(authority=authority, basepath=basepath) return s, p, o def do_skolemize2(t): (s, p, o) = t if isinstance(s, BNode): s = s.skolemize(authority=authority, basepath=basepath) if isinstance(o, BNode): o = o.skolemize(authority=authority, basepath=basepath) return s, p, o retval = Graph() if new_graph is None else new_graph if bnode is None: self._process_skolem_tuples(retval, do_skolemize2) elif isinstance(bnode, BNode): self._process_skolem_tuples(retval, lambda t: do_skolemize(bnode, t)) return retval def de_skolemize(self, new_graph=None, uriref=None): def do_de_skolemize(uriref, t): (s, p, o) = t if s == uriref: s = s.de_skolemize() if o == uriref: o = o.de_skolemize() return s, p, o def do_de_skolemize2(t): (s, p, o) = t if isinstance(s, Genid): s = s.de_skolemize() if isinstance(o, Genid): o = o.de_skolemize() return s, p, o retval = Graph() if new_graph is None else 
new_graph if uriref is None: self._process_skolem_tuples(retval, do_de_skolemize2) elif isinstance(uriref, Genid): self._process_skolem_tuples(retval, lambda t: do_de_skolemize(uriref, t)) return retval def cbd(self, resource): """Retrieves the Concise Bounded Description of a Resource from a Graph Concise Bounded Description (CBD) is defined in [1] as: Given a particular node (the starting node) in a particular RDF graph (the source graph), a subgraph of that particular graph, taken to comprise a concise bounded description of the resource denoted by the starting node, can be identified as follows: 1. Include in the subgraph all statements in the source graph where the subject of the statement is the starting node; 2. Recursively, for all statements identified in the subgraph thus far having a blank node object, include in the subgraph all statements in the source graph where the subject of the statement is the blank node in question and which are not already included in the subgraph. 3. Recursively, for all statements included in the subgraph thus far, for all reifications of each statement in the source graph, include the concise bounded description beginning from the rdf:Statement node of each reification. This results in a subgraph where the object nodes are either URI references, literals, or blank nodes not serving as the subject of any statement in the graph. 
[1] https://www.w3.org/Submission/CBD/ :param resource: a URIRef object, of the Resource for queried for :return: a Graph, subgraph of self """ subgraph = Graph() def add_to_cbd(uri): for s, p, o in self.triples((uri, None, None)): subgraph.add((s, p, o)) # recurse 'down' through ll Blank Nodes if type(o) == BNode and not (o, None, None) in subgraph: add_to_cbd(o) # for Rule 3 (reification) # for any rdf:Statement in the graph with the given URI as the object of rdf:subject, # get all triples with that rdf:Statement instance as subject # find any subject s where the predicate is rdf:subject and this uri is the object # (these subjects are of type rdf:Statement, given the domain of rdf:subject) for s, p, o in self.triples((None, RDF.subject, uri)): # find all triples with s as the subject and add these to the subgraph for s2, p2, o2 in self.triples((s, None, None)): subgraph.add((s2, p2, o2)) add_to_cbd(resource) return subgraph class ConjunctiveGraph(Graph): """A ConjunctiveGraph is an (unnamed) aggregation of all the named graphs in a store. It has a ``default`` graph, whose name is associated with the graph throughout its life. :meth:`__init__` can take an identifier to use as the name of this default graph or it will assign a BNode. All methods that add triples work against this default graph. All queries are carried out against the union of all graphs. """ def __init__( self, store: Union[Store, str] = "default", identifier: Optional[Union[Node, str]] = None, default_graph_base: Optional[str] = None, ): super(ConjunctiveGraph, self).__init__(store, identifier=identifier) assert self.store.context_aware, ( "ConjunctiveGraph must be backed by" " a context aware store." ) self.context_aware = True self.default_union = True # Conjunctive! 
self.default_context = Graph( store=self.store, identifier=identifier or BNode(), base=default_graph_base ) def __str__(self): pattern = ( "[a rdflib:ConjunctiveGraph;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']]" ) return pattern % self.store.__class__.__name__ @overload def _spoc( self, triple_or_quad: Union[ Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node] ], default: bool = False, ) -> Tuple[Node, Node, Node, Optional[Graph]]: ... @overload def _spoc( self, triple_or_quad: None, default: bool = False, ) -> Tuple[None, None, None, Optional[Graph]]: ... def _spoc( self, triple_or_quad: Optional[ Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]] ], default: bool = False, ) -> Tuple[Optional[Node], Optional[Node], Optional[Node], Optional[Graph]]: """ helper method for having methods that support either triples or quads """ if triple_or_quad is None: return (None, None, None, self.default_context if default else None) if len(triple_or_quad) == 3: c = self.default_context if default else None (s, p, o) = triple_or_quad # type: ignore[misc] elif len(triple_or_quad) == 4: (s, p, o, c) = triple_or_quad # type: ignore[misc] c = self._graph(c) return s, p, o, c def __contains__(self, triple_or_quad): """Support for 'triple/quad in graph' syntax""" s, p, o, c = self._spoc(triple_or_quad) for t in self.triples((s, p, o), context=c): return True return False def add(self, triple_or_quad: Union[Tuple[Node, Node, Node, Optional[Any]], Tuple[Node, Node, Node]]) -> "ConjunctiveGraph": # type: ignore[override] """ Add a triple or quad to the store. if a triple is given it is added to the default context """ s, p, o, c = self._spoc(triple_or_quad, default=True) _assertnode(s, p, o) self.store.add((s, p, o), context=c, quoted=False) return self @overload def _graph(self, c: Union[Graph, Node, str]) -> Graph: ... @overload def _graph(self, c: None) -> None: ... 
def _graph(self, c: Optional[Union[Graph, Node, str]]) -> Optional[Graph]: if c is None: return None if not isinstance(c, Graph): return self.get_context(c) else: return c def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): """Add a sequence of triples with context""" self.store.addN( (s, p, o, self._graph(c)) for s, p, o, c in quads if _assertnode(s, p, o) ) return self def remove(self, triple_or_quad): """ Removes a triple or quads if a triple is given it is removed from all contexts a quad is removed from the given context only """ s, p, o, c = self._spoc(triple_or_quad) self.store.remove((s, p, o), context=c) return self def triples(self, triple_or_quad, context=None): """ Iterate over all the triples in the entire conjunctive graph For legacy reasons, this can take the context to query either as a fourth element of the quad, or as the explicit context keyword parameter. The kw param takes precedence. """ s, p, o, c = self._spoc(triple_or_quad) context = self._graph(context or c) if self.default_union: if context == self.default_context: context = None else: if context is None: context = self.default_context if isinstance(p, Path): if context is None: context = self for s, o in p.eval(context, s, o): yield s, p, o else: for (s, p, o), cg in self.store.triples((s, p, o), context=context): yield s, p, o def quads(self, triple_or_quad=None): """Iterate over all the quads in the entire conjunctive graph""" s, p, o, c = self._spoc(triple_or_quad) for (s, p, o), cg in self.store.triples((s, p, o), context=c): for ctx in cg: yield s, p, o, ctx def triples_choices(self, triple, context=None): """Iterate over all the triples in the entire conjunctive graph""" s, p, o = triple if context is None: if not self.default_union: context = self.default_context else: context = self._graph(context) for (s1, p1, o1), cg in self.store.triples_choices((s, p, o), context=context): yield s1, p1, o1 def __len__(self): """Number of triples in the entire conjunctive graph""" 
return self.store.__len__() def contexts(self, triple=None): """Iterate over all contexts in the graph If triple is specified, iterate over all contexts the triple is in. """ for context in self.store.contexts(triple): if isinstance(context, Graph): # TODO: One of these should never happen and probably # should raise an exception rather than smoothing over # the weirdness - see #225 yield context else: yield self.get_context(context) def get_context( self, identifier: Optional[Union[Node, str]], quoted: bool = False, base: Optional[str] = None, ) -> Graph: """Return a context graph for the given identifier identifier must be a URIRef or BNode. """ # TODO: FIXME - why is ConjunctiveGraph passed as namespace_manager? return Graph( store=self.store, identifier=identifier, namespace_manager=self, base=base # type: ignore[arg-type] ) def remove_context(self, context): """Removes the given context from the graph""" self.store.remove((None, None, None), context) def context_id(self, uri, context_id=None): """URI#context""" uri = uri.split("#", 1)[0] if context_id is None: context_id = "#context" return URIRef(context_id, base=uri) def parse( self, source=None, publicID=None, format=None, location=None, file=None, data=None, **args, ): """ Parse source adding the resulting triples to its own context (sub graph of this graph). See :meth:`rdflib.graph.Graph.parse` for documentation on arguments. :Returns: The graph into which the source was parsed. In the case of n3 it returns the root context. """ source = create_input_source( source=source, publicID=publicID, location=location, file=file, data=data, format=format, ) g_id = publicID and publicID or source.getPublicId() if not isinstance(g_id, Node): g_id = URIRef(g_id) context = Graph(store=self.store, identifier=g_id) context.remove((None, None, None)) # hmm ? context.parse(source, publicID=publicID, format=format, **args) # TODO: FIXME: This should not return context, but self. 
return context def __reduce__(self): return ConjunctiveGraph, (self.store, self.identifier) DATASET_DEFAULT_GRAPH_ID = URIRef("urn:x-rdflib:default") class Dataset(ConjunctiveGraph): __doc__ = """ RDF 1.1 Dataset. Small extension to the Conjunctive Graph: - the primary term is graphs in the datasets and not contexts with quads, so there is a separate method to set/retrieve a graph in a dataset and operate with graphs - graphs cannot be identified with blank nodes - added a method to directly add a single quad Examples of usage: >>> # Create a new Dataset >>> ds = Dataset() >>> # simple triples goes to default graph >>> ds.add((URIRef("http://example.org/a"), ... URIRef("http://www.example.org/b"), ... Literal("foo"))) # doctest: +ELLIPSIS )> >>> >>> # Create a graph in the dataset, if the graph name has already been >>> # used, the corresponding graph will be returned >>> # (ie, the Dataset keeps track of the constituent graphs) >>> g = ds.graph(URIRef("http://www.example.com/gr")) >>> >>> # add triples to the new graph as usual >>> g.add( ... (URIRef("http://example.org/x"), ... URIRef("http://example.org/y"), ... Literal("bar")) ) # doctest: +ELLIPSIS )> >>> # alternatively: add a quad to the dataset -> goes to the graph >>> ds.add( ... (URIRef("http://example.org/x"), ... URIRef("http://example.org/z"), ... Literal("foo-bar"),g) ) # doctest: +ELLIPSIS )> >>> >>> # querying triples return them all regardless of the graph >>> for t in ds.triples((None,None,None)): # doctest: +SKIP ... 
print(t) # doctest: +NORMALIZE_WHITESPACE (rdflib.term.URIRef("http://example.org/a"), rdflib.term.URIRef("http://www.example.org/b"), rdflib.term.Literal("foo")) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/z"), rdflib.term.Literal("foo-bar")) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/y"), rdflib.term.Literal("bar")) >>> >>> # querying quads() return quads; the fourth argument can be unrestricted >>> # (None) or restricted to a graph >>> for q in ds.quads((None, None, None, None)): # doctest: +SKIP ... print(q) # doctest: +NORMALIZE_WHITESPACE (rdflib.term.URIRef("http://example.org/a"), rdflib.term.URIRef("http://www.example.org/b"), rdflib.term.Literal("foo"), None) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/y"), rdflib.term.Literal("bar"), rdflib.term.URIRef("http://www.example.com/gr")) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/z"), rdflib.term.Literal("foo-bar"), rdflib.term.URIRef("http://www.example.com/gr")) >>> >>> # unrestricted looping is equivalent to iterating over the entire Dataset >>> for q in ds: # doctest: +SKIP ... print(q) # doctest: +NORMALIZE_WHITESPACE (rdflib.term.URIRef("http://example.org/a"), rdflib.term.URIRef("http://www.example.org/b"), rdflib.term.Literal("foo"), None) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/y"), rdflib.term.Literal("bar"), rdflib.term.URIRef("http://www.example.com/gr")) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/z"), rdflib.term.Literal("foo-bar"), rdflib.term.URIRef("http://www.example.com/gr")) >>> >>> # resticting iteration to a graph: >>> for q in ds.quads((None, None, None, g)): # doctest: +SKIP ... 
print(q) # doctest: +NORMALIZE_WHITESPACE (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/y"), rdflib.term.Literal("bar"), rdflib.term.URIRef("http://www.example.com/gr")) (rdflib.term.URIRef("http://example.org/x"), rdflib.term.URIRef("http://example.org/z"), rdflib.term.Literal("foo-bar"), rdflib.term.URIRef("http://www.example.com/gr")) >>> # Note that in the call above - >>> # ds.quads((None,None,None,"http://www.example.com/gr")) >>> # would have been accepted, too >>> >>> # graph names in the dataset can be queried: >>> for c in ds.graphs(): # doctest: +SKIP ... print(c) # doctest: DEFAULT http://www.example.com/gr >>> # A graph can be created without specifying a name; a skolemized genid >>> # is created on the fly >>> h = ds.graph() >>> for c in ds.graphs(): # doctest: +SKIP ... print(c) # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS DEFAULT http://rdlib.net/.well-known/genid/rdflib/N... http://www.example.com/gr >>> # Note that the Dataset.graphs() call returns names of empty graphs, >>> # too. This can be restricted: >>> for c in ds.graphs(empty=False): # doctest: +SKIP ... print(c) # doctest: +NORMALIZE_WHITESPACE DEFAULT http://www.example.com/gr >>> >>> # a graph can also be removed from a dataset via ds.remove_graph(g) .. 
versionadded:: 4.0 """ def __init__(self, store="default", default_union=False, default_graph_base=None): super(Dataset, self).__init__(store=store, identifier=None) if not self.store.graph_aware: raise Exception("DataSet must be backed by a graph-aware store!") self.default_context = Graph( store=self.store, identifier=DATASET_DEFAULT_GRAPH_ID, base=default_graph_base, ) self.default_union = default_union def __str__(self): pattern = ( "[a rdflib:Dataset;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']]" ) return pattern % self.store.__class__.__name__ def __reduce__(self): return (type(self), (self.store, self.default_union)) def __getstate__(self): return self.store, self.identifier, self.default_context, self.default_union def __setstate__(self, state): self.store, self.identifier, self.default_context, self.default_union = state def graph(self, identifier=None, base=None): if identifier is None: from rdflib.term import rdflib_skolem_genid self.bind( "genid", "http://rdflib.net" + rdflib_skolem_genid, override=False ) identifier = BNode().skolemize() g = self._graph(identifier) g.base = base self.store.add_graph(g) return g def parse( self, source=None, publicID=None, format=None, location=None, file=None, data=None, **args, ): c = ConjunctiveGraph.parse( self, source, publicID, format, location, file, data, **args ) self.graph(c) return c def add_graph(self, g): """alias of graph for consistency""" return self.graph(g) def remove_graph(self, g): if not isinstance(g, Graph): g = self.get_context(g) self.store.remove_graph(g) if g is None or g == self.default_context: # default graph cannot be removed # only triples deleted, so add it back in self.store.add_graph(self.default_context) return self def contexts(self, triple=None): default = False for c in super(Dataset, self).contexts(triple): default |= c.identifier == DATASET_DEFAULT_GRAPH_ID yield c if not default: yield self.graph(DATASET_DEFAULT_GRAPH_ID) graphs = contexts def quads(self, quad): for s, p, 
o, c in super(Dataset, self).quads(quad): if c.identifier == self.default_context: yield s, p, o, None else: yield s, p, o, c.identifier def __iter__(self) -> Generator[DatasetQuad, None, None]: """Iterates over all quads in the store""" return self.quads((None, None, None, None)) class QuotedGraph(Graph): """ Quoted Graphs are intended to implement Notation 3 formulae. They are associated with a required identifier that the N3 parser *must* provide in order to maintain consistent formulae identification for scenarios such as implication and other such processing. """ def __init__(self, store, identifier): super(QuotedGraph, self).__init__(store, identifier) def add(self, triple: Tuple[Node, Node, Node]): """Add a triple with self as context""" s, p, o = triple assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,) assert isinstance(p, Node), "Predicate %s must be an rdflib term" % (p,) assert isinstance(o, Node), "Object %s must be an rdflib term" % (o,) self.store.add((s, p, o), self, quoted=True) return self def addN(self, quads: Tuple[Node, Node, Node, Any]) -> "QuotedGraph": # type: ignore[override] """Add a sequence of triple with context""" self.store.addN( (s, p, o, c) for s, p, o, c in quads if isinstance(c, QuotedGraph) and c.identifier is self.identifier and _assertnode(s, p, o) ) return self def n3(self): """Return an n3 identifier for the Graph""" return "{%s}" % self.identifier.n3() def __str__(self): identifier = self.identifier.n3() label = self.store.__class__.__name__ pattern = ( "{this rdflib.identifier %s;rdflib:storage " "[a rdflib:Store;rdfs:label '%s']}" ) return pattern % (identifier, label) def __reduce__(self): return QuotedGraph, (self.store, self.identifier) # Make sure QuotedGraph is ordered correctly # wrt to other Terms. 
# this must be done here, as the QuotedGraph cannot be # circularily imported in term.py rdflib.term._ORDERING[QuotedGraph] = 11 class Seq(object): """Wrapper around an RDF Seq resource It implements a container type in Python with the order of the items returned corresponding to the Seq content. It is based on the natural ordering of the predicate names _1, _2, _3, etc, which is the 'implementation' of a sequence in RDF terms. """ def __init__(self, graph, subject): """Parameters: - graph: the graph containing the Seq - subject: the subject of a Seq. Note that the init does not check whether this is a Seq, this is done in whoever creates this instance! """ _list = self._list = list() LI_INDEX = URIRef(str(RDF) + "_") for (p, o) in graph.predicate_objects(subject): if p.startswith(LI_INDEX): # != RDF.Seq: # i = int(p.replace(LI_INDEX, "")) _list.append((i, o)) # here is the trick: the predicates are _1, _2, _3, etc. Ie, # by sorting the keys (by integer) we have what we want! _list.sort() def toPython(self): return self def __iter__(self): """Generator over the items in the Seq""" for _, item in self._list: yield item def __len__(self): """Length of the Seq""" return len(self._list) def __getitem__(self, index): """Item given by index from the Seq""" index, item = self._list.__getitem__(index) return item class ModificationException(Exception): def __init__(self): pass def __str__(self): return ( "Modifications and transactional operations not allowed on " "ReadOnlyGraphAggregate instances" ) class UnSupportedAggregateOperation(Exception): def __init__(self): pass def __str__(self): return "This operation is not supported by ReadOnlyGraphAggregate " "instances" class ReadOnlyGraphAggregate(ConjunctiveGraph): """Utility class for treating a set of graphs as a single graph Only read operations are supported (hence the name). Essentially a ConjunctiveGraph over an explicit subset of the entire store. 
""" def __init__(self, graphs, store="default"): if store is not None: super(ReadOnlyGraphAggregate, self).__init__(store) Graph.__init__(self, store) self.__namespace_manager = None assert ( isinstance(graphs, list) and graphs and [g for g in graphs if isinstance(g, Graph)] ), "graphs argument must be a list of Graphs!!" self.graphs = graphs def __repr__(self): return "" % len(self.graphs) def destroy(self, configuration): raise ModificationException() # Transactional interfaces (optional) def commit(self): raise ModificationException() def rollback(self): raise ModificationException() def open(self, configuration, create=False): # TODO: is there a use case for this method? for graph in self.graphs: graph.open(self, configuration, create) def close(self): for graph in self.graphs: graph.close() def add(self, triple): raise ModificationException() def addN(self, quads): raise ModificationException() def remove(self, triple): raise ModificationException() def triples(self, triple): s, p, o = triple for graph in self.graphs: if isinstance(p, Path): for s, o in p.eval(self, s, o): yield s, p, o else: for s1, p1, o1 in graph.triples((s, p, o)): yield s1, p1, o1 def __contains__(self, triple_or_quad): context = None if len(triple_or_quad) == 4: context = triple_or_quad[3] for graph in self.graphs: if context is None or graph.identifier == context.identifier: if triple_or_quad[:3] in graph: return True return False def quads(self, triple): """Iterate over all the quads in the entire aggregate graph""" s, p, o = triple for graph in self.graphs: for s1, p1, o1 in graph.triples((s, p, o)): yield s1, p1, o1, graph def __len__(self): return sum(len(g) for g in self.graphs) def __hash__(self): raise UnSupportedAggregateOperation() def __cmp__(self, other): if other is None: return -1 elif isinstance(other, Graph): return -1 elif isinstance(other, ReadOnlyGraphAggregate): return (self.graphs > other.graphs) - (self.graphs < other.graphs) else: return -1 def __iadd__(self, 
other): raise ModificationException() def __isub__(self, other): raise ModificationException() # Conv. methods def triples_choices(self, triple, context=None): subject, predicate, object_ = triple for graph in self.graphs: choices = graph.triples_choices((subject, predicate, object_)) for (s, p, o) in choices: yield s, p, o def qname(self, uri): if hasattr(self, "namespace_manager") and self.namespace_manager: return self.namespace_manager.qname(uri) raise UnSupportedAggregateOperation() def compute_qname(self, uri, generate=True): if hasattr(self, "namespace_manager") and self.namespace_manager: return self.namespace_manager.compute_qname(uri, generate) raise UnSupportedAggregateOperation() def bind(self, prefix, namespace, override=True): raise UnSupportedAggregateOperation() def namespaces(self): if hasattr(self, "namespace_manager"): for prefix, namespace in self.namespace_manager.namespaces(): yield prefix, namespace else: for graph in self.graphs: for prefix, namespace in graph.namespaces(): yield prefix, namespace def absolutize(self, uri, defrag=1): raise UnSupportedAggregateOperation() def parse(self, source, publicID=None, format=None, **args): raise ModificationException() def n3(self): raise UnSupportedAggregateOperation() def __reduce__(self): raise UnSupportedAggregateOperation() def _assertnode(*terms): for t in terms: assert isinstance(t, Node), "Term %s must be an rdflib term" % (t,) return True class BatchAddGraph(object): """ Wrapper around graph that turns batches of calls to Graph's add (and optionally, addN) into calls to batched calls to addN`. 
:Parameters: - graph: The graph to wrap - batch_size: The maximum number of triples to buffer before passing to Graph's addN - batch_addn: If True, then even calls to `addN` will be batched according to batch_size graph: The wrapped graph count: The number of triples buffered since initialization or the last call to reset batch: The current buffer of triples """ def __init__(self, graph: Graph, batch_size: int = 1000, batch_addn: bool = False): if not batch_size or batch_size < 2: raise ValueError("batch_size must be a positive number") self.graph = graph self.__graph_tuple = (graph,) self.__batch_size = batch_size self.__batch_addn = batch_addn self.reset() def reset(self): """ Manually clear the buffered triples and reset the count to zero """ self.batch = [] self.count = 0 return self def add( self, triple_or_quad: Union[Tuple[Node, Node, Node], Tuple[Node, Node, Node, Any]], ) -> "BatchAddGraph": """ Add a triple to the buffer :param triple: The triple to add """ if len(self.batch) >= self.__batch_size: self.graph.addN(self.batch) self.batch = [] self.count += 1 if len(triple_or_quad) == 3: self.batch.append(triple_or_quad + self.__graph_tuple) else: self.batch.append(triple_or_quad) return self def addN(self, quads: Iterable[Tuple[Node, Node, Node, Any]]): if self.__batch_addn: for q in quads: self.add(q) else: self.graph.addN(quads) return self def __enter__(self): self.reset() return self def __exit__(self, *exc): if exc[0] is None: self.graph.addN(self.batch) def test(): import doctest doctest.testmod() if __name__ == "__main__": test() rdflib-6.1.1/rdflib/namespace/000077500000000000000000000000001415774155300161735ustar00rootroot00000000000000rdflib-6.1.1/rdflib/namespace/_BRICK.py000066400000000000000000003621361415774155300175510ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class BRICK(DefinedNamespace): """ Brick Ontology classes, properties and entity properties. 
See https://brickschema.org/ for more information. Generated from: https://github.com/BrickSchema/Brick/releases/download/nightly/Brick.ttl Date: 2021-09-22T14:32:56 """ # http://www.w3.org/2002/07/owl#Class AED: URIRef AHU: URIRef # Assembly consisting of sections containing a fan or fans and other necessary equipment to perform one or more of the following functions: circulating, filtration, heating, cooling, heat recovery, humidifying, dehumidifying, and mixing of air. Is usually connected to an air-distribution system. Ablutions_Room: URIRef # A room for performing cleansing rituals before prayer Absorption_Chiller: URIRef # A chiller that utilizes a thermal or/and chemical process to produce the refrigeration effect necessary to provide chilled water. There is no mechanical compression of the refrigerant taking place within the machine, as occurs within more traditional vapor compression type chillers. Acceleration_Time_Setpoint: URIRef Access_Control_Equipment: URIRef Access_Reader: URIRef Active_Chilled_Beam: URIRef # A Chilled Beam with an integral primary air connection that induces air flow through the device. Active_Power_Sensor: URIRef # Measures the portion of power that, averaged over a complete cycle of the AC waveform, results in net transfer of energy in one direction Adjust_Sensor: URIRef # Measures user-provided adjustment of some value Air: URIRef # the invisible gaseous substance surrounding the earth, a mixture mainly of oxygen and nitrogen. 
Air_Alarm: URIRef Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between two regions of air Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a air duct or conduit Air_Diffuser: URIRef # A device that is a component of the air distribution system that controls the delivery of conditioned and/or ventilating air into a room Air_Enthalpy_Sensor: URIRef # Measures the total heat content of air Air_Flow_Deadband_Setpoint: URIRef # Sets the size of a deadband of air flow Air_Flow_Demand_Setpoint: URIRef # Sets the rate of air flow required for a process Air_Flow_Loss_Alarm: URIRef # An alarm that indicates loss in air flow. Air_Flow_Sensor: URIRef # Measures the rate of flow of air Air_Flow_Setpoint: URIRef # Sets air flow Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Air_Flow_Setpoint. Air_Grains_Sensor: URIRef # Measures the mass of water vapor in air Air_Handler_Unit: URIRef # Assembly consisting of sections containing a fan or fans and other necessary equipment to perform one or more of the following functions: circulating, filtration, heating, cooling, heat recovery, humidifying, dehumidifying, and mixing of air. Is usually connected to an air-distribution system. 
Air_Handling_Unit: URIRef Air_Humidity_Setpoint: URIRef Air_Loop: URIRef # The set of connected equipment serving one path of air Air_Plenum: URIRef # A component of the HVAC the receives air from the air handling unit or room to distribute or exhaust to or from the building Air_Quality_Sensor: URIRef # A sensor which provides a measure of air quality Air_Static_Pressure_Step_Parameter: URIRef Air_System: URIRef # The equipment, distribution systems and terminals that introduce or exhaust, either collectively or individually, the air into and from the building Air_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the temperature of air. Air_Temperature_Integral_Time_Parameter: URIRef Air_Temperature_Sensor: URIRef # Measures the temperature of air Air_Temperature_Setpoint: URIRef # Sets temperature of air Air_Temperature_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Air_Temperature_Setpoint. 
Air_Temperature_Step_Parameter: URIRef Air_Wet_Bulb_Temperature_Sensor: URIRef Alarm: URIRef # Alarm points are signals (either audible or visual) that alert an operator to an off-normal condition which requires some form of corrective action Alarm_Delay_Parameter: URIRef # A parameter determining how long to delay an alarm after sufficient conditions have been met Angle_Sensor: URIRef # Measues the planar angle of some phenomenon Auditorium: URIRef # A space for performances or larger gatherings Automated_External_Defibrillator: URIRef Automatic_Mode_Command: URIRef # Controls whether or not a device or controller is operating in "Automatic" mode Availability_Status: URIRef # Indicates if a piece of equipment, system, or functionality is available for operation Average_Cooling_Demand_Sensor: URIRef # Measures the average power consumed by a cooling process as the amount of power consumed over some interval Average_Discharge_Air_Flow_Sensor: URIRef # The computed average flow of discharge air over some interval Average_Exhaust_Air_Static_Pressure_Sensor: URIRef # The computed average static pressure of air in exhaust regions of an HVAC system over some period of time Average_Heating_Demand_Sensor: URIRef # Measures the average power consumed by a heating process as the amount of power consumed over some interval Average_Supply_Air_Flow_Sensor: URIRef # The computed average flow of supply air over some interval Average_Zone_Air_Temperature_Sensor: URIRef # The computed average temperature of air in a zone, over some period of time Baseboard_Radiator: URIRef # Steam, hydronic, or electric heating device located at or near the floor. Basement: URIRef # The floor of a building which is partly or entirely below ground level. 
Battery: URIRef # A container that stores chemical energy that can be converted into electricity and used as a source of power Battery_Energy_Storage_System: URIRef # A collection of batteries that provides energy storage, along with their supporting equipment Battery_Room: URIRef # A room used to hold batteries for backup power Battery_Voltage_Sensor: URIRef # Measures the capacity of a battery Bench_Space: URIRef # For areas of play in a stadium, the area for partcipants and referees by the side of the field Blowdown_Water: URIRef # Water expelled from a system to remove mineral build up Boiler: URIRef # A closed, pressure vessel that uses fuel or electricity for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. Booster_Fan: URIRef # Fan activated to increase airflow beyond what is provided by the default configuration Box_Mode_Command: URIRef Break_Room: URIRef # A space for people to relax while not working Breaker_Panel: URIRef # Breaker Panel distributes power into various end-uses. Breakroom: URIRef # A space for people to relax while not working Broadcast_Room: URIRef # A space to organize and manage a broadcast. 
Separate from studio Building: URIRef # An independent unit of the built environment with a characteristic spatial structure, intended to serve at least one function or user activity [ISO 12006-2:2013] Building_Air: URIRef # air contained within a building Building_Air_Humidity_Setpoint: URIRef # Setpoint for humidity in a building Building_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within a building Building_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of the entire building Building_Chilled_Water_Meter: URIRef # A meter that measures the usage or consumption of chilled water of a whole building Building_Electrical_Meter: URIRef # A meter that measures the usage or consumption of electricity of a whole building Building_Gas_Meter: URIRef # A meter that measures the usage or consumption of gas of a whole building Building_Hot_Water_Meter: URIRef # A meter that measures the usage or consumption of hot water of a whole building Building_Meter: URIRef # A meter that measures usage or consumption of some media for a whole building Building_Water_Meter: URIRef # A meter that measures the usage or consumption of water of a whole building Bus_Riser: URIRef # Bus Risers are commonly fed from a switchgear and rise up through a series of floors to the main power distribution source for each floor. 
Bypass_Air: URIRef # air in a bypass duct, used to relieve static pressure Bypass_Air_Flow_Sensor: URIRef # Measures the rate of flow of bypass air Bypass_Air_Humidity_Setpoint: URIRef # Humidity setpoint for bypass air Bypass_Command: URIRef Bypass_Valve: URIRef # A type of valve installed in a bypass pipeline Bypass_Water: URIRef # Water that circumvents a piece of equipment or system Bypass_Water_Flow_Sensor: URIRef # Measures the rate of flow of bypass water Bypass_Water_Flow_Setpoint: URIRef # Sets the target flow rate of bypass water CAV: URIRef CO: URIRef # Carbon Monoxide in the vapor phase CO2: URIRef # Carbon Dioxide in the vapor phase CO2_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the presence of carbon dioxide. CO2_Differential_Sensor: URIRef # Measures the difference between CO2 levels of inside and outside air CO2_Level_Sensor: URIRef # Measures the concentration of CO2 in air CO2_Sensor: URIRef # Measures properties of CO2 in air CO2_Setpoint: URIRef # Sets some property of CO2 CO_Differential_Sensor: URIRef CO_Level_Sensor: URIRef # Measures the concentration of CO CO_Sensor: URIRef # Measures properties of CO CRAC: URIRef Cafeteria: URIRef # A space to serve food and beverages Camera: URIRef Capacity_Sensor: URIRef Ceiling_Fan: URIRef # A fan installed on the ceiling of a room for the purpose of air circulation Centrifugal_Chiller: URIRef # A chiller that uses the vapor compression cycle to chill water. It throws off the heat collected from the chilled water plus the heat from the compressor to a water loop Change_Filter_Alarm: URIRef # An alarm that indicates that a filter must be changed Chilled_Beam: URIRef # A device with an integrated coil that performs sensible heating of a space via circulation of room air. Chilled Beams are not designed to perform latent cooling; see Induction Units. Despite their name, Chilled Beams may perform heating or cooling of a space depending on their configuration. 
Chilled_Water: URIRef # water used as a cooling medium (particularly in air-conditioning systems or in processes) at below ambient temperature. Chilled_Water_Coil: URIRef # A cooling element made of pipe or tube that removes heat from equipment, machines or airflows that is filled with chilled water. Chilled_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of chilled water Chilled_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Chilled_Water_Differential_Pressure_Load_Shed_Reset_Status: URIRef Chilled_Water_Differential_Pressure_Load_Shed_Setpoint: URIRef Chilled_Water_Differential_Pressure_Load_Shed_Status: URIRef Chilled_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Chilled_Water_Differential_Pressure_Sensor: URIRef # Measures the difference in water pressure on either side of a chilled water valve Chilled_Water_Differential_Pressure_Setpoint: URIRef # Sets the target water differential pressure between an upstream and downstream point in a water pipe or conduit used to carry chilled water Chilled_Water_Differential_Pressure_Step_Parameter: URIRef Chilled_Water_Differential_Temperature_Sensor: URIRef # Measures the difference in temperature between the entering water to the chiller or other water cooling device and leaving water from the same chiller or other water cooling device Chilled_Water_Discharge_Flow_Sensor: URIRef # Measures the rate of flow of chilled discharge water Chilled_Water_Discharge_Flow_Setpoint: URIRef # Sets the target flow rate of chilled discharge water Chilled_Water_Flow_Sensor: URIRef # Measures the rate of flow in a chilled water circuit Chilled_Water_Flow_Setpoint: URIRef # Sets the target flow rate of chilled water Chilled_Water_Loop: URIRef # A collection of equipment that transport and regulate chilled water among each other Chilled_Water_Meter: URIRef # A meter that measures the usage or consumption of chilled water Chilled_Water_Pump: URIRef # 
A pump that performs work on chilled water; typically part of a chilled water system Chilled_Water_Pump_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of chilled water in a chilled water pump Chilled_Water_Return_Flow_Sensor: URIRef # Measures the rate of flow of chilled return water Chilled_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of chilled water that is returned to a cooling tower Chilled_Water_Static_Pressure_Setpoint: URIRef # Sets static pressure of chilled water Chilled_Water_Supply_Flow_Sensor: URIRef # Measures the rate of flow of chilled supply water Chilled_Water_Supply_Flow_Setpoint: URIRef # Sets the target flow rate of chilled supply water Chilled_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of chilled water that is supplied from a chiller Chilled_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of chilled water in a building Chilled_Water_System_Enable_Command: URIRef # Enables operation of the chilled water system Chilled_Water_Temperature_Sensor: URIRef # Measures the temperature of chilled water Chilled_Water_Temperature_Setpoint: URIRef # Sets the temperature of chilled water Chilled_Water_Valve: URIRef # A valve that modulates the flow of chilled water Chiller: URIRef # Refrigerating machine used to transfer heat between fluids. Chillers are either direct expansion with a compressor or absorption type. Class: URIRef Close_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Close_Setpoint. Coil: URIRef # Cooling or heating element made of pipe or tube that may or may not be finned and formed into helical or serpentine shape (ASHRAE Dictionary) Cold_Box: URIRef # in a gas separation unit, the insulated section that contains the low-temperature heat exchangers and distillation columns. 
Coldest_Zone_Air_Temperature_Sensor: URIRef # The zone temperature that is coldest; drives the supply temperature of hot air. A computed value rather than a physical sensor. Also referred to as a 'Lowest Zone Air Temperature Sensor' Collection: URIRef Collection_Basin_Water: URIRef # Water transiently collected and directed to the sump or pump suction line, typically integral with a cooling tower Collection_Basin_Water_Heater: URIRef # Basin heaters prevent cold water basin freeze-up, e.g. in cooling towers, closed circuit fluid coolers, or evaporative condensers Collection_Basin_Water_Level_Alarm: URIRef # An alarm that indicates a high or low level of water in the collection basin, e.g. within a Cooling_Tower Collection_Basin_Water_Level_Sensor: URIRef # Measures the level of the water in the collection basin, e.g. within a Cooling_Tower Collection_Basin_Water_Temperature_Sensor: URIRef # Measures the temperature of the water in the collection basin, e.g. within a Cooling_Tower Command: URIRef # A Command is an output point that directly determines the behavior of equipment and/or affects relevant operational points. Common_Space: URIRef # A class of spaces that are used by multiple people at the same time Communication_Loss_Alarm: URIRef # An alarm that indicates a loss of communication e.g. with a device or controller Compressor: URIRef # (1) device for mechanically increasing the pressure of a gas. (2) often described as being either open, hermetic, or semihermetic to describe how the compressor and motor drive is situated in relation to the gas or vapor being compressed. Types include centrifugal, axial flow, reciprocating, rotary screw, rotary vane, scroll, or diaphragm. 1. device for mechanically increasing the pressure of a gas. 2. specific machine, with or without accessories, for compressing refrigerant vapor. 
Computer_Room_Air_Conditioning: URIRef # A device that monitors and maintains the temperature, air distribution and humidity in a network room or data center. Concession: URIRef # A space to sell food and beverages. Usually embedded in a larger space and does not include a space where people consume their purchases Condensate_Leak_Alarm: URIRef # An alarm that indicates a leak of condensate from a cooling system Condenser: URIRef # A heat exchanger in which the primary heat transfer vapor changes its state to a liquid phase. Condenser_Heat_Exchanger: URIRef # A heat exchanger in which the primary heat transfer vapor changes its state to a liquid phase. Condenser_Water: URIRef # Water used used to remove heat through condensation Condenser_Water_Bypass_Valve: URIRef # A valve installed in a bypass line of a condenser water loop Condenser_Water_Isolation_Valve: URIRef # An isolation valve installed in the condenser water loop Condenser_Water_Pump: URIRef # A pump that is part of a condenser system; the pump circulates condenser water from the chiller back to the cooling tower Condenser_Water_System: URIRef # A heat rejection system consisting of (typically) cooling towers, condenser water pumps, chillers and the piping connecting the components Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of condenser water Condenser_Water_Valve: URIRef # A valve that modulates the flow of condenser water Condensing_Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas and heat exchanger that capture and reuse any latent heat for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. 
Conductivity_Sensor: URIRef # Measures electrical conductance Conference_Room: URIRef # A space dedicated in which to hold a meetings Constant_Air_Volume_Box: URIRef # A terminal unit for which supply air flow rate is constant and the supply air temperature is varied to meet thermal load Contact_Sensor: URIRef # Senses or detects contact, such as for determining if a door is closed. Control_Room: URIRef # A space from which operations are managed Cooling_Coil: URIRef # A cooling element made of pipe or tube that removes heat from equipment, machines or airflows. Typically filled with either refrigerant or cold water. Cooling_Command: URIRef # Controls the amount of cooling to be delivered (typically as a proportion of total cooling output) Cooling_Demand_Sensor: URIRef # Measures the amount of power consumed by a cooling process; typically found by multiplying the tonnage of a unit (e.g. RTU) by the efficiency rating in kW/ton Cooling_Demand_Setpoint: URIRef # Sets the rate required for cooling Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling Cooling_Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of cooling discharge air Cooling_Discharge_Air_Temperature_Integral_Time_Parameter: URIRef Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Cooling_Start_Stop_Status: URIRef Cooling_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for cooling Cooling_Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air for cooling Cooling_Supply_Air_Temperature_Integral_Time_Parameter: URIRef Cooling_Supply_Air_Temperature_Proportional_Band_Parameter: URIRef Cooling_Temperature_Setpoint: URIRef # Sets temperature for cooling Cooling_Tower: URIRef # A cooling tower is a heat rejection device that rejects waste heat to the atmosphere through the cooling of a water stream to a lower temperature. 
Cooling towers may either use the evaporation of water to remove process heat and cool the working fluid to near the wet-bulb air temperature or, in the case of closed circuit dry cooling towers, rely solely on air to cool the working fluid to near the dry-bulb air temperature. Cooling_Tower_Fan: URIRef # A fan that pulls air through a cooling tower and across the louvers where the water falls to aid in heat exchange by the process of evaporation Cooling_Valve: URIRef # A valve that controls air temperature by modulating the amount of cold water flowing through a cooling coil Copy_Room: URIRef # A room set aside for common office equipment, including printers and copiers Core_Temperature_Sensor: URIRef # Measures the internal temperature of the radiant layer at the heat source or sink level of the radiant heating and cooling HVAC system. Core_Temperature_Setpoint: URIRef # Sets temperature for the core, i.e. the temperature at the heat source or sink level, of the radiant panel. Cubicle: URIRef # A smaller space set aside for an individual, but not with a door and without full-height walls Current_Imbalance_Sensor: URIRef # A sensor which measures the current difference (imbalance) between phases of an electrical system Current_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Current_Setpoint. 
Current_Output_Sensor: URIRef # Senses the amperes of electrical current produced as output by a device Current_Sensor: URIRef # Senses the amperes of electrical current passing through the sensor Curtailment_Override_Command: URIRef Cycle_Alarm: URIRef # An alarm that indicates off-normal conditions associated with HVAC cycles DC_Bus_Voltage_Sensor: URIRef # Measures the voltage across a DC bus DOAS: URIRef # See Dedicated_Outdoor_Air_System_Unit Damper: URIRef # Element inserted into an air-distribution system or element of an air-distribution system permitting modification of the air resistance of the system and consequently changing the airflow rate or shutting off the airflow. Damper_Command: URIRef # Controls properties of dampers Damper_Position_Command: URIRef # Controls the position (the degree of openness) of a damper Damper_Position_Sensor: URIRef # Measures the current position of a damper in terms of the percent of fully open Damper_Position_Setpoint: URIRef # Sets the position of damper Deadband_Setpoint: URIRef # Sets the size of a deadband Deceleration_Time_Setpoint: URIRef Dedicated_Outdoor_Air_System_Unit: URIRef # A device that conditions and delivers 100% outdoor air to its assigned spaces. It decouples air-conditioning of the outdoor air, usually used to provide minimum outdoor air ventilation, from conditioning of the internal loads. Dehumidification_Start_Stop_Status: URIRef Deionised_Water_Conductivity_Sensor: URIRef # Measures the electrical conductance of deionised water Deionised_Water_Level_Sensor: URIRef # Measures the height/level of deionised water in some container Deionized_Water: URIRef # Water which has been purified by removing its ions (constituting the majority of non-particulate contaminants) Deionized_Water_Alarm: URIRef # An alarm that indicates deionized water leaks. 
Delay_Parameter: URIRef # A parameter determining how long to delay a subsequent action to take place after a received signal Demand_Sensor: URIRef # Measures the amount of power consumed by the use of some process; typically found by multiplying the tonnage of a unit (e.g. RTU) by the efficiency rating in kW/ton Demand_Setpoint: URIRef # Sets the rate required for a process Derivative_Gain_Parameter: URIRef Derivative_Time_Parameter: URIRef Detention_Room: URIRef # A space for the temporary involuntary confinement of people Dew_Point_Setpoint: URIRef # Sets dew point Dewpoint_Sensor: URIRef # Senses the dewpoint temperature . Dew point is the temperature to which air must be cooled to become saturated with water vapor Differential_Air_Temperature_Setpoint: URIRef # Sets temperature of differential air Differential_Pressure_Bypass_Valve: URIRef # A 2-way, self contained proportional valve with an integral differential pressure adjustment setting. Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure Differential_Pressure_Integral_Time_Parameter: URIRef Differential_Pressure_Load_Shed_Status: URIRef Differential_Pressure_Proportional_Band: URIRef Differential_Pressure_Sensor: URIRef # Measures the difference between two applied pressures Differential_Pressure_Setpoint: URIRef # Sets differential pressure Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Differential_Pressure_Setpoint. 
Differential_Pressure_Step_Parameter: URIRef Differential_Speed_Sensor: URIRef Differential_Speed_Setpoint: URIRef # Sets differential speed Differential_Supply_Return_Water_Temperature_Sensor: URIRef # Measures the difference in temperature between return and supply water of water a circuit Dimmer: URIRef # A switch providing continuous control over all or part of a lighting installation; typically potentiometer-based Direct_Expansion_Cooling_Coil: URIRef Direct_Expansion_Heating_Coil: URIRef Direction_Command: URIRef # Commands that affect the direction of some phenomenon Direction_Sensor: URIRef # Measures the direction in degrees in which a phenomenon is occurring Direction_Status: URIRef # Indicates which direction a device is operating in Disable_Command: URIRef # Commands that disable functionality Disable_Differential_Enthalpy_Command: URIRef # Disables the use of differential enthalpy control Disable_Differential_Temperature_Command: URIRef # Disables the use of differential temperature control Disable_Fixed_Enthalpy_Command: URIRef # Disables the use of fixed enthalpy control Disable_Fixed_Temperature_Command: URIRef # Disables the use of fixed temperature temperature Disable_Hot_Water_System_Outside_Air_Temperature_Setpoint: URIRef # Disables hot water system when outside air temperature reaches the indicated value Disable_Status: URIRef # Indicates if functionality has been disabled Discharge_Air: URIRef # the air exiting the registers (vents). 
Discharge_Air_Dewpoint_Sensor: URIRef # Measures dewpoint of discharge air Discharge_Air_Duct_Pressure_Status: URIRef # Indicates if air pressure in discharge duct is within expected bounds Discharge_Air_Flow_Demand_Setpoint: URIRef # Sets the rate of discharge air flow required for a process Discharge_Air_Flow_High_Reset_Setpoint: URIRef Discharge_Air_Flow_Low_Reset_Setpoint: URIRef Discharge_Air_Flow_Reset_Setpoint: URIRef # Setpoints used in Reset strategies Discharge_Air_Flow_Sensor: URIRef # Measures the rate of flow of discharge air Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow Discharge_Air_Humidity_Sensor: URIRef # Measures the relative humidity of discharge air Discharge_Air_Humidity_Setpoint: URIRef # Humidity setpoint for discharge air Discharge_Air_Smoke_Detection_Alarm: URIRef Discharge_Air_Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure of discharge air Discharge_Air_Static_Pressure_Integral_Time_Parameter: URIRef Discharge_Air_Static_Pressure_Proportional_Band_Parameter: URIRef Discharge_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within discharge regions of an HVAC system Discharge_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of discharge air Discharge_Air_Static_Pressure_Step_Parameter: URIRef Discharge_Air_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the temperature of discharge air. 
Discharge_Air_Temperature_Cooling_Setpoint: URIRef # Sets temperature of discharge air for cooling Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of discharge air Discharge_Air_Temperature_Heating_Setpoint: URIRef # Sets temperature of discharge air for heating Discharge_Air_Temperature_High_Reset_Setpoint: URIRef Discharge_Air_Temperature_Low_Reset_Setpoint: URIRef Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Discharge_Air_Temperature_Reset_Differential_Setpoint: URIRef Discharge_Air_Temperature_Sensor: URIRef # Measures the temperature of discharge air Discharge_Air_Temperature_Setpoint: URIRef # Sets temperature of discharge air Discharge_Air_Temperature_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Discharge_Air_Temperature_Setpoint. Discharge_Air_Temperature_Step_Parameter: URIRef Discharge_Air_Velocity_Pressure_Sensor: URIRef Discharge_Chilled_Water: URIRef Discharge_Fan: URIRef # Fan moving air discharged from HVAC vents Discharge_Hot_Water: URIRef Discharge_Water: URIRef Discharge_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of discharge water Discharge_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Discharge_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Discharge_Water_Flow_Sensor: URIRef # Measures the rate of flow of discharge water Discharge_Water_Flow_Setpoint: URIRef # Sets the target flow rate of discharge water Discharge_Water_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature of the discharge water. 
Discharge_Water_Temperature_Proportional_Band_Parameter: URIRef Discharge_Water_Temperature_Sensor: URIRef # Measures the temperature of discharge water Discharge_Water_Temperature_Setpoint: URIRef # Sets temperature of discharge water Disconnect_Switch: URIRef # Building power is most commonly provided by utility company through a master disconnect switch (sometimes called a service disconnect) in the main electrical room of a building. The Utility Company provided master disconnect switch often owns or restricts access to this switch. There can also be other cases where a disconnect is placed into an electrical system to allow service cut-off to a portion of the building. Displacement_Flow_Air_Diffuser: URIRef # An air diffuser that is designed for low discharge air speeds to minimize turbulence and induction of room air. This diffuser is used with displacement ventilation systems. Distribution_Frame: URIRef # A class of spaces where the cables carrying signals meet and connect, e.g. a wiring closet or a broadcast downlink room Domestic_Hot_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of domestic water supplied by a hot water system Domestic_Hot_Water_Supply_Temperature_Setpoint: URIRef # Sets temperature of supplying part of domestic hot water Domestic_Hot_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of domestic hot water in a building Domestic_Hot_Water_System_Enable_Command: URIRef # Enables operation of the domestic hot water system Domestic_Hot_Water_Temperature_Setpoint: URIRef # Sets temperature of domestic hot water Domestic_Hot_Water_Valve: URIRef # A valve regulating the flow of domestic hot water Domestic_Water: URIRef # Tap water for drinking, washing, cooking, and flushing of toliets Domestic_Water_Loop: URIRef Drench_Hose: URIRef Drive_Ready_Status: URIRef # Indicates if a hard drive or other storage device is ready to be used, e.g. 
in the context of RAID Duration_Sensor: URIRef # Measures the duration of a phenomenon or event ESS_Panel: URIRef # See Embedded_Surface_System_Panel EconCycle_Start_Stop_Status: URIRef Economizer: URIRef # Device that, on proper variable sensing, initiates control signals or actions to conserve energy. A control system that reduces the mechanical heating and cooling requirement. Economizer_Damper: URIRef # A damper that is part of an economizer that is used to module the flow of air Effective_Air_Temperature_Cooling_Setpoint: URIRef Effective_Air_Temperature_Heating_Setpoint: URIRef Effective_Air_Temperature_Setpoint: URIRef Effective_Discharge_Air_Temperature_Setpoint: URIRef Effective_Return_Air_Temperature_Setpoint: URIRef Effective_Room_Air_Temperature_Setpoint: URIRef Effective_Supply_Air_Temperature_Setpoint: URIRef Effective_Zone_Air_Temperature_Setpoint: URIRef Electric_Baseboard_Radiator: URIRef # Electric heating device located at or near the floor Electric_Boiler: URIRef # A closed, pressure vessel that uses electricity for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. 
Electric_Radiator: URIRef # Electric heating device Electrical_Equipment: URIRef Electrical_Meter: URIRef # A meter that measures the usage or consumption of electricity Electrical_Power_Sensor: URIRef # Measures the amount of instantaneous electric power consumed Electrical_Room: URIRef # A class of service rooms that house electrical equipment for a building Electrical_System: URIRef # Devices that serve or are part of the electrical subsystem in the building Elevator: URIRef # A device that provides vertical transportation between floors, levels or decks of a building, vessel or other structure Elevator_Shaft: URIRef # The vertical space in which an elevator ascends and descends Elevator_Space: URIRef # The vertical space in which an elevator ascends and descends Embedded_Surface_System_Panel: URIRef # Radiant panel heating and cooling system where the energy heat source or sink is embedded in a radiant layer which is thermally insulated from the building structure. Embedded_Temperature_Sensor: URIRef # Measures the internal temperature of the radiant layer of the radiant heating and cooling HVAC system. Embedded_Temperature_Setpoint: URIRef # Sets temperature for the internal material, e.g. concrete slab, of the radiant panel. 
Emergency_Air_Flow_System: URIRef Emergency_Air_Flow_System_Status: URIRef Emergency_Alarm: URIRef # Alarms that indicate off-normal conditions associated with emergency systems Emergency_Generator_Alarm: URIRef # An alarm that indicates off-normal conditions associated with an emergency generator Emergency_Generator_Status: URIRef # Indicates if an emergency generator is active Emergency_Phone: URIRef Emergency_Power_Off_System: URIRef # A system that can power down a single piece of equipment or a single system from a single point Emergency_Power_Off_System_Activated_By_High_Temperature_Status: URIRef Emergency_Power_Off_System_Activated_By_Leak_Detection_System_Status: URIRef Emergency_Power_Off_System_Status: URIRef Emergency_Push_Button_Status: URIRef # Indicates if an emergency button has been pushed Emergency_Wash_Station: URIRef Employee_Entrance_Lobby: URIRef # An open space near an entrance that is typically only used for employees Enable_Command: URIRef # Commands that enable functionality Enable_Differential_Enthalpy_Command: URIRef # Enables the use of differential enthalpy control Enable_Differential_Temperature_Command: URIRef # Enables the use of differential temperature control Enable_Fixed_Enthalpy_Command: URIRef # Enables the use of fixed enthalpy control Enable_Fixed_Temperature_Command: URIRef # Enables the use of fixed temperature control Enable_Hot_Water_System_Outside_Air_Temperature_Setpoint: URIRef # Enables hot water system when outside air temperature reaches the indicated value Enable_Status: URIRef # Indicates if a system or piece of functionality has been enabled Enclosed_Office: URIRef # A space for individuals to work with walls and a door Energy_Generation_System: URIRef # A collection of devices that generates electricity Energy_Sensor: URIRef # Measures energy consumption Energy_Storage: URIRef # Devices or equipment that store energy in its various forms Energy_Storage_System: URIRef # A collection of devices that stores 
electricity Energy_System: URIRef # A collection of devices that generates, stores or transports electricity Energy_Usage_Sensor: URIRef # Measures the total amount of energy used over some period of time Energy_Zone: URIRef # A space or group of spaces that are managed or monitored as one unit for energy purposes Entering_Water: URIRef # Water that is entering a piece of equipment or system Entering_Water_Flow_Sensor: URIRef # Measures the rate of flow of water entering a piece of equipment or system Entering_Water_Flow_Setpoint: URIRef # Sets the target flow rate of entering water Entering_Water_Temperature_Sensor: URIRef # Measures the temperature of water entering a piece of equipment or system Entering_Water_Temperature_Setpoint: URIRef # Sets temperature of entering water Enthalpy_Sensor: URIRef # Measures the total heat content of some substance Enthalpy_Setpoint: URIRef # Sets enthalpy Entrance: URIRef # The location and space of a building where people enter and exit the building Environment_Box: URIRef # (also known as climatic chamber), enclosed space designed to create a particular environment. Equipment: URIRef # devices that serve all or part of the building and may include electric power, lighting, transportation, or service water heating, including, but not limited to, furnaces, boilers, air conditioners, heat pumps, chillers, water heaters, lamps, luminaires, ballasts, elevators, escalators, or other devices or installations. 
Equipment_Room: URIRef # A telecommunications room where equipment that serves the building is stored Evaporative_Heat_Exchanger: URIRef Even_Month_Status: URIRef Exercise_Room: URIRef # An indoor room used for exercise and physical activities Exhaust_Air: URIRef # air that must be removed from a space due to contaminants, regardless of pressurization Exhaust_Air_Dewpoint_Sensor: URIRef # Measures dewpoint of exhaust air Exhaust_Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between an upstream and downstream of an air duct or other air conduit used to exhaust air from the building Exhaust_Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a exhaust air duct or conduit Exhaust_Air_Flow_Integral_Time_Parameter: URIRef Exhaust_Air_Flow_Proportional_Band_Parameter: URIRef Exhaust_Air_Flow_Sensor: URIRef # Measures the rate of flow of exhaust air Exhaust_Air_Flow_Setpoint: URIRef # Sets exhaust air flow rate Exhaust_Air_Humidity_Sensor: URIRef # Measures the relative humidity of exhaust air Exhaust_Air_Humidity_Setpoint: URIRef # Humidity setpoint for exhaust air Exhaust_Air_Stack_Flow_Deadband_Setpoint: URIRef # Sets the size of a deadband of exhaust air stack flow Exhaust_Air_Stack_Flow_Integral_Time_Parameter: URIRef Exhaust_Air_Stack_Flow_Proportional_Band_Parameter: URIRef Exhaust_Air_Stack_Flow_Sensor: URIRef # Measures the rate of flow of air in the exhaust air stack Exhaust_Air_Stack_Flow_Setpoint: URIRef # Sets exhaust air stack flow rate Exhaust_Air_Static_Pressure_Proportional_Band_Parameter: URIRef Exhaust_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within exhaust regions of an HVAC system Exhaust_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of exhaust air Exhaust_Air_Temperature_Sensor: URIRef # Measures the temperature of exhaust air Exhaust_Air_Velocity_Pressure_Sensor: URIRef Exhaust_Damper: URIRef # A damper 
that modulates the flow of exhaust air Exhaust_Fan: URIRef # Fan moving exhaust air -- air that must be removed from a space due to contaminants Exhaust_Fan_Disable_Command: URIRef # Disables operation of the exhaust fan Exhaust_Fan_Enable_Command: URIRef # Enables operation of the exhaust fan Eye_Wash_Station: URIRef FCU: URIRef # See Fan_Coil_Unit Failure_Alarm: URIRef # Alarms that indicate the failure of devices, equipment, systems and control loops Fan: URIRef # Any device with two or more blades or vanes attached to a rotating shaft used to produce an airflow for the purpose of comfort, ventilation, exhaust, heating, cooling, or any other gaseous transport. Fan_Coil_Unit: URIRef # Terminal device consisting of a heating and/or cooling heat exchanger or 'coil' and fan that is used to control the temperature in the space where it is installed Fan_On_Off_Status: URIRef Fan_Status: URIRef # Indicates properties of fans Fan_VFD: URIRef # Variable-frequency drive for fans Fault_Reset_Command: URIRef # Clears a fault status Fault_Status: URIRef # Indicates the presence of a fault in a device, system or control loop Field_Of_Play: URIRef # The area of a stadium where athletic events occur, e.g. 
the soccer pitch Filter: URIRef # Device to remove gases from a mixture of gases or to remove solid material from a fluid Filter_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure on either side of a filter Filter_Reset_Command: URIRef Filter_Status: URIRef # Indicates if a filter needs to be replaced Final_Filter: URIRef # The last, high-efficiency filter installed in a sequence to remove the finest particulates from the substance being filtered Fire_Control_Panel: URIRef # A panel-mounted device that provides status and control of a fire safety system Fire_Safety_Equipment: URIRef Fire_Safety_System: URIRef # A system containing devices and equipment that monitor, detect and suppress fire hazards Fire_Sensor: URIRef # Measures the presence of fire Fire_Zone: URIRef # combustion chamber in a furnace or boiler. First_Aid_Kit: URIRef First_Aid_Room: URIRef # A room where a person with minor injuries can be treated or temporarily treated until transferred to a more advanced medical facility Floor: URIRef # A level, typically representing a horizontal aggregation of spaces that are vertically bound. (referring to IFC) Flow_Sensor: URIRef # Measures the rate of flow of some substance Flow_Setpoint: URIRef # Sets flow Fluid: URIRef # substance, as a liquid or gas, that is capable of flowing and that changes shape when acted on by a force. Food_Service_Room: URIRef # A space used in the production, storage, serving, or cleanup of food and beverages Formaldehyde_Level_Sensor: URIRef # Measures the concentration of formaldehyde in air Freeze_Status: URIRef # Indicates if a substance contained within a vessel has frozen Freezer: URIRef # cold chamber usually kept at a temperature of 22°F to 31°F (–5°C to –1°C), with high-volume air circulation. Frequency_Command: URIRef # Controls the frequency of a device's operation (e.g. rotational frequency) Frequency_Sensor: URIRef # Measures the frequency of a phenomenon or aspect of a phenomenon, e.g. 
the frequency of a fan turning Fresh_Air_Fan: URIRef # Fan moving fresh air -- air that is supplied into the building from the outdoors Fresh_Air_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Fresh_Air_Setpoint. Frost: URIRef # frost formed on the cold surface (tubes, plates) of a cooling coil. Frost_Sensor: URIRef # Senses the presence of frost or conditions that may cause frost Fuel_Oil: URIRef # Petroleum based oil burned for energy Fume_Hood: URIRef # A fume-collection device mounted over a work space, table, or shelf and serving to conduct unwanted gases away from the area enclosed. Fume_Hood_Air_Flow_Sensor: URIRef # Measures the rate of flow of air in a fume hood Furniture: URIRef # Movable objects intended to support various human activities such as seating, eating and sleeping Gain_Parameter: URIRef Gas: URIRef # state of matter in which substances exist in the form of nonaggregated molecules and which, within acceptable limits of accuracy, satisfy the ideal gas laws; usually a highly superheated vapor. See [[state]]. Gas_Distribution: URIRef # Utilize a gas distribution source to represent how gas is distributed across multiple destinations Gas_Meter: URIRef # A meter that measures the usage or consumption of gas Gas_Sensor: URIRef # Measures gas concentration (other than CO2) Gas_System: URIRef Gas_Valve: URIRef Gasoline: URIRef # Petroleum derived liquid used as a fuel source Gatehouse: URIRef # The standalone building used to manage the entrance to a campus or building grounds Generator_Room: URIRef # A room for electrical equipment, specifically electrical generators. 
Glycol: URIRef HVAC_Equipment: URIRef # See Heating_Ventilation_Air_Conditioning_System HVAC_System: URIRef # See Heating_Ventilation_Air_Conditioning_System HVAC_Zone: URIRef # a space or group of spaces, within a building with heating, cooling, and ventilating requirements, that are sufficiently similar so that desired conditions (e.g., temperature) can be maintained throughout using a single sensor (e.g., thermostat or temperature sensor). HX: URIRef # See Heat_Exchanger Hail: URIRef # pellets of frozen rain which fall in showers from cumulonimbus clouds. Hail_Sensor: URIRef # Measures hail in terms of its size and damage potential Hallway: URIRef # A common space, used to connect other parts of a building Hazardous_Materials_Storage: URIRef # A storage space set aside (usually with restricted access) for the storage of materials that can be hazardous to living beings or the environment Heat_Exchanger: URIRef # A heat exchanger is a piece of equipment built for efficient heat transfer from one medium to another. The media may be separated by a solid wall to prevent mixing or they may be in direct contact (BEDES) Heat_Exchanger_Supply_Water_Temperature_Sensor: URIRef # Measures the temperature of water supplied by a heat exchanger Heat_Exchanger_System_Enable_Status: URIRef # Indicates if the heat exchanger system has been enabled Heat_Recovery_Hot_Water_System: URIRef Heat_Sensor: URIRef # Measures heat Heat_Wheel: URIRef # A rotary heat exchanger positioned within the supply and exhaust air streams of an air handling system in order to recover heat energy Heat_Wheel_VFD: URIRef # A VFD that drives a heat wheel Heating_Coil: URIRef # A heating element typically made of pipe, tube or wire that emits heat. Typically filled with hot water, or, in the case of wire, uses electricity. 
Heating_Command: URIRef # Controls the amount of heating to be delivered (typically as a proportion of total heating output) Heating_Demand_Sensor: URIRef # Measures the amount of power consumed by a heating process; typically found by multiplying the tonnage of a unit (e.g. RTU) by the efficiency rating in kW/ton Heating_Demand_Setpoint: URIRef # Sets the rate required for heating Heating_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for heating Heating_Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of heating discharge air Heating_Discharge_Air_Temperature_Integral_Time_Parameter: URIRef Heating_Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Heating_Start_Stop_Status: URIRef Heating_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for heating Heating_Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air for heating Heating_Supply_Air_Temperature_Integral_Time_Parameter: URIRef Heating_Supply_Air_Temperature_Proportional_Band_Parameter: URIRef Heating_Temperature_Setpoint: URIRef # Sets temperature for heating Heating_Thermal_Power_Sensor: URIRef Heating_Valve: URIRef # A valve that controls air temperature by modulating the amount of hot water flowing through a heating coil Heating_Ventilation_Air_Conditioning_System: URIRef # The equipment, distribution systems and terminals that provide, either collectively or individually, the processes of heating, ventilating or air conditioning to a building or portion of a building High_CO2_Alarm: URIRef # A device that indicates high concentration of carbon dioxide. High_Discharge_Air_Temperature_Alarm: URIRef # An alarm that indicates that discharge air temperature is too high High_Head_Pressure_Alarm: URIRef # An alarm that indicates a high pressure generated on the output side of a gas compressor in a refrigeration or air conditioning system. 
High_Humidity_Alarm: URIRef # An alarm that indicates high concentration of water vapor in the air. High_Humidity_Alarm_Parameter: URIRef # A parameter determining the humidity level at which to trigger a high humidity alarm High_Outside_Air_Lockout_Temperature_Differential_Parameter: URIRef # The upper bound of the outside air temperature lockout range High_Return_Air_Temperature_Alarm: URIRef # An alarm that indicates that return air temperature is too high High_Static_Pressure_Cutout_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a High_Static_Pressure_Cutout_Setpoint. High_Temperature_Alarm: URIRef # An alarm that indicates high temperature. High_Temperature_Alarm_Parameter: URIRef # A parameter determining the temperature level at which to trigger a high temperature alarm High_Temperature_Hot_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of high-temperature hot water returned to a hot water system High_Temperature_Hot_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of high-temperature hot water supplied by a hot water system Hold_Status: URIRef Hospitality_Box: URIRef # A room at a stadium, usually overlooking the field of play, that is physical separate from the other seating at the venue Hot_Box: URIRef # hot air chamber forming part of an air handler. Hot_Water: URIRef # Hot water used for HVAC heating or supply to hot taps Hot_Water_Baseboard_Radiator: URIRef # Hydronic heating device located at or near the floor Hot_Water_Coil: URIRef # A heating element typically made of pipe, tube or wire that emits heat that is filled with hot water. 
Hot_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of hot water Hot_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Hot_Water_Differential_Pressure_Load_Shed_Reset_Status: URIRef Hot_Water_Differential_Pressure_Load_Shed_Status: URIRef Hot_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Hot_Water_Differential_Pressure_Sensor: URIRef # Measures the difference in water pressure on either side of a hot water valve Hot_Water_Differential_Pressure_Setpoint: URIRef # Sets the target water differential pressure between an upstream and downstream point in a water pipe or conduit used to carry hot water Hot_Water_Differential_Temperature_Sensor: URIRef # Measures the difference in temperature between the entering water to the boiler or other water heating device and leaving water from the same boiler or other water heating device Hot_Water_Discharge_Flow_Sensor: URIRef # Measures the rate of flow of hot discharge water Hot_Water_Discharge_Flow_Setpoint: URIRef # Sets the target flow rate of hot discharge water Hot_Water_Discharge_Temperature_Load_Shed_Status: URIRef Hot_Water_Flow_Sensor: URIRef # Measures the rate of flow in a hot water circuit Hot_Water_Flow_Setpoint: URIRef # Sets the target flow rate of hot water Hot_Water_Loop: URIRef # A collection of equipment that transport and regulate hot water among each other Hot_Water_Meter: URIRef # A meter that measures the usage or consumption of hot water Hot_Water_Pump: URIRef # A pump that performs work on hot water; typically part of a hot water system Hot_Water_Radiator: URIRef # Radiator that uses hot water Hot_Water_Return_Flow_Sensor: URIRef # Measures the rate of flow of hot return water Hot_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of water returned to a hot water system Hot_Water_Static_Pressure_Setpoint: URIRef # Sets static pressure of hot air Hot_Water_Supply_Flow_Sensor: URIRef # Measures the rate 
of flow of hot supply water Hot_Water_Supply_Flow_Setpoint: URIRef # Sets the target flow rate of hot supply water Hot_Water_Supply_Temperature_High_Reset_Setpoint: URIRef Hot_Water_Supply_Temperature_Load_Shed_Status: URIRef Hot_Water_Supply_Temperature_Low_Reset_Setpoint: URIRef Hot_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of water supplied by a hot water system Hot_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of hot water in a building Hot_Water_System_Enable_Command: URIRef # Enables operation of the hot water system Hot_Water_Temperature_Setpoint: URIRef # Sets the temperature of hot water Hot_Water_Usage_Sensor: URIRef # Measures the amount of hot water that is consumed, over some period of time Hot_Water_Valve: URIRef # A valve regulating the flow of hot water Humidification_Start_Stop_Status: URIRef Humidifier: URIRef # A device that adds moisture to air or other gases Humidifier_Fault_Status: URIRef # Indicates the presence of a fault in a humidifier Humidify_Command: URIRef Humidity_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the concentration of water vapor in the air. Humidity_Parameter: URIRef # Parameters relevant to humidity-related systems and points Humidity_Sensor: URIRef # Measures the concentration of water vapor in air Humidity_Setpoint: URIRef # Sets humidity Humidity_Tolerance_Parameter: URIRef # A parameter determining the difference between upper and lower limits of humidity. 
IDF: URIRef # A room for an intermediate distribution frame, where cables carrying signals from the main distribution frame terminate and then feed out to endpoints Ice: URIRef # Water in its solid form Ice_Tank_Leaving_Water_Temperature_Sensor: URIRef # Measures the temperature of water leaving an ice tank Illuminance_Sensor: URIRef # Measures the total luminous flux incident on a surface, per unit area Imbalance_Sensor: URIRef # A sensor which measures difference (imbalance) between phases of an electrical system Induction_Unit: URIRef # A device with a primary air connection and integrated coil and condensate pan that performs sensible and latent cooling of a space. Essentially an Active Chilled Beam with a built in condensate pan. Information_Area: URIRef # An information booth or kiosk where visitors would look for information Inside_Face_Surface_Temperature_Sensor: URIRef # Measures the inside surface (relative to the space) of the radiant panel of the radiant heating and cooling HVAC system. Inside_Face_Surface_Temperature_Setpoint: URIRef # Sets temperature for the inside face surface temperature of the radiant panel. 
Intake_Air_Filter: URIRef # Filters air intake Intake_Air_Temperature_Sensor: URIRef # Measures air at the interface between the building and the outside Integral_Gain_Parameter: URIRef Integral_Time_Parameter: URIRef Intercom_Equipment: URIRef Interface: URIRef # A device that provides an occupant control over a lighting system Intrusion_Detection_Equipment: URIRef Inverter: URIRef # A device that changes direct current into alternating current Isolation_Valve: URIRef # A valve that stops the flow of a fluid, usually for maintenance or safety purposes Janitor_Room: URIRef # A room set aside for the storage of cleaning equipment and supplies Jet_Nozzle_Air_Diffuser: URIRef # An air diffuser that is designed to produce high velocity discharge air stream to throw the air over a large distance or target the air stream to a localize area Laboratory: URIRef # facility acceptable to the local, national, or international recognized authority having jurisdiction and which provides uniform testing and examination procedures and standards for meeting design, manufacturing, and factory testing requirements. 
Laminar_Flow_Air_Diffuser: URIRef # An air diffuser that is designed for low discharge air speeds to provide uniform and unidirectional air pattern which minimizes room air entrainment Last_Fault_Code_Status: URIRef # Indicates the last fault code that occurred Lead_Lag_Command: URIRef # Enables lead/lag operation Lead_Lag_Status: URIRef # Indicates if lead/lag operation is enabled Lead_On_Off_Command: URIRef # Controls the active/inactive status of the "lead" part of a lead/lag system Leak_Alarm: URIRef # An alarm that indicates leaks occurred in systems containing fluids Leaving_Water: URIRef # Water that is leaving a piece of equipment or system Leaving_Water_Flow_Sensor: URIRef # Measures the rate of flow of water that is leaving a piece of equipment or system Leaving_Water_Flow_Setpoint: URIRef # Sets the target flow rate of leaving water Leaving_Water_Temperature_Sensor: URIRef # Measures the temperature of water leaving a piece of equipment or system Leaving_Water_Temperature_Setpoint: URIRef # Sets temperature of leaving water Library: URIRef # A place for the storage and/or consumption of physical media, e.g. books, periodicals, and DVDs/CDs Lighting: URIRef Lighting_Equipment: URIRef Lighting_System: URIRef # The equipment, devices and interfaces that serve or are a part of the lighting subsystem in a building Lighting_Zone: URIRef Limit: URIRef # A parameter that places an upper or lower bound on the range of permitted values of another point Liquid: URIRef # state of matter intermediate between crystalline substances and gases in which the volume of a substance, but not the shape, remains relatively constant. 
Liquid_CO2: URIRef # Carbon Dioxide in the liquid phase Liquid_Detection_Alarm: URIRef Load_Current_Sensor: URIRef # Measures the current consumed by a load Load_Parameter: URIRef Load_Setpoint: URIRef Load_Shed_Command: URIRef # Controls load shedding behavior provided by a control system Load_Shed_Differential_Pressure_Setpoint: URIRef Load_Shed_Setpoint: URIRef Load_Shed_Status: URIRef # Indicates if a load shedding policy is in effect Loading_Dock: URIRef # A part of a facility where delivery trucks can load and unload. Usually partially enclosed with specific traffic lanes leading to the dock Lobby: URIRef # A space just after the entrance to a building or other space of a building, where visitors can wait Locally_On_Off_Status: URIRef Location: URIRef Lockout_Status: URIRef # Indicates if a piece of equipment, system, or functionality has been locked out from operation Lockout_Temperature_Differential_Parameter: URIRef Loop: URIRef # A collection of connected equipment; part of a System Lounge: URIRef # A room for leisure activities or relaxing Louver: URIRef # Device consisting of an assembly of parallel sloping vanes, intended to permit the passage of air while providing a measure of protection against environmental influences Low_Freeze_Protect_Temperature_Parameter: URIRef Low_Humidity_Alarm: URIRef # An alarm that indicates low concentration of water vapor in the air. 
Low_Humidity_Alarm_Parameter: URIRef # A parameter determining the humidity level at which to trigger a low humidity alarm Low_Outside_Air_Lockout_Temperature_Differential_Parameter: URIRef # The lower bound of the outside air temperature lockout range Low_Outside_Air_Temperature_Enable_Differential_Sensor: URIRef Low_Outside_Air_Temperature_Enable_Setpoint: URIRef Low_Return_Air_Temperature_Alarm: URIRef # An alarm that indicates that return air temperature is too low Low_Suction_Pressure_Alarm: URIRef # An alarm that indicates a low suction pressure in the compressor in a refrigeration or air conditioning system. Low_Temperature_Alarm: URIRef # An alarm that indicates low temperature. Low_Temperature_Alarm_Parameter: URIRef # A parameter determining the temperature level at which to trigger a low temperature alarm Lowest_Exhaust_Air_Static_Pressure_Sensor: URIRef # The lowest observed static pressure of air in exhaust regions of an HVAC system over some period of time Luminaire: URIRef # A complete lighting unit consisting of a lamp or lamps and ballast(s) (when applicable) together with the parts designed to distribute the light, to position and protect the lamps, and to connect the lamps to the power supply. 
Luminaire_Driver: URIRef # A power source for a luminaire Luminance_Alarm: URIRef Luminance_Command: URIRef # Controls the amount of luminance delivered by a lighting system Luminance_Sensor: URIRef # Measures the luminous intensity per unit area of light travelling in a given direction Luminance_Setpoint: URIRef # Sets luminance MAU: URIRef # See Makeup_Air_Unit MDF: URIRef # A room for the Main Distribution Frame, the central place of a building where cables carrying signals meet and connect to the outside world Mail_Room: URIRef # A room where mail is received and sorted for distribution to the rest of the building Maintenance_Mode_Command: URIRef # Controls whether or not a device or controller is operating in "Maintenance" mode Maintenance_Required_Alarm: URIRef # An alarm that indicates that repair/maintenance is required on an associated device or equipment Majlis: URIRef # In Arab countries, a Majlis is a private lounge where visitors are received and entertained Makeup_Air_Unit: URIRef # A device designed to condition ventilation air introduced into a space or to replace air exhausted from a process or general area exhaust. The device may be used to prevent negative pressure within buildings or to reduce airborne contaminants in a space. Makeup_Water: URIRef # Water used to makeup water loss through leaks, evaporation, or blowdown Makeup_Water_Valve: URIRef # A valve regulating the flow of makeup water into a water holding tank, e.g. a cooling tower, hot water tank Manual_Auto_Status: URIRef # Indicates if a system is under manual or automatic operation Massage_Room: URIRef # Usually adjunct to an athletic facility, a private/semi-private space where massages are performed Max_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Air_Flow_Setpoint. 
Max_Air_Temperature_Setpoint: URIRef # Setpoint for maximum air temperature Max_Chilled_Water_Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Chilled_Water_Differential_Pressure_Setpoint. Max_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Cooling_Discharge_Air_Flow_Setpoint. Max_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Cooling_Supply_Air_Flow_Setpoint. Max_Discharge_Air_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Discharge_Air_Static_Pressure_Setpoint. Max_Discharge_Air_Temperature_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Discharge_Air_Temperature_Setpoint. Max_Frequency_Command: URIRef # Sets the maximum permitted frequency Max_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Heating_Discharge_Air_Flow_Setpoint. Max_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Heating_Supply_Air_Flow_Setpoint. Max_Hot_Water_Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Hot_Water_Differential_Pressure_Setpoint. Max_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Setpoint. Max_Load_Setpoint: URIRef Max_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Occupied_Cooling_Discharge_Air_Flow_Setpoint. 
Max_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Occupied_Cooling_Supply_Air_Flow_Setpoint. Max_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Occupied_Heating_Discharge_Air_Flow_Setpoint. Max_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Occupied_Heating_Supply_Air_Flow_Setpoint. Max_Position_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Position_Setpoint. Max_Speed_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Speed_Setpoint. Max_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Static_Pressure_Setpoint. Max_Supply_Air_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Supply_Air_Static_Pressure_Setpoint. Max_Temperature_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Temperature_Setpoint. Max_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Cooling_Discharge_Air_Flow_Setpoint. Max_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Cooling_Supply_Air_Flow_Setpoint. Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Heating_Discharge_Air_Flow_Setpoint. 
Max_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Heating_Supply_Air_Flow_Setpoint. Max_Water_Level_Alarm: URIRef # Alarm indicating that the maximum water level was reached Max_Water_Temperature_Setpoint: URIRef # Setpoint for max water temperature Measurable: URIRef Mechanical_Room: URIRef # A class of service rooms where mechanical equipment (HVAC) operates Media_Hot_Desk: URIRef # A non-enclosed space used by members of the media temporarily to cover an event while they are present at a venue Media_Production_Room: URIRef # A enclosed space used by media professionals for the production of media Media_Room: URIRef # A class of spaces related to the creation of media Medical_Room: URIRef # A class of rooms used for medical purposes Medium_Temperature_Hot_Water_Differential_Pressure_Load_Shed_Reset_Status: URIRef Medium_Temperature_Hot_Water_Differential_Pressure_Load_Shed_Setpoint: URIRef Medium_Temperature_Hot_Water_Differential_Pressure_Load_Shed_Status: URIRef Medium_Temperature_Hot_Water_Differential_Pressure_Sensor: URIRef # Measures the difference in water pressure between sections of a medium temperature hot water system Medium_Temperature_Hot_Water_Differential_Pressure_Setpoint: URIRef Medium_Temperature_Hot_Water_Discharge_Temperature_High_Reset_Setpoint: URIRef Medium_Temperature_Hot_Water_Discharge_Temperature_Low_Reset_Setpoint: URIRef Medium_Temperature_Hot_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of medium-temperature hot water returned to a hot water system Medium_Temperature_Hot_Water_Supply_Temperature_High_Reset_Setpoint: URIRef Medium_Temperature_Hot_Water_Supply_Temperature_Load_Shed_Setpoint: URIRef Medium_Temperature_Hot_Water_Supply_Temperature_Load_Shed_Status: URIRef Medium_Temperature_Hot_Water_Supply_Temperature_Low_Reset_Setpoint: URIRef Medium_Temperature_Hot_Water_Supply_Temperature_Sensor: URIRef # 
Measures the temperature of medium-temperature hot water supplied by a hot water system Meter: URIRef # A device that measure usage or consumption of some media --- typically a form energy or power. Methane_Level_Sensor: URIRef # Measures the concentration of methane in air Min_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Air_Flow_Setpoint. Min_Air_Temperature_Setpoint: URIRef # Setpoint for minimum air temperature Min_Chilled_Water_Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Chilled_Water_Differential_Pressure_Setpoint. Min_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Cooling_Discharge_Air_Flow_Setpoint. Min_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Cooling_Supply_Air_Flow_Setpoint. Min_Discharge_Air_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Discharge_Air_Static_Pressure_Setpoint. Min_Discharge_Air_Temperature_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Discharge_Air_Temperature_Setpoint. Min_Fresh_Air_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Fresh_Air_Setpoint. Min_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Heating_Discharge_Air_Flow_Setpoint. Min_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Heating_Supply_Air_Flow_Setpoint. Min_Hot_Water_Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Hot_Water_Differential_Pressure_Setpoint. 
Min_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Setpoint. Min_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Occupied_Cooling_Discharge_Air_Flow_Setpoint. Min_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Occupied_Cooling_Supply_Air_Flow_Setpoint. Min_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Occupied_Heating_Discharge_Air_Flow_Setpoint. Min_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Occupied_Heating_Supply_Air_Flow_Setpoint. Min_Outside_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Outside_Air_Flow_Setpoint. Min_Position_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Position_Setpoint. Min_Speed_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Speed_Setpoint. Min_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Static_Pressure_Setpoint. Min_Supply_Air_Static_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Supply_Air_Static_Pressure_Setpoint. Min_Temperature_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Temperature_Setpoint. Min_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Cooling_Discharge_Air_Flow_Setpoint. 
Min_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Cooling_Supply_Air_Flow_Setpoint. Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Heating_Discharge_Air_Flow_Setpoint. Min_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Heating_Supply_Air_Flow_Setpoint. Min_Water_Level_Alarm: URIRef # Alarm indicating that the minimum water level was reached Min_Water_Temperature_Setpoint: URIRef # Setpoint for min water temperature Mixed_Air: URIRef # (1) air that contains two or more streams of air. (2) combined outdoor air and recirculated air. Mixed_Air_Filter: URIRef # A filter that is applied to the mixture of recirculated and outside air Mixed_Air_Flow_Sensor: URIRef # Measures the rate of flow of mixed air Mixed_Air_Humidity_Sensor: URIRef # Measures the humidity of mixed air Mixed_Air_Humidity_Setpoint: URIRef # Humidity setpoint for mixed air Mixed_Air_Temperature_Sensor: URIRef # Measures the temperature of mixed air Mixed_Air_Temperature_Setpoint: URIRef # Sets temperature of mixed air Mixed_Damper: URIRef # A damper that modulates the flow of the mixed outside and return air streams Mode_Command: URIRef # Controls the operating mode of a device or controller Mode_Status: URIRef # Indicates which mode a system, device or control loop is currently in Motion_Sensor: URIRef # Detects the presence of motion in some area Motor: URIRef # A machine in which power is applied to do work by the conversion of various forms of energy into mechanical force and motion. Motor_Control_Center: URIRef # The Motor Control Center is a specialized type of switchgear which provides electrical power to major mechanical systems in the building such as HVAC components. 
Motor_Current_Sensor: URIRef # Measures the current consumed by a motor Motor_Direction_Status: URIRef # Indicates which direction a motor is operating in, e.g. forward or reverse Motor_On_Off_Status: URIRef Motor_Speed_Sensor: URIRef Motor_Torque_Sensor: URIRef # Measures the torque, or rotating power, of a motor NO2_Level_Sensor: URIRef # Measures the concentration of NO2 in air NVR: URIRef Natural_Gas: URIRef # Fossil fuel energy source consisting largely of methane and other hydrocarbons Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. Network_Video_Recorder: URIRef No_Water_Alarm: URIRef # Alarm indicating that there is no water in the equipment or system Noncondensing_Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas with no system to capture latent heat for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. 
Occupancy_Command: URIRef # Controls whether or not a device or controller is operating in "Occupied" mode Occupancy_Sensor: URIRef # Detects occupancy of some space or area Occupancy_Status: URIRef # Indicates if a room or space is occupied Occupied_Air_Temperature_Setpoint: URIRef Occupied_Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling when occupied Occupied_Cooling_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for cooling when occupied Occupied_Cooling_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature for cooling when occupied Occupied_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow when occupied Occupied_Discharge_Air_Temperature_Setpoint: URIRef Occupied_Heating_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for heating when occupied Occupied_Heating_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for heating when occupied Occupied_Heating_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature for heating when occupied Occupied_Mode_Status: URIRef # Indicates if a system, device or control loop is in "Occupied" mode Occupied_Return_Air_Temperature_Setpoint: URIRef Occupied_Room_Air_Temperature_Setpoint: URIRef Occupied_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate when occupied Occupied_Supply_Air_Temperature_Setpoint: URIRef Occupied_Zone_Air_Temperature_Setpoint: URIRef Off_Command: URIRef # An Off Command controls or reports the binary 'off' status of a control loop, relay or equipment activity. 
It can only be used to stop/deactivate an associated equipment or process, or determine that the related entity is 'off' Off_Status: URIRef # Indicates if a control loop, relay or equipment is off Office: URIRef # A class of rooms dedicated for work or study Office_Kitchen: URIRef # A common space, usually near or in a breakroom, where minor food preparation occurs Oil: URIRef # a viscous liquid derived from petroleum, especially for use as a fuel or lubricant. On_Command: URIRef # An On Command controls or reports the binary 'on' status of a control loop, relay or equipment activity. It can only be used to start/activate an associated equipment or process, or determine that the related entity is 'on' On_Off_Command: URIRef # An On/Off Command controls or reports the binary status of a control loop, relay or equipment activity On_Off_Status: URIRef # Indicates the on/off status of a control loop, relay or equipment On_Status: URIRef # Indicates if a control loop, relay or equipment is on On_Timer_Sensor: URIRef # Measures the duration for which a device was in an active or "on" state Open_Close_Status: URIRef # Indicates the open/close status of a device such as a damper or valve Open_Heating_Valve_Outside_Air_Temperature_Setpoint: URIRef Open_Office: URIRef # An open space used for work or study by multiple people. Usuaully subdivided into cubicles or desks Operating_Mode_Status: URIRef # Indicates the current operating mode of a system, device or control loop Outdoor_Area: URIRef # A class of spaces that exist outside of a building Output_Frequency_Sensor: URIRef Output_Voltage_Sensor: URIRef # Measures the voltage output by some process or device Outside: URIRef Outside_Air: URIRef # air external to a defined zone (e.g., corridors). 
Outside_Air_CO2_Sensor: URIRef # Measures the concentration of CO2 in outside air Outside_Air_CO_Sensor: URIRef # Measures the concentration of CO in outside air Outside_Air_Dewpoint_Sensor: URIRef # Senses the dewpoint temperature of outside air Outside_Air_Enthalpy_Sensor: URIRef # Measures the total heat content of outside air Outside_Air_Flow_Sensor: URIRef # Measures the rate of flow of outside air into the system Outside_Air_Flow_Setpoint: URIRef # Sets outside air flow rate Outside_Air_Grains_Sensor: URIRef # Measures the mass of water vapor in outside air Outside_Air_Humidity_Sensor: URIRef # Measures the relative humidity of outside air Outside_Air_Humidity_Setpoint: URIRef # Humidity setpoint for outside air Outside_Air_Lockout_Temperature_Differential_Parameter: URIRef Outside_Air_Lockout_Temperature_Setpoint: URIRef Outside_Air_Temperature_Enable_Differential_Sensor: URIRef Outside_Air_Temperature_High_Reset_Setpoint: URIRef Outside_Air_Temperature_Low_Reset_Setpoint: URIRef Outside_Air_Temperature_Sensor: URIRef # Measures the temperature of outside air Outside_Air_Temperature_Setpoint: URIRef # Sets temperature of outside air Outside_Air_Wet_Bulb_Temperature_Sensor: URIRef # A sensor measuring the wet-bulb temperature of outside air Outside_Damper: URIRef # A damper that modulates the flow of outside air Outside_Face_Surface_Temperature_Sensor: URIRef # Measures the outside surface (relative to the space) of the radiant panel of a radiant heating and cooling HVAC system. Outside_Face_Surface_Temperature_Setpoint: URIRef # Sets temperature for the outside face surface temperature of the radiant panel. Outside_Illuminance_Sensor: URIRef # Measures the total luminous flux incident on an outside, per unit area Overload_Alarm: URIRef # An alarm that can indicate when a full-load current is exceeded. 
Overridden_Off_Status: URIRef # Indicates if a control loop, relay or equipment has been turned off when it would otherwise be scheduled to be on Overridden_On_Status: URIRef # Indicates if a control loop, relay or equipment has been turned on when it would otherwise be scheduled to be off Overridden_Status: URIRef # Indicates if the expected operating status of an equipment or control loop has been overridden Override_Command: URIRef # Controls or reports whether or not a device or control loop is in 'override' Ozone_Level_Sensor: URIRef # Measures the concentration of ozone in air PAU: URIRef # A type of AHU, use to pre-treat the outdoor air before feed to AHU PID_Parameter: URIRef PIR_Sensor: URIRef # Detects the presence of motion in some area using the differential change in infrared intensity between two or more receptors PM10_Level_Sensor: URIRef # Detects level of particulates of size 10 microns PM10_Sensor: URIRef # Detects matter of size 10 microns PM1_Level_Sensor: URIRef # Detects level of particulates of size 1 microns PM1_Sensor: URIRef # Detects matter of size 1 micron PVT_Panel: URIRef # A type of solar panels that convert solar radiation into usable thermal and electrical energy PV_Array: URIRef PV_Current_Output_Sensor: URIRef # See Photovoltaic_Current_Output_Sensor PV_Generation_System: URIRef # A collection of photovoltaic devices that generates energy PV_Panel: URIRef # An integrated assembly of interconnected photovoltaic cells designed to deliver a selected level of working voltage and current at its output terminals packaged for protection against environment degradation and suited for incorporation in photovoltaic power systems. 
Parameter: URIRef # Parameter points are configuration settings used to guide the operation of equipment and control systems; for example they may provide bounds on valid setpoint values Parking_Level: URIRef # A floor of a parking structure Parking_Space: URIRef # An area large enough to park an individual vehicle Parking_Structure: URIRef # A building or part of a building devoted to vehicle parking Particulate_Matter_Sensor: URIRef # Detects pollutants in the ambient air Passive_Chilled_Beam: URIRef # A chilled beam that does not have an integral air supply and instead relies on natural convection to draw air through the device. Peak_Power_Demand_Sensor: URIRef # The peak power consumed by a process over some period of time Photovoltaic_Array: URIRef # A collection of photovoltaic panels Photovoltaic_Current_Output_Sensor: URIRef # Senses the amperes of electrical current produced as output by a photovoltaic device Piezoelectric_Sensor: URIRef # Senses changes pressure, acceleration, temperature, force or strain via the piezoelectric effect PlugStrip: URIRef # A device containing a block of electrical sockets allowing multiple electrical devices to be powered from a single electrical socket. Plumbing_Room: URIRef # A service room devoted to the operation and routing of water in a building. Usually distinct from the HVAC subsystems. Point: URIRef Portfolio: URIRef # A collection of sites Position_Command: URIRef # Controls or reports the position of some object Position_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Position_Setpoint. Position_Sensor: URIRef # Measures the current position of a component in terms of a fraction of its full range of motion Potable_Water: URIRef # Water that is safe to drink Power_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with electrical power. Power_Loss_Alarm: URIRef # An alarm that indicates a power failure. 
Power_Sensor: URIRef # Measures the amount of instantaneous power consumed Prayer_Room: URIRef # A room set aside for prayer Pre_Filter: URIRef # A filter installed in front of a more efficient filter to extend the life of the more expensive higher efficiency filter Pre_Filter_Status: URIRef # Indicates if a prefilter needs to be replaced Preheat_Demand_Setpoint: URIRef # Sets the rate required for preheat Preheat_Discharge_Air_Temperature_Sensor: URIRef # Measures the temperature of discharge air before heating is applied Preheat_Hot_Water_System: URIRef Preheat_Hot_Water_Valve: URIRef Preheat_Supply_Air_Temperature_Sensor: URIRef # Measures the temperature of supply air before it is heated Pressure_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with pressure. Pressure_Sensor: URIRef # Measure the amount of force acting on a unit area Pressure_Setpoint: URIRef # Sets pressure Pressure_Status: URIRef # Indicates if pressure is within expected bounds Private_Office: URIRef # An office devoted to a single individual, with walls and door Proportional_Band_Parameter: URIRef Proportional_Gain_Parameter: URIRef Pump: URIRef # Machine for imparting energy to a fluid, causing it to do work, drawing a fluid into itself through an entrance port, and forcing the fluid out through an exhaust port. Pump_Command: URIRef # Controls or reports the speed of a pump (typically as a proportion of its full pumping capacity) Pump_On_Off_Status: URIRef Pump_Room: URIRef # A mechanical room that houses pumps Pump_VFD: URIRef # Variable-frequency drive for pumps Quantity: URIRef RC_Panel: URIRef # See Radiant_Ceiling_Panel RTU: URIRef # see Rooftop_Unit RVAV: URIRef # See Variable_Air_Volume_Box_With_Reheat Radiant_Ceiling_Panel: URIRef # Radiant panel heating and cooling system that are usually made from metal and suspended under the ceiling or insulated from the building structure. 
Radiant_Panel: URIRef # A temperature-controlled surface that provides fifty percent (50%) or more of the design heat transfer by thermal radiation. Radiant_Panel_Temperature_Sensor: URIRef # Measures the temperature of the radiant panel of the radiant heating and cooling HVAC system. Radiant_Panel_Temperature_Setpoint: URIRef # Sets temperature of radiant panel. Radiation_Hot_Water_System: URIRef Radiator: URIRef # Heat exchangers designed to transfer thermal energy from one medium to another Radioactivity_Concentration_Sensor: URIRef # Measures the concentration of radioactivity Radon_Concentration_Sensor: URIRef # Measures the concentration of radioactivity due to radon Rain_Duration_Sensor: URIRef # Measures the duration of precipitation within some time frame Rain_Sensor: URIRef # Measures the amount of precipitation fallen Rated_Speed_Setpoint: URIRef # Sets rated speed Reactive_Power_Sensor: URIRef # Measures the portion of power that, averaged over a complete cycle of the AC waveform, is due to stored energy which returns to the source in each cycle Reception: URIRef # A space, usually in a lobby, where visitors to a building or space can go to after arriving at a building and inform building staff that they have arrived Region: URIRef # A unit of geographic space, usually contiguous or somehow related to a geopolitical feature Reheat_Hot_Water_System: URIRef Reheat_Valve: URIRef # A valve that controls air temperature by modulating the amount of hot water flowing through a reheat coil Relative_Humidity_Sensor: URIRef # Measures the present state of absolute humidity relative to a maximum humidity given the same temperature Relief_Damper: URIRef # A damper that is a component of a Relief Air System, ensuring building doesn't become over-pressurised Relief_Fan: URIRef # A fan that is a component of a Relief Air System, ensuring building doesn't become over-pressurised Remotely_On_Off_Status: URIRef Reset_Command: URIRef # Commands that reset a flag, property 
or value to its default Reset_Setpoint: URIRef # Setpoints used in reset strategies Rest_Room: URIRef # A room that provides toilets and washbowls. Alternate spelling of Restroom Restroom: URIRef # A room that provides toilets and washbowls. Retail_Room: URIRef # A space set aside for retail in a larger establishment, e.g. a gift shop in a hospital Return_Air: URIRef # air removed from a space to be recirculated or exhausted. Air extracted from a space and totally or partially returned to an air conditioner, furnace, or other heating, cooling, or ventilating system. Return_Air_CO2_Sensor: URIRef # Measures the concentration of CO2 in return air Return_Air_CO2_Setpoint: URIRef # Sets some property of CO2 in Return Air Return_Air_CO_Sensor: URIRef # Measures the concentration of CO in return air Return_Air_Dewpoint_Sensor: URIRef # Senses the dewpoint temperature of return air Return_Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between the return and supply side Return_Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a return air duct or conduit Return_Air_Enthalpy_Sensor: URIRef # Measures the total heat content of return air Return_Air_Filter: URIRef # Filters return air Return_Air_Flow_Sensor: URIRef # Measures the rate of flow of return air Return_Air_Grains_Sensor: URIRef # Measures the mass of water vapor in return air Return_Air_Humidity_Sensor: URIRef # Measures the relative humidity of return air Return_Air_Humidity_Setpoint: URIRef # Humidity setpoint for return air Return_Air_Plenum: URIRef # A component of the HVAC the receives air from the room to recirculate or exhaust to or from the building Return_Air_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the temperature of return air. 
Return_Air_Temperature_High_Reset_Setpoint: URIRef Return_Air_Temperature_Low_Reset_Setpoint: URIRef Return_Air_Temperature_Sensor: URIRef # Measures the temperature of return air Return_Air_Temperature_Setpoint: URIRef # The target temperature for return air, often used as an approximation of zone air temperature Return_Chilled_Water_Temperature_Setpoint: URIRef # Sets the temperature of return (downstream of the chilled water load) chilled water Return_Condenser_Water: URIRef # In a condenser water loop, this is water being brought away from the condenser side of a heat-rejection device (e.g. chiller). It is the 'warm' side. Return_Condenser_Water_Flow_Sensor: URIRef # Measures the flow of the return condenser water Return_Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of the return condenser water Return_Condenser_Water_Temperature_Setpoint: URIRef # The temperature setpoint for the return condenser water Return_Damper: URIRef # A damper that modulates the flow of return air Return_Fan: URIRef # Fan moving return air -- air that is circulated from the building back into the HVAC system Return_Heating_Valve: URIRef # A valve installed on the return side of a heat exchanger Return_Hot_Water: URIRef Return_Hot_Water_Temperature_Setpoint: URIRef # Sets the temperature of return (downstream of the hot water load) hot water Return_Water: URIRef # The water is a system after it is used in a heat transfer cycle Return_Water_Flow_Sensor: URIRef Return_Water_Temperature_Sensor: URIRef # Measures the temperature of return water Return_Water_Temperature_Setpoint: URIRef # Sets the temperature of return water Riser: URIRef # A vertical shaft indented for installing building infrastructure e.g., electrical wire, network communication wire, plumbing, etc Rooftop: URIRef Rooftop_Unit: URIRef # Packaged air conditioner mounted on a roof, the conditioned air being discharged directly into the rooms below or through a duct system. 
Room: URIRef # Base class for all more specific room types. Room_Air_Temperature_Setpoint: URIRef # Sets temperature of room air Run_Enable_Command: URIRef Run_Request_Status: URIRef # Indicates if a request has been filed to start a device or equipment Run_Status: URIRef Run_Time_Sensor: URIRef # Measures the duration for which a device was in an active or "on" state Safety_Equipment: URIRef Safety_Shower: URIRef Safety_System: URIRef Sash_Position_Sensor: URIRef # Measures the current position of a sash in terms of the percent of fully open Schedule_Temperature_Setpoint: URIRef # The current setpoint as indicated by the schedule Security_Equipment: URIRef Security_Service_Room: URIRef # A class of spaces used by the security staff of a facility Sensor: URIRef # A Sensor is an input point that represents the value of a device or instrument designed to detect and measure a variable (ASHRAE Dictionary). Server_Room: URIRef Service_Room: URIRef # A class of spaces related to the operations of building subsystems, e.g. HVAC, electrical, IT, plumbing, etc Setpoint: URIRef # A Setpoint is an input value at which the desired property is set Shading_System: URIRef # Devices that can control daylighting through various means Shared_Office: URIRef # An office used by multiple people Short_Cycle_Alarm: URIRef # An alarm that indicates a short cycle occurred. A short cycle occurs when a cooling cycle is prevented from completing its full cycle Shower: URIRef # A space containing showers, usually adjacent to an athletic or execise area Site: URIRef # A geographic region containing 0 or more buildings. Typically used as the encapsulating location for a collection of Brick entities through the hasSite/isSiteOf relationships Smoke_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with smoke. 
Smoke_Detection_Alarm: URIRef Solar_Azimuth_Angle_Sensor: URIRef # Measures the azimuth angle of the sun Solar_Radiance_Sensor: URIRef # The amount of light that passes through or is emitted from the sun and falls within a given solid angle in a specified direction Solar_Thermal_Collector: URIRef # A type of solar panels that converts solar radiation into thermal energy. Solar_Zenith_Angle_Sensor: URIRef # Measures the zenith angle of the sun Solid: URIRef # one of the three states or phases of matter characterized by stability of dimensions, relative incompressibility, and molecular motion held to limited oscillation. Space: URIRef # A part of the physical world or a virtual world whose 3D spatial extent is bounded actually or theoretically, and provides for certain functions within the zone it is contained in. Space_Heater: URIRef # A heater used to warm the air in an enclosed area, such as a room or office Speed_Reset_Command: URIRef Speed_Sensor: URIRef # Measures the magnitude of velocity of some form of movement Speed_Setpoint: URIRef # Sets speed Speed_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Speed_Setpoint. Speed_Status: URIRef # Indicates the operating speed of a device or equipment, e.g. fan Sports_Service_Room: URIRef # A class of spaces used in the support of sports Stage_Enable_Command: URIRef # A point representing a discrete stage which the equipment should be operating at. 
The desired stage number should be identified by an entity property Stage_Riser: URIRef # A low platform in a space or on a stage Stages_Status: URIRef # Indicates which stage a control loop or equipment is in Staircase: URIRef # A vertical space containing stairs Standby_CRAC: URIRef # A CRAC that is activated as part of a lead/lag operation or when an alarm occurs in a primary unit Standby_Fan: URIRef # Fan that is activated as part of a lead/lag operation or when a primary fan raises an alarm Standby_Glycool_Unit_On_Off_Status: URIRef # Indicates the on/off status of a standby glycool unit Standby_Load_Shed_Command: URIRef Standby_Unit_On_Off_Status: URIRef # Indicates the on/off status of a standby unit Start_Stop_Command: URIRef # A Start/Stop Command controls or reports the active/inactive status of a control sequence Start_Stop_Status: URIRef # Indicates the active/inactive status of a control loop (but not equipment activities or relays -- use On/Off for this purpose) Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure Static_Pressure_Integral_Time_Parameter: URIRef Static_Pressure_Proportional_Band_Parameter: URIRef Static_Pressure_Sensor: URIRef # Measures resistance to airflow in a heating and cooling system's components and duct work Static_Pressure_Setpoint: URIRef # Sets static pressure Static_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Static_Pressure_Setpoint. Static_Pressure_Step_Parameter: URIRef Status: URIRef # A Status is input point that reports the current operating mode, state, position, or condition of an item. Statuses are observations and should be considered 'read-only' Steam: URIRef # water in the vapor phase. 
Steam_Baseboard_Radiator: URIRef # Steam heating device located at or near the floor Steam_Distribution: URIRef # Utilize a steam distribution source to represent how steam is distributed across multiple destinations Steam_On_Off_Command: URIRef Steam_Radiator: URIRef # Radiator that uses steam Steam_System: URIRef # The equipment, devices and conduits that handle the production and distribution of steam in a building Steam_Usage_Sensor: URIRef # Measures the amount of steam that is consumed or used, over some period of time Steam_Valve: URIRef Step_Parameter: URIRef Storage_Room: URIRef # A class of spaces used for storage Storey: URIRef Studio: URIRef # A room used for the production or media, usually with either a specialized set or a specialized sound booth for recording Substance: URIRef Supply_Air: URIRef # (1) air delivered by mechanical or natural ventilation to a space, composed of any combination of outdoor air, recirculated air, or transfer air. (2) air entering a space from an air-conditioning, heating, or ventilating apparatus for the purpose of comfort conditioning. Supply air is generally filtered, fan forced, and either heated, cooled, humidified, or dehumidified as necessary to maintain specified conditions. Only the quantity of outdoor air within the supply airflow may be used as replacement air. 
Supply_Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between an upstream and downstream of an air duct or other air conduit used to supply air into the building Supply_Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a supply air duct or conduit Supply_Air_Duct_Pressure_Status: URIRef # Indicates if air pressure in supply duct is within expected bounds Supply_Air_Flow_Demand_Setpoint: URIRef # Sets the rate of supply air flow required for a process Supply_Air_Flow_Sensor: URIRef # Measures the rate of flow of supply air Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate Supply_Air_Humidity_Sensor: URIRef # Measures the relative humidity of supply air Supply_Air_Humidity_Setpoint: URIRef # Humidity setpoint for supply air Supply_Air_Integral_Gain_Parameter: URIRef Supply_Air_Plenum: URIRef # A component of the HVAC the receives air from the air handling unit to distribute to the building Supply_Air_Proportional_Gain_Parameter: URIRef Supply_Air_Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure of supply air Supply_Air_Static_Pressure_Integral_Time_Parameter: URIRef Supply_Air_Static_Pressure_Proportional_Band_Parameter: URIRef Supply_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within supply regions of an HVAC system Supply_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of supply air Supply_Air_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the temperature of supply air. 
Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air Supply_Air_Temperature_High_Reset_Setpoint: URIRef Supply_Air_Temperature_Low_Reset_Setpoint: URIRef Supply_Air_Temperature_Proportional_Band_Parameter: URIRef Supply_Air_Temperature_Reset_Differential_Setpoint: URIRef Supply_Air_Temperature_Sensor: URIRef # Measures the temperature of supply air Supply_Air_Temperature_Setpoint: URIRef # Temperature setpoint for supply air Supply_Air_Temperature_Step_Parameter: URIRef Supply_Air_Velocity_Pressure_Sensor: URIRef Supply_Chilled_Water: URIRef Supply_Chilled_Water_Temperature_Setpoint: URIRef # Temperature setpoint for supply chilled water Supply_Condenser_Water: URIRef # In a condenser water loop, this is water being brought to the condenser side of a heat-rejection device (e.g. chiller). It is the 'cold' side. Supply_Condenser_Water_Flow_Sensor: URIRef # Measures the flow of the supply condenser water Supply_Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of the supply condenser water Supply_Condenser_Water_Temperature_Setpoint: URIRef # The temperature setpoint for the supply condenser water Supply_Fan: URIRef # Fan moving supply air -- air that is supplied from the HVAC system into the building Supply_Hot_Water: URIRef Supply_Hot_Water_Temperature_Setpoint: URIRef # Temperature setpoint for supply hot water Supply_Water: URIRef Supply_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of supply water Supply_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Supply_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Supply_Water_Flow_Sensor: URIRef # Measures the rate of flow of hot supply water Supply_Water_Flow_Setpoint: URIRef # Sets the flow rate of hot supply water Supply_Water_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature of the supply water. 
Supply_Water_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply water Supply_Water_Temperature_Integral_Time_Parameter: URIRef Supply_Water_Temperature_Proportional_Band_Parameter: URIRef Supply_Water_Temperature_Setpoint: URIRef # Sets temperature of supply water Surveillance_Camera: URIRef Switch: URIRef # A switch used to operate all or part of a lighting installation Switch_Room: URIRef # A telecommuncations room housing network switches Switchgear: URIRef # A main disconnect or service disconnect feeds power to a switchgear, which then distributes power to the rest of the building through smaller amperage-rated disconnects. System: URIRef # A System is a combination of equipment and auxiliary devices (e.g., controls, accessories, interconnecting means, and termi­nal elements) by which energy is transformed so it performs a specific function such as HVAC, service water heating, or lighting. (ASHRAE Dictionary). System_Enable_Command: URIRef # Enables operation of a system System_Shutdown_Status: URIRef # Indicates if a system has been shutdown System_Status: URIRef # Indicates properties of the activity of a system TABS_Panel: URIRef # See Thermally_Activated_Building_System_Panel TETRA_Room: URIRef # A room used for local two-way radio networks, e.g. the portable radios carried by facilities staff TVOC_Level_Sensor: URIRef # A sensor measuring the level of all VOCs in air TVOC_Sensor: URIRef Team_Room: URIRef # An office used by multiple team members for specific work tasks. Distinct from Conference Room Telecom_Room: URIRef # A class of spaces used to support telecommuncations and IT equipment Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature. 
Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature Temperature_Differential_Reset_Setpoint: URIRef Temperature_High_Reset_Setpoint: URIRef Temperature_Low_Reset_Setpoint: URIRef Temperature_Parameter: URIRef # Parameters relevant to temperature-related systems and points Temperature_Sensor: URIRef # Measures temperature: the physical property of matter that quantitatively expresses the common notions of hot and cold Temperature_Setpoint: URIRef # Sets temperature Temperature_Step_Parameter: URIRef Temperature_Tolerance_Parameter: URIRef # A parameter determining the difference between upper and lower limits of temperature. Temporary_Occupancy_Status: URIRef # For systems that differentiate between scheduled occupied/unoccupied mode, this indicates if a space is temporarily occupied when it would otherwise be unoccupied Terminal_Unit: URIRef # A device that regulates the volumetric flow rate and/or the temperature of the controlled medium. Thermal_Power_Meter: URIRef # A standalone thermal power meter Thermal_Power_Sensor: URIRef Thermally_Activated_Building_System_Panel: URIRef # Radiant panel heating and cooling system where the energy heat source or sink is embedded in the building structure such as in slabs and walls. Thermostat: URIRef # An automatic control device used to maintain temperature at a fixed or adjustable setpoint. Ticketing_Booth: URIRef # A room or space used to sell or distribute tickets to events at a venue Time_Parameter: URIRef Time_Setpoint: URIRef Tolerance_Parameter: URIRef # difference between upper and lower limits of size for a given nominal dimension or value. 
Torque_Sensor: URIRef # Measures torque, the tendency of a force to rotate an object about some axis Touchpanel: URIRef # A switch used to operate all or part of a lighting installation that uses a touch-based mechanism (typically resistive or capacitive) rather than a mechanical actuator Trace_Heat_Sensor: URIRef # Measures the surface temperature of pipelines carrying temperature-sensitive products; typically used to avoid frosting/freezing Transformer: URIRef # A Transformer is usually fed by a high-voltage source and then steps down the voltage to a lower-voltage feed for low-voltage application (such as lights). Transformers also can step up voltage, but this generally does not apply to in building distribution. Transformer_Room: URIRef # An electrical room where electricity enters and is transformed to different voltages and currents by the equipment contained in the room Tunnel: URIRef # An enclosed space that connects buildings. Often underground Underfloor_Air_Plenum: URIRef # An open space between a structural concrete slab and the underside of a raised access floor system that connects to an air handling unit to receive conditioned and/or ventilating air before delivery to the room(s) Underfloor_Air_Plenum_Static_Pressure_Sensor: URIRef # Measures the outward push of air against the plenum surfaces and used to measure the resistance when air moves through the plenum Underfloor_Air_Plenum_Static_Pressure_Setpoint: URIRef # Sets the underfloor air plenum static pressure Underfloor_Air_Temperature_Sensor: URIRef # Measures the temperature of underfloor air Unit_Failure_Alarm: URIRef # An alarm that indicates the failure of an equipment or device Unoccupied_Air_Temperature_Cooling_Setpoint: URIRef # Sets temperature of air when unoccupied for cooling Unoccupied_Air_Temperature_Heating_Setpoint: URIRef # Sets temperature of air when unoccupied for heating Unoccupied_Air_Temperature_Setpoint: URIRef # Sets temperature of air when unoccupied 
Unoccupied_Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling when unoccupied Unoccupied_Discharge_Air_Temperature_Setpoint: URIRef Unoccupied_Load_Shed_Command: URIRef Unoccupied_Return_Air_Temperature_Setpoint: URIRef Unoccupied_Room_Air_Temperature_Setpoint: URIRef Unoccupied_Supply_Air_Temperature_Setpoint: URIRef Unoccupied_Zone_Air_Temperature_Setpoint: URIRef Usage_Sensor: URIRef # Measures the amount of some substance that is consumed or used, over some period of time VAV: URIRef # See Variable_Air_Volume_Box VFD: URIRef # Electronic device that varies its output frequency to vary the rotating speed of a motor, given a fixed input frequency. Used with fans or pumps to vary the flow in the system as a function of a maintained pressure. VFD_Enable_Command: URIRef # Enables operation of a variable frequency drive Valve: URIRef # A device that regulates, directs or controls the flow of a fluid by opening, closing or partially obstructing various passageways Valve_Command: URIRef # Controls or reports the openness of a valve (typically as a proportion of its full range of motion) Valve_Position_Sensor: URIRef # Measures the current position of a valve in terms of the percent of fully open Variable_Air_Volume_Box: URIRef # A device that regulates the volume and temperature of air delivered to a zone by opening or closing a damper Variable_Air_Volume_Box_With_Reheat: URIRef # A VAV box with a reheat coil mounted on the discharge end of the unit that can heat the air delivered to a zone Variable_Frequency_Drive: URIRef # Electronic device that varies its output frequency to vary the rotating speed of a motor, given a fixed input frequency. Used with fans or pumps to vary the flow in the system as a function of a maintained pressure. 
Velocity_Pressure_Sensor: URIRef # Measures the difference between total pressure and static pressure Velocity_Pressure_Setpoint: URIRef # Sets static veloicty pressure Vent_Operating_Mode_Status: URIRef # Indicates the current operating mode of a vent Ventilation_Air_Flow_Ratio_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Ventilation_Air_Flow_Ratio_Setpoint. Ventilation_Air_System: URIRef # The equipment, devices, and conduits that handle the introduction and distribution of ventilation air in the building Vertical_Space: URIRef # A class of spaces used to connect multiple floors or levels.. Video_Intercom: URIRef Video_Surveillance_Equipment: URIRef Visitor_Lobby: URIRef # A lobby for visitors to the building. Sometimes used to distinguish from an employee entrance looby Voltage_Imbalance_Sensor: URIRef # A sensor which measures the voltage difference (imbalance) between phases of an electrical system Voltage_Sensor: URIRef # Measures the voltage of an electrical device or object Wardrobe: URIRef # Storage for clothing, costumes, or uniforms Warm_Cool_Adjust_Sensor: URIRef # User provided adjustment of zone temperature, typically in the range of +/- 5 degrees Warmest_Zone_Air_Temperature_Sensor: URIRef # The zone temperature that is warmest; drives the supply temperature of cold air. A computed value rather than a physical sensor. Also referred to as a 'Highest Zone Air Temperature Sensor' Waste_Storage: URIRef # A room used for storing waste such as trash or recycling Water: URIRef # transparent, odorless, tasteless liquid; a compound of hydrogen and oxygen (H2O), containing 11.188% hydrogen and 88.812% oxygen by mass; freezing at 32°F (0°C); boiling near 212°F (100°C). Water_Alarm: URIRef # Alarm that indicates an undesirable event with a pipe, container, or equipment carrying water e.g. 
water leak Water_Differential_Pressure_Setpoint: URIRef # Sets the target water differential pressure between an upstream and downstream point in a water pipe or conduit Water_Differential_Temperature_Sensor: URIRef # Measures the difference in water temperature between an upstream and downstream point in a pipe or conduit Water_Differential_Temperature_Setpoint: URIRef # Sets the target differential temperature between the start and end of a heat transfer cycle in a water circuit Water_Distribution: URIRef # Utilize a water distribution source to represent how water is distributed across multiple destinations (pipes) Water_Flow_Sensor: URIRef # Measures the rate of flow of water Water_Flow_Setpoint: URIRef # Sets the target flow rate of water Water_Heater: URIRef # An apparatus for heating and usually storing hot water Water_Level_Alarm: URIRef # An alarm that indicates a high or low water level e.g. in a basin Water_Level_Sensor: URIRef # Measures the height/level of water in some container Water_Loop: URIRef # A collection of equipment that transport and regulate water among each other Water_Loss_Alarm: URIRef # An alarm that indicates a loss of water e.g. during transport Water_Meter: URIRef # A meter that measures the usage or consumption of water Water_Pump: URIRef # A pump that performs work on water Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of water in a building Water_Tank: URIRef # A space used to hold water Water_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature of water. 
Water_Temperature_Sensor: URIRef # Measures the temperature of water Water_Temperature_Setpoint: URIRef # Sets temperature of water Water_Usage_Sensor: URIRef # Measures the amount of water that is consumed, over some period of time Water_Valve: URIRef # A valve that modulates the flow of water Weather_Station: URIRef # A dedicated weather measurement station Wind_Direction_Sensor: URIRef # Measures the direction of wind in degrees relative to North Wind_Speed_Sensor: URIRef # Measured speed of wind, caused by air moving from high to low pressure Wing: URIRef # A wing is part of a building – or any feature of a building – that is subordinate to the main, central structure. Workshop: URIRef # A space used to house equipment that can be used to repair or fabricate things Zone: URIRef # (1) a separately controlled heated or cooled space. (2) one occupied space or several occupied spaces with similar occupancy category, occupant density, zone air distribution effectiveness, and zone primary airflow per unit area. (3) space or group of spaces within a building for which the heating, cooling, or lighting requirements are sufficiently similar that desired conditions can be maintained throughout by a single controlling device. Zone_Air: URIRef # air inside a defined zone (e.g., corridors). 
Zone_Air_Cooling_Temperature_Setpoint: URIRef # The upper (cooling) setpoint for zone air temperature Zone_Air_Dewpoint_Sensor: URIRef # Measures dewpoint of zone air Zone_Air_Heating_Temperature_Setpoint: URIRef # The lower (heating) setpoint for zone air temperature Zone_Air_Humidity_Sensor: URIRef # Measures the relative humidity of zone air Zone_Air_Humidity_Setpoint: URIRef # Humidity setpoint for zone air Zone_Air_Temperature_Sensor: URIRef # Measures the temperature of air in a zone Zone_Air_Temperature_Setpoint: URIRef # Sets temperature of zone air Zone_Standby_Load_Shed_Command: URIRef Zone_Unoccupied_Load_Shed_Command: URIRef # http://www.w3.org/2002/07/owl#Property feeds: URIRef # The subject is upstream of the object in the context of some sequential process; some media is passed between them feedsAir: URIRef # Passes air hasAddress: URIRef # To specify the address of a building. hasAssociatedTag: URIRef # The class is associated with the given tag hasInputSubstance: URIRef # The subject receives the given substance as an input to its internal process hasLocation: URIRef # Subject is physically located in the location given by the object hasOutputSubstance: URIRef # The subject produces or exports the given substance from its internal process hasPart: URIRef # The subject is composed in part of the entity given by the object hasPoint: URIRef # The subject has a source of telemetry identified by the object. 
In some systems the source of telemetry may be represented as a digital/analog input/output point hasQUDTReference: URIRef # Points to the relevant QUDT definition hasTag: URIRef # The subject has the given tag hasTimeseriesId: URIRef # The unique identifier (primary key) for this TimeseriesReference in some database hasUnit: URIRef # The QUDT unit associated with this Brick entity (usually a Brick Point instance or Entity Property) isAssociatedWith: URIRef # The tag is associated with the given class isFedBy: URIRef isLocationOf: URIRef # Subject is the physical location encapsulating the object isMeasuredBy: URIRef isPartOf: URIRef isPointOf: URIRef # The subject is a source of telemetry related to the object. In some systems the source of telemetry may be represented as a digital/analog input/output point isRegulatedBy: URIRef isTagOf: URIRef latitude: URIRef longitude: URIRef measures: URIRef # The subject measures a quantity or substance given by the object regulates: URIRef # The subject contributes to or performs the regulation of the substance given by the object storedAt: URIRef # A reference to where the data for this TimeseriesReference is stored timeseries: URIRef # Relates a Brick point to the TimeseriesReference that indicates where and how the data for this point is stored value: URIRef # The basic value of an entity property # https://brickschema.org/schema/Brick#EntityProperty aggregate: URIRef # Description of how the dta for this point is aggregated area: URIRef # Entity has 2-dimensional area azimuth: URIRef # (Horizontal) angle between a projected vector and a reference vector (typically a compass bearing). The projected vector usually indicates the direction of a face or plane. buildingPrimaryFunction: URIRef # Enumerated string applied to a site record to indicate the building's primary function. 
The list of primary functions is derived from the US Energy Star program (adopted from Project Haystack) buildingThermalTransmittance: URIRef # The area-weighted average heat transfer coefficient (commonly referred to as a U-value) for a building envelope conversionEfficiency: URIRef # The percent efficiency of the conversion process (usually to power or energy) carried out by the entity coolingCapacity: URIRef # Measurement of a chiller ability to remove heat (adopted from Project Haystack) coordinates: URIRef # The location of an entity in latitude/longitude currentFlowType: URIRef # The current flow type of the entity electricalPhaseCount: URIRef # Entity has these phases electricalPhases: URIRef # Entity has these electrical AC phases grossArea: URIRef # Entity has gross 2-dimensional area measuredModuleConversionEfficiency: URIRef # The measured percentage of sunlight that is converted into usable power measuredPowerOutput: URIRef # The nominal measured power output of the entity netArea: URIRef # Entity has net 2-dimensional area operationalStage: URIRef # The associated operational stage operationalStageCount: URIRef # The number of operational stages supported by this eqiupment panelArea: URIRef # Surface area of a panel, such as a PV panel powerComplexity: URIRef # Entity has this power complexity powerFlow: URIRef # Entity has this power flow relative to the building' ratedModuleConversionEfficiency: URIRef # The *rated* percentage of sunlight that is converted into usable power, as measured using Standard Test Conditions (STC): 1000 W/sqm irradiance, 25 degC panel temperature, no wind ratedPowerOutput: URIRef # The nominal rated power output of the entity temperatureCoefficientofPmax: URIRef # The % change in power output for every degree celsius that the entity is hotter than 25 degrees celsius thermalTransmittance: URIRef # The area-weighted average heat transfer coefficient (commonly referred to as a U-value) tilt: URIRef # The direction an entity is 
facing in degrees above the horizon volume: URIRef # Entity has 3-dimensional volume yearBuilt: URIRef # Four digit year that a building was first built. (adopted from Project Haystack) _extras = ["PM2.5_Sensor", "PM2.5_Level_Sensor"] _NS = Namespace("https://brickschema.org/schema/Brick#") rdflib-6.1.1/rdflib/namespace/_CSVW.py000066400000000000000000000312001415774155300174620ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class CSVW(DefinedNamespace): """ CSVW Namespace Vocabulary Terms This document describes the RDFS vocabulary description used in the Metadata Vocabulary for Tabular Data [[tabular-metadata]] along with the default JSON-LD Context. Generated from: http://www.w3.org/ns/csvw Date: 2020-05-26 14:19:58.184766 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property aboutUrl: URIRef # A URI template property that MAY be used to indicate what a cell contains information about. base: URIRef # An atomic property that contains a single string: a term defined in the default context representing a built-in datatype URL, as listed above. column: URIRef # An array property of column descriptions as described in section 5.6 Columns. columnReference: URIRef # A column reference property that holds either a single reference to a column description object within this schema, or an array of references. These form the referencing columns for the foreign key definition. commentPrefix: URIRef # An atomic property that sets the comment prefix flag to the single provided value, which MUST be a string. datatype: URIRef # An object property that contains either a single string that is the main datatype of the values of the cell or a datatype description object. If the value of this property is a string, it MUST be one of the built-in datatypes defined in section 5.11.1 Built-in Datatypes or an absolute URL; if it is an object then it describes a more specialised datatype. 
decimalChar: URIRef # A string whose value is used to represent a decimal point within the number. default: URIRef # An atomic property holding a single string that is used to create a default value for the cell in cases where the original string value is an empty string. delimiter: URIRef # An atomic property that sets the delimiter flag to the single provided value, which MUST be a string. describes: URIRef # From IANA describes: The relationship A 'describes' B asserts that resource A provides a description of resource B. There are no constraints on the format or representation of either A or B, neither are there any further constraints on either resource. dialect: URIRef # An object property that provides a single dialect description. If provided, dialect provides hints to processors about how to parse the referenced files to create tabular data models for the tables in the group. doubleQuote: URIRef # A boolean atomic property that, if `true`, sets the escape character flag to `"`. encoding: URIRef # An atomic property that sets the encoding flag to the single provided string value, which MUST be a defined in [[encoding]]. The default is "utf-8". foreignKey: URIRef # For a Table: a list of foreign keys on the table. For a Schema: an array property of foreign key definitions that define how the values from specified columns within this table link to rows within this table or other tables. format: URIRef # An atomic property that contains either a single string or an object that defines the format of a value of this type, used when parsing a string value as described in Parsing Cells in [[tabular-data-model]]. groupChar: URIRef # A string whose value is used to group digits within the number. header: URIRef # A boolean atomic property that, if `true`, sets the header row count flag to `1`, and if `false` to `0`, unless headerRowCount is provided, in which case the value provided for the header property is ignored. 
headerRowCount: URIRef # An numeric atomic property that sets the header row count flag to the single provided value, which must be a non-negative integer. lang: URIRef # An atomic property giving a single string language code as defined by [[BCP47]]. length: URIRef # The exact length of the value of the cell. lineTerminators: URIRef # An atomic property that sets the line terminators flag to either an array containing the single provided string value, or the provided array. maxExclusive: URIRef # An atomic property that contains a single number that is the maximum valid value (exclusive). maxInclusive: URIRef # An atomic property that contains a single number that is the maximum valid value (inclusive). maxLength: URIRef # A numeric atomic property that contains a single integer that is the maximum length of the value. minExclusive: URIRef # An atomic property that contains a single number that is the minimum valid value (exclusive). minInclusive: URIRef # An atomic property that contains a single number that is the minimum valid value (inclusive). minLength: URIRef # An atomic property that contains a single integer that is the minimum length of the value. name: URIRef # An atomic property that gives a single canonical name for the column. The value of this property becomes the name annotation for the described column. note: URIRef # An array property that provides an array of objects representing arbitrary annotations on the annotated tabular data model. null: URIRef # An atomic property giving the string or strings used for null values within the data. If the string value of the cell is equal to any one of these values, the cell value is `null`. ordered: URIRef # A boolean atomic property taking a single value which indicates whether a list that is the value of the cell is ordered (if `true`) or unordered (if `false`). pattern: URIRef # A regular expression string, in the syntax and interpreted as defined by [[ECMASCRIPT]]. 
primaryKey: URIRef # For Schema: A column reference property that holds either a single reference to a column description object or an array of references. For Row: a possibly empty list of cells whose values together provide a unique identifier for this row. This is similar to the name of a column. propertyUrl: URIRef # An URI template property that MAY be used to create a URI for a property if the table is mapped to another format. quoteChar: URIRef # An atomic property that sets the quote character flag to the single provided value, which must be a string or `null`. reference: URIRef # An object property that identifies a **referenced table** and a set of **referenced columns** within that table. referencedRow: URIRef # A possibly empty list of pairs of a foreign key and a row in a table within the same group of tables. required: URIRef # A boolean atomic property taking a single value which indicates whether the cell must have a non-null value. The default is `false`. resource: URIRef # A link property holding a URL that is the identifier for a specific table that is being referenced. row: URIRef # Relates a Table to each Row output. rowTitle: URIRef # A column reference property that holds either a single reference to a column description object or an array of references. rownum: URIRef # The position of the row amongst the rows of the Annotated Tabl, starting from 1 schemaReference: URIRef # A link property holding a URL that is the identifier for a schema that is being referenced. scriptFormat: URIRef # A link property giving the single URL for the format that is used by the script or template. separator: URIRef # An atomic property that MUST have a single string value that is the character used to separate items in the string value of the cell. skipBlankRows: URIRef # An boolean atomic property that sets the `skip blank rows` flag to the single provided boolean value. 
skipColumns: URIRef # An numeric atomic property that sets the `skip columns` flag to the single provided numeric value, which MUST be a non-negative integer. skipInitialSpace: URIRef # A boolean atomic property that, if `true`, sets the trim flag to "start". If `false`, to `false`. skipRows: URIRef # An numeric atomic property that sets the `skip rows` flag to the single provided numeric value, which MUST be a non-negative integer. source: URIRef # A single string atomic property that provides, if specified, the format to which the tabular data should be transformed prior to the transformation using the script or template. suppressOutput: URIRef # A boolean atomic property. If `true`, suppresses any output that would be generated when converting a table or cells within a column. table: URIRef # Relates an Table group to annotated tables. tableDirection: URIRef # One of `rtl`, `ltr` or `auto`. Indicates whether the tables in the group should be displayed with the first column on the right, on the left, or based on the first character in the table that has a specific direction. tableSchema: URIRef # An object property that provides a single schema description as described in section 5.5 Schemas, used as the default for all the tables in the group targetFormat: URIRef # A link property giving the single URL for the format that will be created through the transformation. textDirection: URIRef # An atomic property that must have a single value that is one of `rtl` or `ltr` (the default). title: URIRef # For a Transformation A natural language property that describes the format that will be generated from the transformation. For a Column: A natural language property that provides possible alternative names for the column. transformations: URIRef # An array property of transformation definitions that provide mechanisms to transform the tabular data into other formats. 
trim: URIRef # An atomic property that, if the boolean `true`, sets the trim flag to `true` and if the boolean `false` to `false`. If the value provided is a string, sets the trim flag to the provided value, which must be one of "true", "false", "start" or "end". url: URIRef # For a Table: This link property gives the single URL of the CSV file that the table is held in, relative to the location of the metadata document. For a Transformation: A link property giving the single URL of the file that the script or template is held in, relative to the location of the metadata document. valueUrl: URIRef # An URI template property that is used to map the values of cells into URLs. virtual: URIRef # A boolean atomic property taking a single value which indicates whether the column is a virtual column not present in the original source # http://www.w3.org/2000/01/rdf-schema#Class Cell: URIRef # A Cell represents a cell at the intersection of a Row and a Column within a Table. Column: URIRef # A Column represents a vertical arrangement of Cells within a Table. Datatype: URIRef # Describes facets of a datatype. Dialect: URIRef # A Dialect Description provides hints to parsers about how to parse a linked file. Direction: URIRef # The class of table/text directions. ForeignKey: URIRef # Describes relationships between Columns in one or more Tables. NumericFormat: URIRef # If the datatype is a numeric type, the format property indicates the expected format for that number. Its value must be either a single string or an object with one or more properties. Row: URIRef # A Row represents a horizontal arrangement of cells within a Table. Schema: URIRef # A Schema is a definition of a tabular format that may be common to multiple tables. Table: URIRef # An annotated table is a table that is annotated with additional metadata. TableGroup: URIRef # A Group of Tables comprises a set of Annotated Tables and a set of annotations that relate to those Tables. 
TableReference: URIRef # An object property that identifies a referenced table and a set of referenced columns within that table. Transformation: URIRef # A Transformation Definition is a definition of how tabular data can be transformed into another format. # http://www.w3.org/2000/01/rdf-schema#Datatype JSON: URIRef # A literal containing JSON. uriTemplate: URIRef # # http://www.w3.org/ns/csvw#Direction auto: URIRef # Indicates whether the tables in the group should be displayed based on the first character in the table that has a specific direction. inherit: URIRef # For `textDirection`, indicates that the direction is inherited from the `tableDirection` annotation of the `table`. ltr: URIRef # Indicates whether the tables in the group should be displayed with the first column on the right. rtl: URIRef # Indicates whether the tables in the group should be displayed with the first column on the left. # http://www.w3.org/ns/prov#Role csvEncodedTabularData: URIRef # Describes the role of a CSV file in the tabular data mapping. tabularMetadata: URIRef # Describes the role of a Metadata file in the tabular data mapping. _NS = Namespace("http://www.w3.org/ns/csvw#") rdflib-6.1.1/rdflib/namespace/_DC.py000066400000000000000000000031511415774155300171720ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DC(DefinedNamespace): """ Dublin Core Metadata Element Set, Version 1.1 Generated from: https://www.dublincore.org/specifications/dublin-core/dcmi-terms/dublin_core_elements.ttl Date: 2020-05-26 14:19:58.671906 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property contributor: URIRef # An entity responsible for making contributions to the resource. coverage: URIRef # The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant. creator: URIRef # An entity primarily responsible for making the resource. 
date: URIRef # A point or period of time associated with an event in the lifecycle of the resource. description: URIRef # An account of the resource. format: URIRef # The file format, physical medium, or dimensions of the resource. identifier: URIRef # An unambiguous reference to the resource within a given context. language: URIRef # A language of the resource. publisher: URIRef # An entity responsible for making the resource available. relation: URIRef # A related resource. rights: URIRef # Information about rights held in and over the resource. source: URIRef # A related resource from which the described resource is derived. subject: URIRef # The topic of the resource. title: URIRef # A name given to the resource. type: URIRef # The nature or genre of the resource. _NS = Namespace("http://purl.org/dc/elements/1.1/") rdflib-6.1.1/rdflib/namespace/_DCAM.py000066400000000000000000000015671415774155300174210ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DCAM(DefinedNamespace): """ Metadata terms for vocabulary description Generated from: https://www.dublincore.org/specifications/dublin-core/dcmi-terms/dublin_core_abstract_model.ttl Date: 2020-05-26 14:20:00.970966 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property domainIncludes: URIRef # A suggested class for subjects of this property. memberOf: URIRef # A relationship between a resource and a vocabulary encoding scheme which indicates that the resource is a member of a set. rangeIncludes: URIRef # A suggested class for values of this property. # http://www.w3.org/2000/01/rdf-schema#Class VocabularyEncodingScheme: URIRef # An enumerated set of resources. 
_NS = Namespace("http://purl.org/dc/dcam/") rdflib-6.1.1/rdflib/namespace/_DCAT.py000066400000000000000000000125021415774155300174170ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DCAT(DefinedNamespace): """ The data catalog vocabulary DCAT is an RDF vocabulary designed to facilitate interoperability between data catalogs published on the Web. By using DCAT to describe datasets in data catalogs, publishers increase discoverability and enable applications easily to consume metadata from multiple catalogs. It further enables decentralized publishing of catalogs and facilitates federated dataset search across sites. Aggregated DCAT metadata can serve as a manifest file to facilitate digital preservation. DCAT is defined at http://www.w3.org/TR/vocab-dcat/. Any variance between that normative document and this schema is an error in this schema. Generated from: https://www.w3.org/ns/dcat2.ttl Date: 2020-05-26 14:19:59.985854 """ # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property accessURL: URIRef # A URL of a resource that gives access to a distribution of the dataset. E.g. landing page, feed, SPARQL endpoint. Use for all cases except a simple download link, in which case downloadURL is preferred. bbox: URIRef # The geographic bounding box of a resource. byteSize: URIRef # The size of a distribution in bytes. centroid: URIRef # The geographic center (centroid) of a resource. compressFormat: URIRef # The compression format of the distribution in which the data is contained in a compressed form, e.g. to reduce the size of the downloadable file. contactPoint: URIRef # Relevant contact information for the catalogued resource. Use of vCard is recommended. dataset: URIRef # A collection of data that is listed in the catalog. distribution: URIRef # An available distribution of the dataset. downloadURL: URIRef # The URL of the downloadable file in a given format. E.g. CSV file or RDF file. 
The format is indicated by the distribution's dct:format and/or dcat:mediaType. endDate: URIRef # The end of the period. keyword: URIRef # A keyword or tag describing a resource. landingPage: URIRef # A Web page that can be navigated to in a Web browser to gain access to the catalog, a dataset, its distributions and/or additional information. mediaType: URIRef # The media type of the distribution as defined by IANA. packageFormat: URIRef # The package format of the distribution in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together. record: URIRef # A record describing the registration of a single dataset or data service that is part of the catalog. startDate: URIRef # The start of the period theme: URIRef # A main category of the resource. A resource can have multiple themes. themeTaxonomy: URIRef # The knowledge organization system (KOS) used to classify catalog's datasets. # http://www.w3.org/2000/01/rdf-schema#Class Catalog: URIRef # A curated collection of metadata about resources (e.g., datasets and data services in the context of a data catalog). CatalogRecord: URIRef # A record in a data catalog, describing the registration of a single dataset or data service. Dataset: URIRef # A collection of data, published or curated by a single source, and available for access or download in one or more representations. Distribution: URIRef # A specific representation of a dataset. A dataset might be available in multiple serializations that may differ in various ways, including natural language, media-type or format, schematic organization, temporal and spatial resolution, level of detail or profiles (which might specify any or all of the above). # http://www.w3.org/2002/07/owl#Class DataService: URIRef # A site or end-point providing operations related to the discovery of, access to, or processing functions on, data or related resources. 
Relationship: URIRef # An association class for attaching additional information to a relationship between DCAT Resources. Resource: URIRef # Resource published or curated by a single agent. Role: URIRef # A role is the function of a resource or agent with respect to another resource, in the context of resource attribution or resource relationships. # http://www.w3.org/2002/07/owl#DatatypeProperty spatialResolutionInMeters: URIRef # mínima separacíon espacial disponible en un conjunto de datos, medida en metros. temporalResolution: URIRef # minimum time period resolvable in a dataset. # http://www.w3.org/2002/07/owl#ObjectProperty accessService: URIRef # A site or end-point that gives access to the distribution of the dataset. catalog: URIRef # A catalog whose contents are of interest in the context of this catalog. endpointDescription: URIRef # A description of the service end-point, including its operations, parameters etc. endpointURL: URIRef # The root location or primary endpoint of the service (a web-resolvable IRI). hadRole: URIRef # The function of an entity or agent with respect to another entity or resource. qualifiedRelation: URIRef # Link to a description of a relationship with another resource. servesDataset: URIRef # A collection of data that this DataService can distribute. service: URIRef # A site or endpoint that is listed in the catalog. _NS = Namespace("http://www.w3.org/ns/dcat#") rdflib-6.1.1/rdflib/namespace/_DCMITYPE.py000066400000000000000000000024711415774155300201260ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DCMITYPE(DefinedNamespace): """ DCMI Type Vocabulary Generated from: https://www.dublincore.org/specifications/dublin-core/dcmi-terms/dublin_core_type.ttl Date: 2020-05-26 14:19:59.084150 """ _fail = True # http://www.w3.org/2000/01/rdf-schema#Class Collection: URIRef # An aggregation of resources. Dataset: URIRef # Data encoded in a defined structure. 
Event: URIRef # A non-persistent, time-based occurrence. Image: URIRef # A visual representation other than text. InteractiveResource: URIRef # A resource requiring interaction from the user to be understood, executed, or experienced. MovingImage: URIRef # A series of visual representations imparting an impression of motion when shown in succession. PhysicalObject: URIRef # An inanimate, three-dimensional object or substance. Service: URIRef # A system that provides one or more functions. Software: URIRef # A computer program in source or compiled form. Sound: URIRef # A resource primarily intended to be heard. StillImage: URIRef # A static visual representation. Text: URIRef # A resource consisting primarily of words for reading. _NS = Namespace("http://purl.org/dc/dcmitype/") rdflib-6.1.1/rdflib/namespace/_DCTERMS.py000066400000000000000000000232341415774155300200110ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DCTERMS(DefinedNamespace): """ DCMI Metadata Terms - other Generated from: https://www.dublincore.org/specifications/dublin-core/dcmi-terms/dublin_core_terms.ttl Date: 2020-05-26 14:20:00.590514 """ _fail = True # http://purl.org/dc/dcam/VocabularyEncodingScheme DCMIType: URIRef # The set of classes specified by the DCMI Type Vocabulary, used to categorize the nature or genre of the resource. DDC: URIRef # The set of conceptual resources specified by the Dewey Decimal Classification. IMT: URIRef # The set of media types specified by the Internet Assigned Numbers Authority. LCC: URIRef # The set of conceptual resources specified by the Library of Congress Classification. LCSH: URIRef # The set of labeled concepts specified by the Library of Congress Subject Headings. MESH: URIRef # The set of labeled concepts specified by the Medical Subject Headings. NLM: URIRef # The set of conceptual resources specified by the National Library of Medicine Classification. 
TGN: URIRef # The set of places specified by the Getty Thesaurus of Geographic Names. UDC: URIRef # The set of conceptual resources specified by the Universal Decimal Classification. # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property abstract: URIRef # A summary of the resource. accessRights: URIRef # Information about who access the resource or an indication of its security status. accrualMethod: URIRef # The method by which items are added to a collection. accrualPeriodicity: URIRef # The frequency with which items are added to a collection. accrualPolicy: URIRef # The policy governing the addition of items to a collection. alternative: URIRef # An alternative name for the resource. audience: URIRef # A class of agents for whom the resource is intended or useful. available: URIRef # Date that the resource became or will become available. bibliographicCitation: URIRef # A bibliographic reference for the resource. conformsTo: URIRef # An established standard to which the described resource conforms. contributor: URIRef # An entity responsible for making contributions to the resource. coverage: URIRef # The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant. created: URIRef # Date of creation of the resource. creator: URIRef # An entity responsible for making the resource. date: URIRef # A point or period of time associated with an event in the lifecycle of the resource. dateAccepted: URIRef # Date of acceptance of the resource. dateCopyrighted: URIRef # Date of copyright of the resource. dateSubmitted: URIRef # Date of submission of the resource. description: URIRef # An account of the resource. educationLevel: URIRef # A class of agents, defined in terms of progression through an educational or training context, for which the described resource is intended. extent: URIRef # The size or duration of the resource. 
format: URIRef # The file format, physical medium, or dimensions of the resource. hasFormat: URIRef # A related resource that is substantially the same as the pre-existing described resource, but in another format. hasPart: URIRef # A related resource that is included either physically or logically in the described resource. hasVersion: URIRef # A related resource that is a version, edition, or adaptation of the described resource. identifier: URIRef # An unambiguous reference to the resource within a given context. instructionalMethod: URIRef # A process, used to engender knowledge, attitudes and skills, that the described resource is designed to support. isFormatOf: URIRef # A pre-existing related resource that is substantially the same as the described resource, but in another format. isPartOf: URIRef # A related resource in which the described resource is physically or logically included. isReferencedBy: URIRef # A related resource that references, cites, or otherwise points to the described resource. isReplacedBy: URIRef # A related resource that supplants, displaces, or supersedes the described resource. isRequiredBy: URIRef # A related resource that requires the described resource to support its function, delivery, or coherence. isVersionOf: URIRef # A related resource of which the described resource is a version, edition, or adaptation. issued: URIRef # Date of formal issuance of the resource. language: URIRef # A language of the resource. license: URIRef # A legal document giving official permission to do something with the resource. mediator: URIRef # An entity that mediates access to the resource. medium: URIRef # The material or physical carrier of the resource. modified: URIRef # Date on which the resource was changed. provenance: URIRef # A statement of any changes in ownership and custody of the resource since its creation that are significant for its authenticity, integrity, and interpretation. 
publisher: URIRef # An entity responsible for making the resource available. references: URIRef # A related resource that is referenced, cited, or otherwise pointed to by the described resource. relation: URIRef # A related resource. replaces: URIRef # A related resource that is supplanted, displaced, or superseded by the described resource. requires: URIRef # A related resource that is required by the described resource to support its function, delivery, or coherence. rights: URIRef # Information about rights held in and over the resource. rightsHolder: URIRef # A person or organization owning or managing rights over the resource. source: URIRef # A related resource from which the described resource is derived. spatial: URIRef # Spatial characteristics of the resource. subject: URIRef # A topic of the resource. tableOfContents: URIRef # A list of subunits of the resource. temporal: URIRef # Temporal characteristics of the resource. title: URIRef # A name given to the resource. type: URIRef # The nature or genre of the resource. valid: URIRef # Date (often a range) of validity of a resource. # http://www.w3.org/2000/01/rdf-schema#Class Agent: URIRef # A resource that acts or has the power to act. AgentClass: URIRef # A group of agents. BibliographicResource: URIRef # A book, article, or other documentary resource. FileFormat: URIRef # A digital resource format. Frequency: URIRef # A rate at which something recurs. Jurisdiction: URIRef # The extent or range of judicial, law enforcement, or other authority. LicenseDocument: URIRef # A legal document giving official permission to do something with a resource. LinguisticSystem: URIRef # A system of signs, symbols, sounds, gestures, or rules used in communication. Location: URIRef # A spatial region or named place. LocationPeriodOrJurisdiction: URIRef # A location, period of time, or jurisdiction. MediaType: URIRef # A file format or physical medium. MediaTypeOrExtent: URIRef # A media type or extent. 
MethodOfAccrual: URIRef # A method by which resources are added to a collection. MethodOfInstruction: URIRef # A process that is used to engender knowledge, attitudes, and skills. PeriodOfTime: URIRef # An interval of time that is named or defined by its start and end dates. PhysicalMedium: URIRef # A physical material or carrier. PhysicalResource: URIRef # A material thing. Policy: URIRef # A plan or course of action by an authority, intended to influence and determine decisions, actions, and other matters. ProvenanceStatement: URIRef # Any changes in ownership and custody of a resource since its creation that are significant for its authenticity, integrity, and interpretation. RightsStatement: URIRef # A statement about the intellectual property rights (IPR) held in or over a resource, a legal document giving official permission to do something with a resource, or a statement about access rights. SizeOrDuration: URIRef # A dimension or extent, or a time taken to play or execute. Standard: URIRef # A reference point against which other things can be evaluated or compared. # http://www.w3.org/2000/01/rdf-schema#Datatype Box: URIRef # The set of regions in space defined by their geographic coordinates according to the DCMI Box Encoding Scheme. ISO3166: URIRef # The set of codes listed in ISO 3166-1 for the representation of names of countries. Period: URIRef # The set of time intervals defined by their limits according to the DCMI Period Encoding Scheme. Point: URIRef # The set of points in space defined by their geographic coordinates according to the DCMI Point Encoding Scheme. RFC1766: URIRef # The set of tags, constructed according to RFC 1766, for the identification of languages. RFC3066: URIRef # The set of tags constructed according to RFC 3066 for the identification of languages. RFC4646: URIRef # The set of tags constructed according to RFC 4646 for the identification of languages. 
RFC5646: URIRef # The set of tags constructed according to RFC 5646 for the identification of languages. URI: URIRef # The set of identifiers constructed according to the generic syntax for Uniform Resource Identifiers as specified by the Internet Engineering Task Force. W3CDTF: URIRef # The set of dates and times constructed according to the W3C Date and Time Formats Specification. # Valid non-python identifiers _extras = ["ISO639-2", "ISO639-3"] _NS = Namespace("http://purl.org/dc/terms/") rdflib-6.1.1/rdflib/namespace/_DOAP.py000066400000000000000000000074411415774155300174350ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class DOAP(DefinedNamespace): """ Description of a Project (DOAP) vocabulary The Description of a Project (DOAP) vocabulary, described using W3C RDF Schema and the Web Ontology Language. Generated from: http://usefulinc.com/ns/doap Date: 2020-05-26 14:20:01.307972 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property audience: URIRef # Description of target user base blog: URIRef # URI of a blog related to a project browse: URIRef # Web browser interface to repository. category: URIRef # A category of project. created: URIRef # Date when something was created, in YYYY-MM-DD form. e.g. 2004-04-05 description: URIRef # Plain text description of a project, of 2-4 sentences in length. developer: URIRef # Developer of software for the project. documenter: URIRef # Contributor of documentation to the project. helper: URIRef # Project contributor. implements: URIRef # A specification that a project implements. Could be a standard, API or legally defined level of conformance. language: URIRef # ISO language code a project has been translated into license: URIRef # The URI of an RDF description of the license the software is distributed under. E.g. a SPDX reference location: URIRef # Location of a repository. 
maintainer: URIRef # Maintainer of a project, a project leader. module: URIRef # Module name of a Subversion, CVS, BitKeeper or Arch repository. name: URIRef # A name of something. os: URIRef # Operating system that a project is limited to. Omit this property if the project is not OS-specific. platform: URIRef # Indicator of software platform (non-OS specific), e.g. Java, Firefox, ECMA CLR release: URIRef # A project release. repository: URIRef # Source code repository. repositoryOf: URIRef # The project that uses a repository. revision: URIRef # Revision identifier of a software release. screenshots: URIRef # Web page with screenshots of project. shortdesc: URIRef # Short (8 or 9 words) plain text description of a project. tester: URIRef # A tester or other quality control contributor. translator: URIRef # Contributor of translations to the project. vendor: URIRef # Vendor organization: commercial, free or otherwise wiki: URIRef # URL of Wiki for collaborative discussion of project. # http://www.w3.org/2000/01/rdf-schema#Class ArchRepository: URIRef # GNU Arch source code repository. BKRepository: URIRef # BitKeeper source code repository. BazaarBranch: URIRef # Bazaar source code branch. CVSRepository: URIRef # CVS source code repository. DarcsRepository: URIRef # darcs source code repository. GitBranch: URIRef # Git source code branch. GitRepository: URIRef # Git source code repository. HgRepository: URIRef # Mercurial source code repository. Project: URIRef # A project. Repository: URIRef # Source code repository. SVNRepository: URIRef # Subversion source code repository. Specification: URIRef # A specification of a system's aspects, technical or otherwise. Version: URIRef # Version information of a project release. # http://www.w3.org/2002/07/owl#InverseFunctionalProperty homepage: URIRef # URL of a project's homepage, associated with exactly one project. 
# Valid non-python identifiers _extras = [ "anon-root", "bug-database", "developer-forum", "download-mirror", "download-page", "file-release", "mailing-list", "programming-language", "service-endpoint", "support-forum", "old-homepage", ] _NS = Namespace("http://usefulinc.com/ns/doap#") rdflib-6.1.1/rdflib/namespace/_FOAF.py000066400000000000000000000140671415774155300174270ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class FOAF(DefinedNamespace): """ Friend of a Friend (FOAF) vocabulary The Friend of a Friend (FOAF) RDF vocabulary, described using W3C RDF Schema and the Web Ontology Language. Generated from: http://xmlns.com/foaf/spec/index.rdf Date: 2020-05-26 14:20:01.597998 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property account: URIRef # Indicates an account held by this agent. accountName: URIRef # Indicates the name (identifier) associated with this online account. accountServiceHomepage: URIRef # Indicates a homepage of the service provide for this online account. age: URIRef # The age in years of some agent. based_near: URIRef # A location that something is based near, for some broadly human notion of near. birthday: URIRef # The birthday of this Agent, represented in mm-dd string form, eg. '12-31'. currentProject: URIRef # A current project this person works on. depiction: URIRef # A depiction of some thing. depicts: URIRef # A thing depicted in this representation. dnaChecksum: URIRef # A checksum for the DNA of some thing. Joke. familyName: URIRef # The family name of some person. family_name: URIRef # The family name of some person. firstName: URIRef # The first name of a person. focus: URIRef # The underlying or 'focal' entity associated with some SKOS-described concept. fundedBy: URIRef # An organization funding a project or person. 
geekcode: URIRef # A textual geekcode for this person, see http://www.geekcode.com/geek.html gender: URIRef # The gender of this Agent (typically but not necessarily 'male' or 'female'). givenName: URIRef # The given name of some person. givenname: URIRef # The given name of some person. holdsAccount: URIRef # Indicates an account held by this agent. img: URIRef # An image that can be used to represent some thing (ie. those depictions which are particularly representative of something, eg. one's photo on a homepage). interest: URIRef # A page about a topic of interest to this person. knows: URIRef # A person known by this person (indicating some level of reciprocated interaction between the parties). lastName: URIRef # The last name of a person. made: URIRef # Something that was made by this agent. maker: URIRef # An agent that made this thing. member: URIRef # Indicates a member of a Group membershipClass: URIRef # Indicates the class of individuals that are a member of a Group myersBriggs: URIRef # A Myers Briggs (MBTI) personality classification. name: URIRef # A name for some thing. nick: URIRef # A short informal nickname characterising an agent (includes login identifiers, IRC and other chat nicknames). page: URIRef # A page or document about this thing. pastProject: URIRef # A project this person has previously worked on. phone: URIRef # A phone, specified using fully qualified tel: URI scheme (refs: http://www.w3.org/Addressing/schemes.html#tel). plan: URIRef # A .plan comment, in the tradition of finger and '.plan' files. primaryTopic: URIRef # The primary topic of some page or document. publications: URIRef # A link to the publications of this person. schoolHomepage: URIRef # A homepage of a school attended by the person. sha1: URIRef # A sha1sum hash, in hex. skypeID: URIRef # A Skype ID status: URIRef # A string expressing what the user is happy for the general public (normally) to know about their current activity. 
surname: URIRef # The surname of some person. theme: URIRef # A theme. thumbnail: URIRef # A derived thumbnail image. tipjar: URIRef # A tipjar document for this agent, describing means for payment and reward. title: URIRef # Title (Mr, Mrs, Ms, Dr. etc) topic: URIRef # A topic of some page or document. topic_interest: URIRef # A thing of interest to this person. workInfoHomepage: URIRef # A work info homepage of some person; a page about their work for some organization. workplaceHomepage: URIRef # A workplace homepage of some person; the homepage of an organization they work for. # http://www.w3.org/2000/01/rdf-schema#Class Agent: URIRef # An agent (eg. person, group, software or physical artifact). Document: URIRef # A document. Group: URIRef # A class of Agents. Image: URIRef # An image. LabelProperty: URIRef # A foaf:LabelProperty is any RDF property with textual values that serve as labels. OnlineAccount: URIRef # An online account. OnlineChatAccount: URIRef # An online chat account. OnlineEcommerceAccount: URIRef # An online e-commerce account. OnlineGamingAccount: URIRef # An online gaming account. Organization: URIRef # An organization. Person: URIRef # A person. PersonalProfileDocument: URIRef # A personal profile RDF document. Project: URIRef # A project (a collective endeavour of some kind). # http://www.w3.org/2002/07/owl#InverseFunctionalProperty aimChatID: URIRef # An AIM chat ID homepage: URIRef # A homepage for some thing. icqChatID: URIRef # An ICQ chat ID isPrimaryTopicOf: URIRef # A document that this thing is the primary topic of. jabberID: URIRef # A jabber ID for something. logo: URIRef # A logo representing some thing. mbox: URIRef # A personal mailbox, ie. an Internet mailbox associated with exactly one owner, the first owner of this mailbox. This is a 'static inverse functional property', in that there is (across time and change) at most one individual that ever has any particular value for foaf:mbox. 
mbox_sha1sum: URIRef # The sha1sum of the URI of an Internet mailbox associated with exactly one owner, the first owner of the mailbox. msnChatID: URIRef # An MSN chat ID openid: URIRef # An OpenID for an Agent. weblog: URIRef # A weblog of some thing (whether person, group, company etc.). yahooChatID: URIRef # A Yahoo chat ID _NS = Namespace("http://xmlns.com/foaf/0.1/") rdflib-6.1.1/rdflib/namespace/_ODRL2.py000066400000000000000000000527561415774155300175450ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class ODRL2(DefinedNamespace): """ ODRL Version 2.2 The ODRL Vocabulary and Expression defines a set of concepts and terms (the vocabulary) and encoding mechanism (the expression) for permissions and obligations statements describing digital content usage based on the ODRL Information Model. Generated from: https://www.w3.org/ns/odrl/2/ODRL22.ttl Date: 2020-05-26 14:20:02.352356 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property action: URIRef # The operation relating to the Asset for which the Rule is being subjected. andSequence: URIRef # The relation is satisfied when each of the Constraints are satisfied in the order specified. assignee: URIRef # The Party is the recipient of the Rule. assigneeOf: URIRef # Identifies an ODRL Policy for which the identified Party undertakes the assignee functional role. assigner: URIRef # The Party is the issuer of the Rule. assignerOf: URIRef # Identifies an ODRL Policy for which the identified Party undertakes the assigner functional role. attributedParty: URIRef # The Party to be attributed. attributingParty: URIRef # The Party who undertakes the attribution. compensatedParty: URIRef # The Party is the recipient of the compensation. compensatingParty: URIRef # The Party that is the provider of the compensation. conflict: URIRef # The conflict-resolution strategy for a Policy. consentedParty: URIRef # The Party who obtains the consent. 
consentingParty: URIRef # The Party to obtain consent from. consequence: URIRef # Relates a Duty to another Duty, the latter being a consequence of not fulfilling the former. constraint: URIRef # Constraint applied to a Rule contractedParty: URIRef # The Party who is being contracted. contractingParty: URIRef # The Party who is offering the contract. dataType: URIRef # The datatype of the value of the rightOperand or rightOperandReference of a Constraint. duty: URIRef # Relates an individual Duty to a Permission. failure: URIRef # Failure is an abstract property that defines the violation (or unmet) relationship between Rules. function: URIRef # Function is an abstract property whose sub-properties define the functional roles which may be fulfilled by a party in relation to a Rule. hasPolicy: URIRef # Identifies an ODRL Policy for which the identified Asset is the target Asset to all the Rules. implies: URIRef # An Action asserts that another Action is not prohibited to enable its operational semantics. includedIn: URIRef # An Action transitively asserts that another Action that encompasses its operational semantics. informedParty: URIRef # The Party to be informed of all uses. informingParty: URIRef # The Party who provides the inform use data. inheritAllowed: URIRef # Indicates if the Policy entity can be inherited. inheritFrom: URIRef # Relates a (child) policy to another (parent) policy from which terms are inherited. inheritRelation: URIRef # Identifies the type of inheritance. leftOperand: URIRef # The left operand in a constraint expression. obligation: URIRef # Relates an individual Duty to a Policy. operand: URIRef # Operand is an abstract property for a logical relationship. operator: URIRef # The operator function applied to operands of a Constraint output: URIRef # The output property specifies the Asset which is created from the output of the Action. partOf: URIRef # Identifies an Asset/PartyCollection that the Asset/Party is a member of. 
payeeParty: URIRef # The Party is the recipient of the payment. permission: URIRef # Relates an individual Permission to a Policy. profile: URIRef # The identifier(s) of an ODRL Profile that the Policy conforms to. prohibition: URIRef # Relates an individual Prohibition to a Policy. proximity: URIRef # An value indicating the closeness or nearness. refinement: URIRef # Constraint used to refine the semantics of an Action, or Party/Asset Collection relation: URIRef # Relation is an abstract property which creates an explicit link between an Action and an Asset. remedy: URIRef # Relates an individual remedy Duty to a Prohibition. rightOperand: URIRef # The value of the right operand in a constraint expression. rightOperandReference: URIRef # A reference to a web resource providing the value for the right operand of a Constraint. scope: URIRef # The identifier of a scope that provides context to the extent of the entity. source: URIRef # Reference to a Asset/PartyCollection status: URIRef # the value generated from the leftOperand action or a value related to the leftOperand set as the reference for the comparison. target: URIRef # The target property indicates the Asset that is the primary subject to which the Rule action directly applies. timedCount: URIRef # The number of seconds after which timed metering use of the asset begins. trackedParty: URIRef # The Party whose usage is being tracked. trackingParty: URIRef # The Party who is tracking usage. uid: URIRef # An unambiguous identifier undefined: URIRef # Relates the strategy used for handling undefined actions to a Policy. unit: URIRef # The unit of measurement of the value of the rightOperand or rightOperandReference of a Constraint. xone: URIRef # The relation is satisfied when only one, and not more, of the Constraints is satisfied # http://www.w3.org/2002/07/owl#NamedIndividual All: URIRef # Specifies that the scope of the relationship is all of the collective individuals within a context. 
All2ndConnections: URIRef # Specifies that the scope of the relationship is all of the second-level connections to the Party. AllConnections: URIRef # Specifies that the scope of the relationship is all of the first-level connections of the Party. AllGroups: URIRef # Specifies that the scope of the relationship is all of the group connections of the Party. Group: URIRef # Specifies that the scope of the relationship is the defined group with multiple individual members. Individual: URIRef # Specifies that the scope of the relationship is the single Party individual. absolutePosition: URIRef # A point in space or time defined with absolute coordinates for the positioning of the target Asset. absoluteSize: URIRef # Measure(s) of one or two axes for 2D-objects or measure(s) of one to tree axes for 3D-objects of the target Asset. absoluteSpatialPosition: URIRef # The absolute spatial positions of four corners of a rectangle on a 2D-canvas or the eight corners of a cuboid in a 3D-space for the target Asset to fit. absoluteTemporalPosition: URIRef # The absolute temporal positions in a media stream the target Asset has to fit. count: URIRef # Numeric count of executions of the action of the Rule. dateTime: URIRef # The date (and optional time and timezone) of exercising the action of the Rule. Right operand value MUST be an xsd:date or xsd:dateTime as defined by [[xmlschema11-2]]. delayPeriod: URIRef # A time delay period prior to exercising the action of the Rule. The point in time triggering this period MAY be defined by another temporal Constraint combined by a Logical Constraint (utilising the odrl:andSequence operand). Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. deliveryChannel: URIRef # The delivery channel used for exercising the action of the Rule. device: URIRef # An identified device used for exercising the action of the Rule. 
elapsedTime: URIRef # A continuous elapsed time period which may be used for exercising of the action of the Rule. Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. eq: URIRef # Indicating that a given value equals the right operand of the Constraint. event: URIRef # An identified event setting a context for exercising the action of the Rule. fileFormat: URIRef # A transformed file format of the target Asset. gt: URIRef # Indicating that a given value is greater than the right operand of the Constraint. gteq: URIRef # Indicating that a given value is greater than or equal to the right operand of the Constraint. hasPart: URIRef # A set-based operator indicating that a given value contains the right operand of the Constraint. ignore: URIRef # The Action is to be ignored and is not part of the policy – and the policy remains valid. industry: URIRef # A defined industry sector setting a context for exercising the action of the Rule. invalid: URIRef # The policy is void. isA: URIRef # A set-based operator indicating that a given value is an instance of the right operand of the Constraint. isAllOf: URIRef # A set-based operator indicating that a given value is all of the right operand of the Constraint. isAnyOf: URIRef # A set-based operator indicating that a given value is any of the right operand of the Constraint. isNoneOf: URIRef # A set-based operator indicating that a given value is none of the right operand of the Constraint. isPartOf: URIRef # A set-based operator indicating that a given value is contained by the right operand of the Constraint. language: URIRef # A natural language used by the target Asset. lt: URIRef # Indicating that a given value is less than the right operand of the Constraint. lteq: URIRef # Indicating that a given value is less than or equal to the right operand of the Constraint. media: URIRef # Category of a media asset setting a context for exercising the action of the Rule. 
meteredTime: URIRef # An accumulated amount of one to many metered time periods which were used for exercising the action of the Rule. Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. neq: URIRef # Indicating that a given value is not equal to the right operand of the Constraint. payAmount: URIRef # The amount of a financial payment. Right operand value MUST be an xsd:decimal. percentage: URIRef # A percentage amount of the target Asset relevant for exercising the action of the Rule. Right operand value MUST be an xsd:decimal from 0 to 100. perm: URIRef # Permissions take preference over prohibitions. policyUsage: URIRef # Indicates the actual datetime the action of the Rule was exercised. product: URIRef # Category of product or service setting a context for exercising the action of the Rule. prohibit: URIRef # Prohibitions take preference over permissions. purpose: URIRef # A defined purpose for exercising the action of the Rule. recipient: URIRef # The party receiving the result/outcome of exercising the action of the Rule. relativePosition: URIRef # A point in space or time defined with coordinates relative to full measures the positioning of the target Asset. relativeSize: URIRef # Measure(s) of one or two axes for 2D-objects or measure(s) of one to tree axes for 3D-objects - expressed as percentages of full values - of the target Asset. relativeSpatialPosition: URIRef # The relative spatial positions - expressed as percentages of full values - of four corners of a rectangle on a 2D-canvas or the eight corners of a cuboid in a 3D-space of the target Asset. relativeTemporalPosition: URIRef # A point in space or time defined with coordinates relative to full measures the positioning of the target Asset. resolution: URIRef # Resolution of the rendition of the target Asset. spatial: URIRef # A named and identified geospatial area with defined borders which is used for exercising the action of the Rule. 
An IRI MUST be used to represent this value. spatialCoordinates: URIRef # A set of coordinates setting the borders of a geospatial area used for exercising the action of the Rule. The coordinates MUST include longitude and latitude, they MAY include altitude and the geodetic datum. support: URIRef # The Action is to be supported as part of the policy – and the policy remains valid. system: URIRef # An identified computing system used for exercising the action of the Rule. systemDevice: URIRef # An identified computing system or computing device used for exercising the action of the Rule. timeInterval: URIRef # A recurring period of time before the next execution of the action of the Rule. Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. unitOfCount: URIRef # The unit of measure used for counting the executions of the action of the Rule. version: URIRef # The version of the target Asset. virtualLocation: URIRef # An identified location of the IT communication space which is relevant for exercising the action of the Rule. # http://www.w3.org/2004/02/skos/core#Collection # http://www.w3.org/2004/02/skos/core#Concept Action: URIRef # An operation on an Asset. Agreement: URIRef # A Policy that grants the assignee a Rule over an Asset from an assigner. Assertion: URIRef # A Policy that asserts a Rule over an Asset from parties. Asset: URIRef # A resource or a collection of resources that are the subject of a Rule. AssetCollection: URIRef # An Asset that is collection of individual resources AssetScope: URIRef # Scopes for Asset Scope expressions. ConflictTerm: URIRef # Used to establish strategies to resolve conflicts that arise from the merging of Policies or conflicts between Permissions and Prohibitions in the same Policy. Constraint: URIRef # A boolean expression that refines the semantics of an Action and Party/Asset Collection or declare the conditions applicable to a Rule. 
Duty: URIRef # The obligation to perform an Action LeftOperand: URIRef # Left operand for a constraint expression. LogicalConstraint: URIRef # A logical expression that refines the semantics of an Action and Party/Asset Collection or declare the conditions applicable to a Rule. Offer: URIRef # A Policy that proposes a Rule over an Asset from an assigner. Operator: URIRef # Operator for constraint expression. Party: URIRef # An entity or a collection of entities that undertake Roles in a Rule. PartyCollection: URIRef # A Party that is a group of individual entities PartyScope: URIRef # Scopes for Party Scope expressions. Permission: URIRef # The ability to perform an Action over an Asset. Policy: URIRef # A non-empty group of Permissions and/or Prohibitions. Privacy: URIRef # A Policy that expresses a Rule over an Asset containing personal information. Prohibition: URIRef # The inability to perform an Action over an Asset. Request: URIRef # A Policy that proposes a Rule over an Asset from an assignee. RightOperand: URIRef # Right operand for constraint expression. Rule: URIRef # An abstract concept that represents the common characteristics of Permissions, Prohibitions, and Duties. Set: URIRef # A Policy that expresses a Rule over an Asset. Ticket: URIRef # A Policy that grants the holder a Rule over an Asset from an assigner. UndefinedTerm: URIRef # Is used to indicate how to support Actions that are not part of any vocabulary or profile in the policy expression system. acceptTracking: URIRef # To accept that the use of the Asset may be tracked. adHocShare: URIRef # The act of sharing the asset to parties in close proximity to the owner. aggregate: URIRef # To use the Asset or parts of it as part of a composite collection. annotate: URIRef # To add explanatory notations/commentaries to the Asset without modifying the Asset in any other way. anonymize: URIRef # To anonymize all or parts of the Asset. append: URIRef # The act of adding to the end of an asset. 
appendTo: URIRef # The act of appending data to the Asset without modifying the Asset in any other way. archive: URIRef # To store the Asset (in a non-transient form). attachPolicy: URIRef # The act of keeping the policy notice with the asset. attachSource: URIRef # The act of attaching the source of the asset and its derivatives. attribute: URIRef # To attribute the use of the Asset. commercialize: URIRef # The act of using the asset in a business environment. compensate: URIRef # To compensate by transfer of some amount of value, if defined, for using or selling the Asset. concurrentUse: URIRef # To create multiple copies of the Asset that are being concurrently used. copy: URIRef # The act of making an exact reproduction of the asset. core: URIRef # Identifier for the ODRL Core Profile delete: URIRef # To permanently remove all copies of the Asset after it has been used. derive: URIRef # To create a new derivative Asset from this Asset and to edit or modify the derivative. digitize: URIRef # To produce a digital copy of (or otherwise digitize) the Asset from its analogue form. display: URIRef # To create a static and transient rendition of an Asset. distribute: URIRef # To supply the Asset to third-parties. ensureExclusivity: URIRef # To ensure that the Rule on the Asset is exclusive. execute: URIRef # To run the computer program Asset. export: URIRef # The act of transforming the asset into a new form. extract: URIRef # To extract parts of the Asset and to use it as a new Asset. extractChar: URIRef # The act of extracting (replicating) unchanged characters from the asset. extractPage: URIRef # The act of extracting (replicating) unchanged pages from the asset. extractWord: URIRef # The act of extracting (replicating) unchanged words from the asset. give: URIRef # To transfer the ownership of the Asset to a third party without compensation and while deleting the original asset. grantUse: URIRef # To grant the use of the Asset to third parties. 
include: URIRef # To include other related assets in the Asset. index: URIRef # To record the Asset in an index. inform: URIRef # To inform that an action has been performed on or in relation to the Asset. install: URIRef # To load the computer program Asset onto a storage device which allows operating or running the Asset. lease: URIRef # The act of making available the asset to a third-party for a fixed period of time with exchange of value. lend: URIRef # The act of making available the asset to a third-party for a fixed period of time without exchange of value. license: URIRef # The act of granting the right to use the asset to a third-party. modify: URIRef # To change existing content of the Asset. A new asset is not created by this action. move: URIRef # To move the Asset from one digital location to another including deleting the original copy. nextPolicy: URIRef # To grant the specified Policy to a third party for their use of the Asset. obtainConsent: URIRef # To obtain verifiable consent to perform the requested action in relation to the Asset. pay: URIRef # The act of paying a financial amount to a party for use of the asset. play: URIRef # To create a sequential and transient rendition of an Asset. present: URIRef # To publicly perform the Asset. preview: URIRef # The act of providing a short preview of the asset. print: URIRef # To create a tangible and permanent rendition of an Asset. read: URIRef # To obtain data from the Asset. reproduce: URIRef # To make duplicate copies the Asset in any material form. reviewPolicy: URIRef # To review the Policy applicable to the Asset. secondaryUse: URIRef # The act of using the asset for a purpose other than the purpose it was intended for. sell: URIRef # To transfer the ownership of the Asset to a third party with compensation and while deleting the original asset. share: URIRef # The act of the non-commercial reproduction and distribution of the asset to third-parties. 
shareAlike: URIRef # The act of distributing any derivative asset under the same terms as the original asset. stream: URIRef # To deliver the Asset in real-time. synchronize: URIRef # To use the Asset in timed relations with media (audio/visual) elements of another Asset. textToSpeech: URIRef # To have a text Asset read out loud. transfer: URIRef # To transfer the ownership of the Asset in perpetuity. transform: URIRef # To convert the Asset into a different format. translate: URIRef # To translate the original natural language of an Asset into another natural language. uninstall: URIRef # To unload and delete the computer program Asset from a storage device and disable its readiness for operation. use: URIRef # To use the Asset watermark: URIRef # To apply a watermark to the Asset. write: URIRef # The act of writing to the Asset. writeTo: URIRef # The act of adding data to the Asset. # Valid non-python identifiers _extras = [ "and", "or", "#actionConcepts", "#actions", "#actionsCommon", "#assetConcepts", "#assetParty", "#assetRelations", "#assetRelationsCommon", "#conflictConcepts", "#constraintLeftOperandCommon", "#constraintLogicalOperands", "#constraintRelationalOperators", "#constraintRightOpCommon", "#constraints", "#deprecatedTerms", "#duties", "#logicalConstraints", "#partyConcepts", "#partyRoles", "#partyRolesCommon", "#permissions", "#policyConcepts", "#policySubClasses", "#policySubClassesCommon", "#prohibitions", "#ruleConcepts", ] _NS = Namespace("http://www.w3.org/ns/odrl/2/") rdflib-6.1.1/rdflib/namespace/_ORG.py000066400000000000000000000316271415774155300173440ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class ORG(DefinedNamespace): """ Core organization ontology Vocabulary for describing organizational structures, specializable to a broad variety of types of organization. 
Generated from: http://www.w3.org/ns/org# Date: 2020-05-26 14:20:02.908408 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property basedAt: URIRef # Indicates the site at which a person is based. We do not restrict the possibility that a person is based at multiple sites. changedBy: URIRef # Indicates a change event which resulted in a change to this organization. Depending on the event the organization may or may not have continued to exist after the event. Inverse of `org:originalOrganization`. classification: URIRef # Indicates a classification for this Organization within some classification scheme. Extension vocabularies may wish to specialize this property to have a range corresponding to a specific `skos:ConceptScheme`. This property is under discussion and may be revised or removed - in many cases organizations are best categorized by defining a sub-class hierarchy in an extension vocabulary. hasMember: URIRef # Indicates a person who is a member of the subject Organization. Inverse of `org:memberOf`, see that property for further clarification. Provided for compatibility with `foaf:member`. hasMembership: URIRef # Indicates a membership relationship that the Agent plays. Inverse of `org:member`. hasPost: URIRef # Indicates a Post which exists within the Organization. hasPrimarySite: URIRef # Indicates a primary site for the Organization, this is the default means by which an Organization can be contacted and is not necessarily the formal headquarters. hasRegisteredSite: URIRef # Indicates the legally registered site for the organization, in many legal jurisdictions there is a requirement that FormalOrganizations such as Companies or Charities have such a primary designed site. hasSite: URIRef # Indicates a site at which the Organization has some presence even if only indirect (e.g. virtual office or a professional service which is acting as the registered address for a company). Inverse of `org:siteOf`. 
hasSubOrganization: URIRef # Represents hierarchical containment of Organizations or Organizational Units; indicates an organization which is a sub-part or child of this organization. Inverse of `org:subOrganizationOf`. hasUnit: URIRef # Indicates a unit which is part of this Organization, e.g. a Department within a larger FormalOrganization. Inverse of `org:unitOf`. headOf: URIRef # Indicates that a person is the leader or formal head of the Organization. This will normally mean that they are the root of the `org:reportsTo` (acyclic) graph, though an organization may have more than one head. heldBy: URIRef # Indicates an Agent which holds a Post. holds: URIRef # Indicates a Post held by some Agent. identifier: URIRef # Gives an identifier, such as a company registration number, that can be used to used to uniquely identify the organization. Many different national and international identier schemes are available. The org ontology is neutral to which schemes are used. The particular identifier scheme should be indicated by the datatype of the identifier value. Using datatypes to distinguish the notation scheme used is consistent with recommended best practice for `skos:notation` of which this property is a specialization. linkedTo: URIRef # Indicates an arbitrary relationship between two organizations. Specializations of this can be used to, for example, denote funding or supply chain relationships. location: URIRef # Gives a location description for a person within the organization, for example a _Mail Stop_ for internal posting purposes. member: URIRef # Indicates the Person (or other Agent including Organization) involved in the Membership relationship. Inverse of `org:hasMembership` memberDuring: URIRef # Optional property to indicate the interval for which the membership is/was valid. memberOf: URIRef # Indicates that a person is a member of the Organization with no indication of the nature of that membership or the role played. 
Note that the choice of property name is not meant to limit the property to only formal membership arrangements, it is also intended to cover related concepts such as affilliation or other involvement in the organization. Extensions can specialize this relationship to indicate particular roles within the organization or more nuanced relationships to the organization. Has an optional inverse, `org:hasmember`. organization: URIRef # Indicates Organization in which the Agent is a member. originalOrganization: URIRef # Indicates one or more organizations that existed before the change event. Depending on the event they may or may not have continued to exist after the event. Inverse of `org:changedBy`. postIn: URIRef # Indicates the Organization in which the Post exists. purpose: URIRef # Indicates the purpose of this Organization. There can be many purposes at different levels of abstraction but the nature of an organization is to have a reason for existence and this property is a means to document that reason. An Organization may have multiple purposes. It is recommended that the purpose be denoted by a controlled term or code list, ideally a `skos:Concept`. However, the range is left open to allow for other types of descriptive schemes. It is expected that specializations or application profiles of this vocabulary will constrain the range of the purpose. Alternative names: _remit_ _responsibility_ (esp. if applied to OrganizationalUnits such as Government Departments). remuneration: URIRef # Indicates a salary or other reward associated with the role. Typically this will be denoted using an existing representation scheme such as `gr:PriceSpecification` but the range is left open to allow applications to specialize it (e.g. to remunerationInGBP). reportsTo: URIRef # Indicates a reporting relationship as might be depicted on an organizational chart. 
The precise semantics of the reporting relationship will vary by organization but is intended to encompass both direct supervisory relationships (e.g. carrying objective and salary setting authority) and more general reporting or accountability relationships (e.g. so called _dotted line_ reporting). resultedFrom: URIRef # Indicates an event which resulted in this organization. Inverse of `org:resultingOrganization`. resultingOrganization: URIRef # Indicates an organization which was created or changed as a result of the event. Inverse of `org:resultedFrom`. role: URIRef # Indicates the Role that the Agent plays in a Membership relationship with an Organization. roleProperty: URIRef # This is a metalevel property which is used to annotate an `org:Role` instance with a sub-property of `org:memberOf` that can be used to directly indicate the role for easy of query. The intended semantics is a Membership relation involving the Role implies the existence of a direct property relationship through an inference rule of the form: `{ [] org:member ?p; org:organization ?o; org:role [org:roleProperty ?r] } -> {?p ?r ?o}`. siteAddress: URIRef # Indicates an address for the site in a suitable encoding. Use of vCard (using the http://www.w3.org/TR/vcard-rdf/ vocabulary) is encouraged but the range is left open to allow other encodings to be used. The address may include email, telephone, and geo-location information and is not restricted to a physical address. siteOf: URIRef # Indicates an Organization which has some presence at the given site. This is the inverse of `org:hasSite`. subOrganizationOf: URIRef # Represents hierarchical containment of Organizations or OrganizationalUnits; indicates an Organization which contains this Organization. Inverse of `org:hasSubOrganization`. transitiveSubOrganizationOf: URIRef # The transitive closure of subOrganizationOf, giving a representation of all organizations that contain this one. 
Note that technically this is a super property of the transitive closure so it could contain additional assertions but such usage is discouraged. unitOf: URIRef # Indicates an Organization of which this Unit is a part, e.g. a Department within a larger FormalOrganization. This is the inverse of `org:hasUnit`. # http://www.w3.org/2000/01/rdf-schema#Class ChangeEvent: URIRef # Represents an event which resulted in a major change to an organization such as a merger or complete restructuring. It is intended for situations where the resulting organization is sufficient distinct from the original organizations that it has a distinct identity and distinct URI. Extension vocabularies should define sub-classes of this to denote particular categories of event. The instant or interval at which the event occurred should be given by `prov:startAtTime` and `prov:endedAtTime`, a description should be given by `dct:description`. FormalOrganization: URIRef # An Organization which is recognized in the world at large, in particular in legal jurisdictions, with associated rights and responsibilities. Examples include a Corporation, Charity, Government or Church. Note that this is a super class of `gr:BusinessEntity` and it is recommended to use the GoodRelations vocabulary to denote Business classifications such as DUNS or NAICS. Membership: URIRef # Indicates the nature of an Agent's membership of an organization. Represents an n-ary relation between an Agent, an Organization and a Role. It is possible to directly indicate membership, independent of the specific Role, through use of the `org:memberOf` property. Organization: URIRef # Represents a collection of people organized together into a community or other social, commercial or political structure. The group has some common purpose or reason for existence which goes beyond the set of people belonging to it and can act as an Agent. Organizations are often decomposable into hierarchical structures. 
It is recommended that SKOS lexical labels should be used to label the Organization. In particular `skos:prefLabel` for the primary (possibly legally recognized name), `skos:altLabel` for alternative names (trading names, colloquial names) and `skos:notation` to denote a code from a code list. Alternative names: _Collective_ _Body_ _Org_ _Group_ OrganizationalCollaboration: URIRef # A collaboration between two or more Organizations such as a project. It meets the criteria for being an Organization in that it has an identity and defining purpose independent of its particular members but is neither a formally recognized legal entity nor a sub-unit within some larger organization. Might typically have a shorter lifetime than the Organizations within it, but not necessarily. All members are `org:Organization`s rather than individuals and those Organizations can play particular roles within the venture. Alternative names: _Project_ _Venture_ _Endeavour_ _Consortium_ _Endeavour_ OrganizationalUnit: URIRef # An Organization such as a University Support Unit which is part of some larger FormalOrganization and only has full recognition within the context of that FormalOrganization, it is not a Legal Entity in its own right. Units can be large and complex containing other Units and even FormalOrganizations. Alternative names: _OU_ _Unit_ _Department_ Post: URIRef # A Post represents some position within an organization that exists independently of the person or persons filling it. Posts may be used to represent situations where a person is a member of an organization ex officio (for example the Secretary of State for Scotland is part of UK Cabinet by virtue of being Secretary of State for Scotland, not as an individual person). A post can be held by multiple people and hence can be treated as a organization in its own right. Role: URIRef # Denotes a role that a Person or other Agent can take in an organization. 
Instances of this class describe the abstract role; to denote a specific instance of a person playing that role in a specific organization use an instance of `org:Membership`. It is common for roles to be arranged in some taxonomic structure and we use SKOS to represent that. The normal SKOS lexical properties should be used when labelling the Role. Additional descriptive properties for the Role, such as a Salary band, may be added by extension vocabularies. Site: URIRef # An office or other premise at which the organization is located. Many organizations are spread across multiple sites and many sites will host multiple locations. In most cases a Site will be a physical location. However, we don't exclude the possibility of non-physical sites such as a virtual office with an associated post box and phone reception service. Extensions may provide subclasses to denote particular types of site. # http://www.w3.org/ns/org#Role Head: URIRef # head _NS = Namespace("http://www.w3.org/ns/org#") rdflib-6.1.1/rdflib/namespace/_OWL.py000066400000000000000000000241601415774155300173500ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class OWL(DefinedNamespace): """ The OWL 2 Schema vocabulary (OWL 2) This ontology partially describes the built-in classes and properties that together form the basis of the RDF/XML syntax of OWL 2. The content of this ontology is based on Tables 6.1 and 6.2 in Section 6.4 of the OWL 2 RDF-Based Semantics specification, available at http://www.w3.org/TR/owl2-rdf-based- semantics/. Please note that those tables do not include the different annotations (labels, comments and rdfs:isDefinedBy links) used in this file. Also note that the descriptions provided in this ontology do not provide a complete and correct formal description of either the syntax or the semantics of the introduced terms (please see the OWL 2 recommendations for the complete and normative specifications). 
Furthermore, the information provided by this ontology may be misleading if not used with care. This ontology SHOULD NOT be imported into OWL ontologies. Importing this file into an OWL 2 DL ontology will cause it to become an OWL 2 Full ontology and may have other, unexpected, consequences. Generated from: http://www.w3.org/2002/07/owl# Date: 2020-05-26 14:20:03.193795 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property allValuesFrom: URIRef # The property that determines the class that a universal property restriction refers to. annotatedProperty: URIRef # The property that determines the predicate of an annotated axiom or annotated annotation. annotatedSource: URIRef # The property that determines the subject of an annotated axiom or annotated annotation. annotatedTarget: URIRef # The property that determines the object of an annotated axiom or annotated annotation. assertionProperty: URIRef # The property that determines the predicate of a negative property assertion. cardinality: URIRef # The property that determines the cardinality of an exact cardinality restriction. complementOf: URIRef # The property that determines that a given class is the complement of another class. datatypeComplementOf: URIRef # The property that determines that a given data range is the complement of another data range with respect to the data domain. differentFrom: URIRef # The property that determines that two given individuals are different. disjointUnionOf: URIRef # The property that determines that a given class is equivalent to the disjoint union of a collection of other classes. disjointWith: URIRef # The property that determines that two given classes are disjoint. distinctMembers: URIRef # The property that determines the collection of pairwise different individuals in a owl:AllDifferent axiom. equivalentClass: URIRef # The property that determines that two given classes are equivalent, and that is used to specify datatype definitions. 
equivalentProperty: URIRef # The property that determines that two given properties are equivalent. hasKey: URIRef # The property that determines the collection of properties that jointly build a key. hasSelf: URIRef # The property that determines the property that a self restriction refers to. hasValue: URIRef # The property that determines the individual that a has-value restriction refers to. intersectionOf: URIRef # The property that determines the collection of classes or data ranges that build an intersection. inverseOf: URIRef # The property that determines that two given properties are inverse. maxCardinality: URIRef # The property that determines the cardinality of a maximum cardinality restriction. maxQualifiedCardinality: URIRef # The property that determines the cardinality of a maximum qualified cardinality restriction. members: URIRef # The property that determines the collection of members in either a owl:AllDifferent, owl:AllDisjointClasses or owl:AllDisjointProperties axiom. minCardinality: URIRef # The property that determines the cardinality of a minimum cardinality restriction. minQualifiedCardinality: URIRef # The property that determines the cardinality of a minimum qualified cardinality restriction. onClass: URIRef # The property that determines the class that a qualified object cardinality restriction refers to. onDataRange: URIRef # The property that determines the data range that a qualified data cardinality restriction refers to. onDatatype: URIRef # The property that determines the datatype that a datatype restriction refers to. onProperties: URIRef # The property that determines the n-tuple of properties that a property restriction on an n-ary data range refers to. onProperty: URIRef # The property that determines the property that a property restriction refers to. oneOf: URIRef # The property that determines the collection of individuals or data values that build an enumeration. 
propertyChainAxiom: URIRef # The property that determines the n-tuple of properties that build a sub property chain of a given property. propertyDisjointWith: URIRef # The property that determines that two given properties are disjoint. qualifiedCardinality: URIRef # The property that determines the cardinality of an exact qualified cardinality restriction. sameAs: URIRef # The property that determines that two given individuals are equal. someValuesFrom: URIRef # The property that determines the class that an existential property restriction refers to. sourceIndividual: URIRef # The property that determines the subject of a negative property assertion. targetIndividual: URIRef # The property that determines the object of a negative object property assertion. targetValue: URIRef # The property that determines the value of a negative data property assertion. unionOf: URIRef # The property that determines the collection of classes or data ranges that build a union. withRestrictions: URIRef # The property that determines the collection of facet-value pairs that define a datatype restriction. # http://www.w3.org/2000/01/rdf-schema#Class AllDifferent: URIRef # The class of collections of pairwise different individuals. AllDisjointClasses: URIRef # The class of collections of pairwise disjoint classes. AllDisjointProperties: URIRef # The class of collections of pairwise disjoint properties. Annotation: URIRef # The class of annotated annotations for which the RDF serialization consists of an annotated subject, predicate and object. AnnotationProperty: URIRef # The class of annotation properties. AsymmetricProperty: URIRef # The class of asymmetric properties. Axiom: URIRef # The class of annotated axioms for which the RDF serialization consists of an annotated subject, predicate and object. Class: URIRef # The class of OWL classes. DataRange: URIRef # The class of OWL data ranges, which are special kinds of datatypes. 
Note: The use of the IRI owl:DataRange has been deprecated as of OWL 2. The IRI rdfs:Datatype SHOULD be used instead. DatatypeProperty: URIRef # The class of data properties. DeprecatedClass: URIRef # The class of deprecated classes. DeprecatedProperty: URIRef # The class of deprecated properties. FunctionalProperty: URIRef # The class of functional properties. InverseFunctionalProperty: URIRef # The class of inverse-functional properties. IrreflexiveProperty: URIRef # The class of irreflexive properties. NamedIndividual: URIRef # The class of named individuals. NegativePropertyAssertion: URIRef # The class of negative property assertions. ObjectProperty: URIRef # The class of object properties. Ontology: URIRef # The class of ontologies. OntologyProperty: URIRef # The class of ontology properties. ReflexiveProperty: URIRef # The class of reflexive properties. Restriction: URIRef # The class of property restrictions. SymmetricProperty: URIRef # The class of symmetric properties. TransitiveProperty: URIRef # The class of transitive properties. # http://www.w3.org/2002/07/owl#AnnotationProperty backwardCompatibleWith: URIRef # The annotation property that indicates that a given ontology is backward compatible with another ontology. deprecated: URIRef # The annotation property that indicates that a given entity has been deprecated. incompatibleWith: URIRef # The annotation property that indicates that a given ontology is incompatible with another ontology. priorVersion: URIRef # The annotation property that indicates the predecessor ontology of a given ontology. versionInfo: URIRef # The annotation property that provides version information for an ontology or another OWL construct. # http://www.w3.org/2002/07/owl#Class Nothing: URIRef # This is the empty class. Thing: URIRef # The class of OWL individuals. # http://www.w3.org/2002/07/owl#DatatypeProperty bottomDataProperty: URIRef # The data property that does not relate any individual to any data value. 
topDataProperty: URIRef # The data property that relates every individual to every data value. # http://www.w3.org/2002/07/owl#ObjectProperty bottomObjectProperty: URIRef # The object property that does not relate any two individuals. topObjectProperty: URIRef # The object property that relates every two individuals. # http://www.w3.org/2002/07/owl#OntologyProperty imports: URIRef # The property that is used for importing other ontologies into a given ontology. versionIRI: URIRef # The property that identifies the version IRI of an ontology. # http://www.w3.org/2000/01/rdf-schema#Datatype # NOTE: the following two elements don't appear in the OWL RDF documents but are defined in the OWL2 Recommentation # at https://www.w3.org/TR/owl2-syntax/#Datatype_Maps rational: URIRef # The value space is the set of all rational numbers. The lexical form is numerator '/' denominator, where both are integers. real: URIRef # The value space is the set of all real numbers. Does not directly provide any lexical forms. _NS = Namespace("http://www.w3.org/2002/07/owl#") rdflib-6.1.1/rdflib/namespace/_PROF.py000066400000000000000000000053671415774155300174650ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class PROF(DefinedNamespace): """ Profiles Vocabulary This vocabulary is for describing relationships between standards/specifications, profiles of them and supporting artifacts such as validating resources. This model starts with [http://dublincore.org/2012/06/14/dcterms#Standard](dct:Standard) entities which can either be Base Specifications (a standard not profiling any other Standard) or Profiles (Standards which do profile others). Base Specifications or Profiles can have Resource Descriptors associated with them that defines implementing rules for the it. Resource Descriptors must indicate the role they play (to guide, to validate etc.) 
and the formalism they adhere to (dct:format) to allow for content negotiation. A vocabulary of Resource Roles are provided alongside this vocabulary but that list is extensible. Generated from: https://www.w3.org/ns/dx/prof/profilesont.ttl Date: 2020-05-26 14:20:03.542924 """ # http://www.w3.org/2002/07/owl#Class Profile: URIRef # A named set of constraints on one or more identified base specifications or other profiles, including the identification of any implementing subclasses of datatypes, semantic interpretations, vocabularies, options and parameters of those base specifications necessary to accomplish a particular function. This definition includes what are often called "application profiles", "metadata application profiles", or "metadata profiles". ResourceDescriptor: URIRef # A resource that defines an aspect - a particular part or feature - of a Profile ResourceRole: URIRef # The role that an Resource plays # http://www.w3.org/2002/07/owl#DatatypeProperty hasToken: URIRef # A preferred alternative identifier for the Profile # http://www.w3.org/2002/07/owl#ObjectProperty hasArtifact: URIRef # The URL of a downloadable file with particulars such as its format and role indicated by a Resource Descriptor hasResource: URIRef # A resource which describes the nature of an artifact and the role it plays in relation to a profile hasRole: URIRef # The function of the described artifactresource in the expression of the Profile, such as a specification, guidance documentation, SHACL file etc. 
isInheritedFrom: URIRef # This property indicates a Resource Descriptor described by this Profile’s base specification that is to be considered a Resource Descriptor for this Profile also isProfileOf: URIRef # A Profile is a profile of a dct:Standard (or a Base Specification or another Profile) isTransitiveProfileOf: URIRef # A base specification an Profile conforms to _NS = Namespace("http://www.w3.org/ns/dx/prof/") rdflib-6.1.1/rdflib/namespace/_PROV.py000066400000000000000000000570041415774155300175000ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class PROV(DefinedNamespace): """ W3C PROVenance Interchange Ontology (PROV-O) This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov-comments/). All feedback is welcome. PROV Access and Query Ontology This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov-comments/). All feedback is welcome. Dublin Core extensions of the W3C PROVenance Interchange Ontology (PROV-O) This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov-comments/). All feedback is welcome. 
W3C PROV Linking Across Provenance Bundles Ontology (PROV-LINKS) This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov-comments/ ). All feedback is welcome. W3C PROVenance Interchange Ontology (PROV-O) Dictionary Extension This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/Archives/Public/public-prov- comments/). All feedback is welcome. W3C PROVenance Interchange This document is published by the Provenance Working Group (http://www.w3.org/2011/prov/wiki/Main_Page). If you wish to make comments regarding this document, please send them to public-prov-comments@w3.org (subscribe public-prov-comments-request@w3.org, archives http://lists.w3.org/ Archives/Public/public-prov-comments/). All feedback is welcome. 
Generated from: http://www.w3.org/ns/prov Date: 2020-05-26 14:20:04.650279 """ _fail = True # http://www.w3.org/2000/01/rdf-schema#Resource activityOfInfluence: URIRef # activityOfInfluence agentOfInfluence: URIRef # agentOfInfluence contributed: URIRef # contributed ended: URIRef # ended entityOfInfluence: URIRef # entityOfInfluence generalizationOf: URIRef # generalizationOf generatedAsDerivation: URIRef # generatedAsDerivation hadDelegate: URIRef # hadDelegate hadDerivation: URIRef # hadDerivation hadInfluence: URIRef # hadInfluence hadRevision: URIRef # hadRevision informed: URIRef # informed locationOf: URIRef # locationOf qualifiedAssociationOf: URIRef # qualifiedAssociationOf qualifiedAttributionOf: URIRef # qualifiedAttributionOf qualifiedCommunicationOf: URIRef # qualifiedCommunicationOf qualifiedDelegationOf: URIRef # qualifiedDelegationOf qualifiedDerivationOf: URIRef # qualifiedDerivationOf qualifiedEndOf: URIRef # qualifiedEndOf qualifiedGenerationOf: URIRef # qualifiedGenerationOf qualifiedInfluenceOf: URIRef # qualifiedInfluenceOf qualifiedInvalidationOf: URIRef # qualifiedInvalidationOf qualifiedQuotationOf: URIRef # qualifiedQuotationOf qualifiedSourceOf: URIRef # qualifiedSourceOf qualifiedStartOf: URIRef # qualifiedStartOf qualifiedUsingActivity: URIRef # qualifiedUsingActivity quotedAs: URIRef # quotedAs revisedEntity: URIRef # revisedEntity started: URIRef # started wasActivityOfInfluence: URIRef # wasActivityOfInfluence wasAssociateFor: URIRef # wasAssociateFor wasMemberOf: URIRef # wasMemberOf wasPlanOf: URIRef # wasPlanOf wasPrimarySourceOf: URIRef # wasPrimarySourceOf wasRoleIn: URIRef # wasRoleIn wasUsedBy: URIRef # wasUsedBy wasUsedInDerivation: URIRef # wasUsedInDerivation # http://www.w3.org/2002/07/owl#AnnotationProperty aq: URIRef # category: URIRef # Classify prov-o terms into three categories, including 'starting-point', 'qualifed', and 'extended'. 
This classification is used by the prov-o html document to gently introduce prov-o terms to its users. component: URIRef # Classify prov-o terms into six components according to prov-dm, including 'agents-responsibility', 'alternate', 'annotations', 'collections', 'derivations', and 'entities-activities'. This classification is used so that readers of prov-o specification can find its correspondence with the prov-dm specification. constraints: URIRef # A reference to the principal section of the PROV-CONSTRAINTS document that describes this concept. definition: URIRef # A definition quoted from PROV-DM or PROV-CONSTRAINTS that describes the concept expressed with this OWL term. dm: URIRef # A reference to the principal section of the PROV-DM document that describes this concept. editorialNote: URIRef # A note by the OWL development team about how this term expresses the PROV-DM concept, or how it should be used in context of semantic web or linked data. editorsDefinition: URIRef # When the prov-o term does not have a definition drawn from prov-dm, and the prov-o editor provides one. inverse: URIRef # PROV-O does not define all property inverses. The directionalities defined in PROV-O should be given preference over those not defined. However, if users wish to name the inverse of a PROV-O property, the local name given by prov:inverse should be used. n: URIRef # A reference to the principal section of the PROV-M document that describes this concept. order: URIRef # The position that this OWL term should be listed within documentation. The scope of the documentation (e.g., among all terms, among terms within a prov:category, among properties applying to a particular class, etc.) is unspecified. qualifiedForm: URIRef # This annotation property links a subproperty of prov:wasInfluencedBy with the subclass of prov:Influence and the qualifying property that are used to qualify it. 
Example annotation: prov:wasGeneratedBy prov:qualifiedForm prov:qualifiedGeneration, prov:Generation . Then this unqualified assertion: :entity1 prov:wasGeneratedBy :activity1 . can be qualified by adding: :entity1 prov:qualifiedGeneration :entity1Gen . :entity1Gen a prov:Generation, prov:Influence; prov:activity :activity1; :customValue 1337 . Note how the value of the unqualified influence (prov:wasGeneratedBy :activity1) is mirrored as the value of the prov:activity (or prov:entity, or prov:agent) property on the influence class. sharesDefinitionWith: URIRef # specializationOf: URIRef # specializationOf todo: URIRef # unqualifiedForm: URIRef # Classes and properties used to qualify relationships are annotated with prov:unqualifiedForm to indicate the property used to assert an unqualified provenance relation. wasRevisionOf: URIRef # A revision is a derivation that revises an entity into a revised version. # http://www.w3.org/2002/07/owl#Class Accept: URIRef # Accept Activity: URIRef # Activity ActivityInfluence: URIRef # ActivityInfluence provides additional descriptions of an Activity's binary influence upon any other kind of resource. Instances of ActivityInfluence use the prov:activity property to cite the influencing Activity. Agent: URIRef # Agent AgentInfluence: URIRef # AgentInfluence provides additional descriptions of an Agent's binary influence upon any other kind of resource. Instances of AgentInfluence use the prov:agent property to cite the influencing Agent. Association: URIRef # An instance of prov:Association provides additional descriptions about the binary prov:wasAssociatedWith relation from an prov:Activity to some prov:Agent that had some responsibility for it. For example, :baking prov:wasAssociatedWith :baker; prov:qualifiedAssociation [ a prov:Association; prov:agent :baker; :foo :bar ]. 
Attribution: URIRef # An instance of prov:Attribution provides additional descriptions about the binary prov:wasAttributedTo relation from an prov:Entity to some prov:Agent that had some responsible for it. For example, :cake prov:wasAttributedTo :baker; prov:qualifiedAttribution [ a prov:Attribution; prov:entity :baker; :foo :bar ]. Bundle: URIRef # Note that there are kinds of bundles (e.g. handwritten letters, audio recordings, etc.) that are not expressed in PROV-O, but can be still be described by PROV-O. Collection: URIRef # Collection Communication: URIRef # An instance of prov:Communication provides additional descriptions about the binary prov:wasInformedBy relation from an informed prov:Activity to the prov:Activity that informed it. For example, :you_jumping_off_bridge prov:wasInformedBy :everyone_else_jumping_off_bridge; prov:qualifiedCommunication [ a prov:Communication; prov:activity :everyone_else_jumping_off_bridge; :foo :bar ]. Contribute: URIRef # Contribute Contributor: URIRef # Contributor Copyright: URIRef # Copyright Create: URIRef # Create Creator: URIRef # Creator Delegation: URIRef # An instance of prov:Delegation provides additional descriptions about the binary prov:actedOnBehalfOf relation from a performing prov:Agent to some prov:Agent for whom it was performed. For example, :mixing prov:wasAssociatedWith :toddler . :toddler prov:actedOnBehalfOf :mother; prov:qualifiedDelegation [ a prov:Delegation; prov:entity :mother; :foo :bar ]. Derivation: URIRef # The more specific forms of prov:Derivation (i.e., prov:Revision, prov:Quotation, prov:PrimarySource) should be asserted if they apply. Dictionary: URIRef # This concept allows for the provenance of the dictionary, but also of its constituents to be expressed. Such a notion of dictionary corresponds to a wide variety of concrete data structures, such as a maps or associative arrays. DirectQueryService: URIRef # Type for a generic provenance query service. 
Mainly for use in RDF provenance query service descriptions, to facilitate discovery in linked data environments. EmptyDictionary: URIRef # Empty Dictionary End: URIRef # An instance of prov:End provides additional descriptions about the binary prov:wasEndedBy relation from some ended prov:Activity to an prov:Entity that ended it. For example, :ball_game prov:wasEndedBy :buzzer; prov:qualifiedEnd [ a prov:End; prov:entity :buzzer; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime ]. Entity: URIRef # Entity EntityInfluence: URIRef # It is not recommended that the type EntityInfluence be asserted without also asserting one of its more specific subclasses. Generation: URIRef # An instance of prov:Generation provides additional descriptions about the binary prov:wasGeneratedBy relation from a generated prov:Entity to the prov:Activity that generated it. For example, :cake prov:wasGeneratedBy :baking; prov:qualifiedGeneration [ a prov:Generation; prov:activity :baking; :foo :bar ]. Influence: URIRef # Because prov:Influence is a broad relation, its most specific subclasses (e.g. prov:Communication, prov:Delegation, prov:End, prov:Revision, etc.) should be used when applicable. Insertion: URIRef # Insertion InstantaneousEvent: URIRef # An instantaneous event, or event for short, happens in the world and marks a change in the world, in its activities and in its entities. The term 'event' is commonly used in process algebra with a similar meaning. Events represent communications or interactions; they are assumed to be atomic and instantaneous. Invalidation: URIRef # An instance of prov:Invalidation provides additional descriptions about the binary prov:wasInvalidatedBy relation from an invalidated prov:Entity to the prov:Activity that invalidated it. For example, :uncracked_egg prov:wasInvalidatedBy :baking; prov:qualifiedInvalidation [ a prov:Invalidation; prov:activity :baking; :foo :bar ]. 
KeyEntityPair: URIRef # Key-Entity Pair Location: URIRef # Location Modify: URIRef # Modify Organization: URIRef # Organization Person: URIRef # Person Plan: URIRef # There exist no prescriptive requirement on the nature of plans, their representation, the actions or steps they consist of, or their intended goals. Since plans may evolve over time, it may become necessary to track their provenance, so plans themselves are entities. Representing the plan explicitly in the provenance can be useful for various tasks: for example, to validate the execution as represented in the provenance record, to manage expectation failures, or to provide explanations. PrimarySource: URIRef # An instance of prov:PrimarySource provides additional descriptions about the binary prov:hadPrimarySource relation from some secondary prov:Entity to an earlier, primary prov:Entity. For example, :blog prov:hadPrimarySource :newsArticle; prov:qualifiedPrimarySource [ a prov:PrimarySource; prov:entity :newsArticle; :foo :bar ] . Publish: URIRef # Publish Publisher: URIRef # Publisher Quotation: URIRef # An instance of prov:Quotation provides additional descriptions about the binary prov:wasQuotedFrom relation from some taken prov:Entity from an earlier, larger prov:Entity. For example, :here_is_looking_at_you_kid prov:wasQuotedFrom :casablanca_script; prov:qualifiedQuotation [ a prov:Quotation; prov:entity :casablanca_script; :foo :bar ]. Removal: URIRef # Removal Replace: URIRef # Replace Revision: URIRef # An instance of prov:Revision provides additional descriptions about the binary prov:wasRevisionOf relation from some newer prov:Entity to an earlier prov:Entity. For example, :draft_2 prov:wasRevisionOf :draft_1; prov:qualifiedRevision [ a prov:Revision; prov:entity :draft_1; :foo :bar ]. RightsAssignment: URIRef # RightsAssignment RightsHolder: URIRef # RightsHolder Role: URIRef # Role ServiceDescription: URIRef # Type for a generic provenance query service. 
Mainly for use in RDF provenance query service descriptions, to facilitate discovery in linked data environments. SoftwareAgent: URIRef # SoftwareAgent Start: URIRef # An instance of prov:Start provides additional descriptions about the binary prov:wasStartedBy relation from some started prov:Activity to an prov:Entity that started it. For example, :foot_race prov:wasStartedBy :bang; prov:qualifiedStart [ a prov:Start; prov:entity :bang; :foo :bar; prov:atTime '2012-03-09T08:05:08-05:00'^^xsd:dateTime ] . Submit: URIRef # Submit Usage: URIRef # An instance of prov:Usage provides additional descriptions about the binary prov:used relation from some prov:Activity to an prov:Entity that it used. For example, :keynote prov:used :podium; prov:qualifiedUsage [ a prov:Usage; prov:entity :podium; :foo :bar ]. # http://www.w3.org/2002/07/owl#DatatypeProperty atTime: URIRef # The time at which an InstantaneousEvent occurred, in the form of xsd:dateTime. endedAtTime: URIRef # The time at which an activity ended. See also prov:startedAtTime. generatedAtTime: URIRef # The time at which an entity was completely created and is available for use. invalidatedAtTime: URIRef # The time at which an entity was invalidated (i.e., no longer usable). provenanceUriTemplate: URIRef # Relates a provenance service to a URI template string for constructing provenance-URIs. removedKey: URIRef # removedKey startedAtTime: URIRef # The time at which an activity started. See also prov:endedAtTime. value: URIRef # value # http://www.w3.org/2002/07/owl#FunctionalProperty pairEntity: URIRef # pairKey pairKey: URIRef # pairKey # http://www.w3.org/2002/07/owl#NamedIndividual EmptyCollection: URIRef # EmptyCollection # http://www.w3.org/2002/07/owl#ObjectProperty actedOnBehalfOf: URIRef # An object property to express the accountability of an agent towards another agent. The subordinate agent acted on behalf of the responsible agent in an actual activity. 
activity: URIRef # activity agent: URIRef # agent alternateOf: URIRef # alternateOf asInBundle: URIRef # prov:asInBundle is used to specify which bundle the general entity of a prov:mentionOf property is described. When :x prov:mentionOf :y and :y is described in Bundle :b, the triple :x prov:asInBundle :b is also asserted to cite the Bundle in which :y was described. atLocation: URIRef # The Location of any resource. derivedByInsertionFrom: URIRef # derivedByInsertionFrom derivedByRemovalFrom: URIRef # derivedByRemovalFrom describesService: URIRef # relates a generic provenance query service resource (type prov:ServiceDescription) to a specific query service description (e.g. a prov:DirectQueryService or a sd:Service). dictionary: URIRef # dictionary entity: URIRef # entity generated: URIRef # generated hadActivity: URIRef # The _optional_ Activity of an Influence, which used, generated, invalidated, or was the responsibility of some Entity. This property is _not_ used by ActivityInfluence (use prov:activity instead). hadDictionaryMember: URIRef # hadDictionaryMember hadGeneration: URIRef # The _optional_ Generation involved in an Entity's Derivation. hadMember: URIRef # hadMember hadPlan: URIRef # The _optional_ Plan adopted by an Agent in Association with some Activity. Plan specifications are out of the scope of this specification. hadPrimarySource: URIRef # hadPrimarySource hadRole: URIRef # This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile. hadUsage: URIRef # The _optional_ Usage involved in an Entity's Derivation. has_anchor: URIRef # Indicates anchor URI for a potentially dynamic resource instance. has_provenance: URIRef # Indicates a provenance-URI for a resource; the resource identified by this property presents a provenance record about its subject or anchor resource. has_query_service: URIRef # Indicates a provenance query service that can access provenance related to its subject or anchor resource. 
influenced: URIRef # influenced influencer: URIRef # Subproperties of prov:influencer are used to cite the object of an unqualified PROV-O triple whose predicate is a subproperty of prov:wasInfluencedBy (e.g. prov:used, prov:wasGeneratedBy). prov:influencer is used much like rdf:object is used. insertedKeyEntityPair: URIRef # insertedKeyEntityPair invalidated: URIRef # invalidated mentionOf: URIRef # prov:mentionOf is used to specialize an entity as described in another bundle. It is to be used in conjunction with prov:asInBundle. prov:asInBundle is used to cite the Bundle in which the generalization was mentioned. pingback: URIRef # Relates a resource to a provenance pingback service that may receive additional provenance links about the resource. qualifiedAssociation: URIRef # If this Activity prov:wasAssociatedWith Agent :ag, then it can qualify the Association using prov:qualifiedAssociation [ a prov:Association; prov:agent :ag; :foo :bar ]. qualifiedAttribution: URIRef # If this Entity prov:wasAttributedTo Agent :ag, then it can qualify how it was influenced using prov:qualifiedAttribution [ a prov:Attribution; prov:agent :ag; :foo :bar ]. qualifiedCommunication: URIRef # If this Activity prov:wasInformedBy Activity :a, then it can qualify how it was influenced using prov:qualifiedCommunication [ a prov:Communication; prov:activity :a; :foo :bar ]. qualifiedDelegation: URIRef # If this Agent prov:actedOnBehalfOf Agent :ag, then it can qualify how with prov:qualifiedResponsibility [ a prov:Responsibility; prov:agent :ag; :foo :bar ]. qualifiedDerivation: URIRef # If this Entity prov:wasDerivedFrom Entity :e, then it can qualify how it was derived using prov:qualifiedDerivation [ a prov:Derivation; prov:entity :e; :foo :bar ]. qualifiedEnd: URIRef # If this Activity prov:wasEndedBy Entity :e1, then it can qualify how it was ended using prov:qualifiedEnd [ a prov:End; prov:entity :e1; :foo :bar ]. 
qualifiedGeneration: URIRef # If this Activity prov:generated Entity :e, then it can qualify how it performed the Generation using prov:qualifiedGeneration [ a prov:Generation; prov:entity :e; :foo :bar ]. qualifiedInfluence: URIRef # Because prov:qualifiedInfluence is a broad relation, the more specific relations (qualifiedCommunication, qualifiedDelegation, qualifiedEnd, etc.) should be used when applicable. qualifiedInsertion: URIRef # qualifiedInsertion qualifiedInvalidation: URIRef # If this Entity prov:wasInvalidatedBy Activity :a, then it can qualify how it was invalidated using prov:qualifiedInvalidation [ a prov:Invalidation; prov:activity :a; :foo :bar ]. qualifiedPrimarySource: URIRef # If this Entity prov:hadPrimarySource Entity :e, then it can qualify how using prov:qualifiedPrimarySource [ a prov:PrimarySource; prov:entity :e; :foo :bar ]. qualifiedQuotation: URIRef # If this Entity prov:wasQuotedFrom Entity :e, then it can qualify how using prov:qualifiedQuotation [ a prov:Quotation; prov:entity :e; :foo :bar ]. qualifiedRemoval: URIRef # qualifiedRemoval qualifiedRevision: URIRef # If this Entity prov:wasRevisionOf Entity :e, then it can qualify how it was revised using prov:qualifiedRevision [ a prov:Revision; prov:entity :e; :foo :bar ]. qualifiedStart: URIRef # If this Activity prov:wasStartedBy Entity :e1, then it can qualify how it was started using prov:qualifiedStart [ a prov:Start; prov:entity :e1; :foo :bar ]. qualifiedUsage: URIRef # If this Activity prov:used Entity :e, then it can qualify how it used it using prov:qualifiedUsage [ a prov:Usage; prov:entity :e; :foo :bar ]. used: URIRef # A prov:Entity that was used by this prov:Activity. For example, :baking prov:used :spoon, :egg, :oven . wasAssociatedWith: URIRef # An prov:Agent that had some (unspecified) responsibility for the occurrence of this prov:Activity. wasAttributedTo: URIRef # Attribution is the ascribing of an entity to an agent. 
wasDerivedFrom: URIRef # The more specific subproperties of prov:wasDerivedFrom (i.e., prov:wasQuotedFrom, prov:wasRevisionOf, prov:hadPrimarySource) should be used when applicable. wasEndedBy: URIRef # End is when an activity is deemed to have ended. An end may refer to an entity, known as trigger, that terminated the activity. wasGeneratedBy: URIRef # wasGeneratedBy wasInfluencedBy: URIRef # This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile. wasInformedBy: URIRef # An activity a2 is dependent on or informed by another activity a1, by way of some unspecified entity that is generated by a1 and used by a2. wasInvalidatedBy: URIRef # wasInvalidatedBy wasQuotedFrom: URIRef # An entity is derived from an original entity by copying, or 'quoting', some or all of it. wasStartedBy: URIRef # Start is when an activity is deemed to have started. A start may refer to an entity, known as trigger, that initiated the activity. _NS = Namespace("http://www.w3.org/ns/prov#") rdflib-6.1.1/rdflib/namespace/_QB.py000066400000000000000000000124051415774155300172100ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class QB(DefinedNamespace): """ Vocabulary for multi-dimensional (e.g. statistical) data publishing This vocabulary allows multi-dimensional data, such as statistics, to be published in RDF. It is based on the core information model from SDMX (and thus also DDI). 
Generated from: http://purl.org/linked-data/cube# Date: 2020-05-26 14:20:05.485176 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property attribute: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a attribute codeList: URIRef # gives the code list associated with a CodedProperty component: URIRef # indicates a component specification which is included in the structure of the dataset componentAttachment: URIRef # Indicates the level at which the component property should be attached, this might an qb:DataSet, qb:Slice or qb:Observation, or a qb:MeasureProperty. componentProperty: URIRef # indicates a ComponentProperty (i.e. attribute/dimension) expected on a DataSet, or a dimension fixed in a SliceKey componentRequired: URIRef # Indicates whether a component property is required (true) or optional (false) in the context of a DSD. Only applicable to components correspond to an attribute. Defaults to false (optional). concept: URIRef # gives the concept which is being measured or indicated by a ComponentProperty dataSet: URIRef # indicates the data set of which this observation is a part dimension: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a dimension hierarchyRoot: URIRef # Specifies a root of the hierarchy. A hierarchy may have multiple roots but must have at least one. measure: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a measure measureDimension: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a measure dimension measureType: URIRef # Generic measure dimension, the value of this dimension indicates which measure (from the set of measures in the DSD) is being given by the obsValue (or other primary measure) observation: URIRef # indicates a observation contained within this slice of the data set observationGroup: URIRef # Indicates a group of observations. 
The domain of this property is left open so that a group may be attached to different resources and need not be restricted to a single DataSet order: URIRef # indicates a priority order for the components of sets with this structure, used to guide presentations - lower order numbers come before higher numbers, un-numbered components come last parentChildProperty: URIRef # Specifies a property which relates a parent concept in the hierarchy to a child concept. slice: URIRef # Indicates a subset of a DataSet defined by fixing a subset of the dimensional values sliceKey: URIRef # indicates a slice key which is used for slices in this dataset sliceStructure: URIRef # indicates the sub-key corresponding to this slice structure: URIRef # indicates the structure to which this data set conforms # http://www.w3.org/2000/01/rdf-schema#Class Attachable: URIRef # Abstract superclass for everything that can have attributes and dimensions AttributeProperty: URIRef # The class of components which represent attributes of observations in the cube, e.g. unit of measurement CodedProperty: URIRef # Superclass of all coded ComponentProperties ComponentProperty: URIRef # Abstract super-property of all properties representing dimensions, attributes or measures ComponentSet: URIRef # Abstract class of things which reference one or more ComponentProperties ComponentSpecification: URIRef # Used to define properties of a component (attribute, dimension etc) which are specific to its usage in a DSD. DataSet: URIRef # Represents a collection of observations, possibly organized into various slices, conforming to some common dimensional structure. DataStructureDefinition: URIRef # Defines the structure of a DataSet or slice DimensionProperty: URIRef # The class of components which represent the dimensions of the cube HierarchicalCodeList: URIRef # Represents a generalized hierarchy of concepts which can be used for coding. 
The hierarchy is defined by one or more roots together with a property which relates concepts in the hierarchy to their child concept . The same concepts may be members of multiple hierarchies provided that different qb:parentChildProperty values are used for each hierarchy. MeasureProperty: URIRef # The class of components which represent the measured value of the phenomenon being observed Observation: URIRef # A single observation in the cube, may have one or more associated measured values ObservationGroup: URIRef # A, possibly arbitrary, group of observations. Slice: URIRef # Denotes a subset of a DataSet defined by fixing a subset of the dimensional values, component properties on the Slice SliceKey: URIRef # Denotes a subset of the component properties of a DataSet which are fixed in the corresponding slices _NS = Namespace("http://purl.org/linked-data/cube#") rdflib-6.1.1/rdflib/namespace/_RDF.py000066400000000000000000000043111415774155300173160ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class RDF(DefinedNamespace): """ The RDF Concepts Vocabulary (RDF) This is the RDF Schema for the RDF vocabulary terms in the RDF Namespace, defined in RDF 1.1 Concepts. Generated from: http://www.w3.org/1999/02/22-rdf-syntax-ns# Date: 2020-05-26 14:20:05.642859 dc:date "2019-12-16" """ _fail = True _underscore_num = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#List nil: URIRef # The empty list, with no items in it. If the rest of a list is nil then the list has no more items in it. # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property direction: URIRef # The base direction component of a CompoundLiteral. first: URIRef # The first item in the subject RDF list. language: URIRef # The language component of a CompoundLiteral. object: URIRef # The object of the subject RDF statement. predicate: URIRef # The predicate of the subject RDF statement. 
rest: URIRef # The rest of the subject RDF list after the first item. subject: URIRef # The subject of the subject RDF statement. type: URIRef # The subject is an instance of a class. value: URIRef # Idiomatic property used for structured values. # http://www.w3.org/2000/01/rdf-schema#Class Alt: URIRef # The class of containers of alternatives. Bag: URIRef # The class of unordered containers. CompoundLiteral: URIRef # A class representing a compound literal. List: URIRef # The class of RDF Lists. Property: URIRef # The class of RDF properties. Seq: URIRef # The class of ordered containers. Statement: URIRef # The class of RDF statements. # http://www.w3.org/2000/01/rdf-schema#Datatype HTML: URIRef # The datatype of RDF literals storing fragments of HTML content JSON: URIRef # The datatype of RDF literals storing JSON content. PlainLiteral: URIRef # The class of plain (i.e. untyped) literal values, as used in RIF and OWL 2 XMLLiteral: URIRef # The datatype of XML literal values. langString: URIRef # The datatype of language-tagged string values _NS = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") rdflib-6.1.1/rdflib/namespace/_RDFS.py000066400000000000000000000027201415774155300174430ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class RDFS(DefinedNamespace): """ The RDF Schema vocabulary (RDFS) Generated from: http://www.w3.org/2000/01/rdf-schema# Date: 2020-05-26 14:20:05.794866 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property comment: URIRef # A description of the subject resource. domain: URIRef # A domain of the subject property. isDefinedBy: URIRef # The definition of the subject resource. label: URIRef # A human-readable name for the subject. member: URIRef # A member of the subject resource. range: URIRef # A range of the subject property. seeAlso: URIRef # Further information about the subject resource. 
subClassOf: URIRef # The subject is a subclass of a class. subPropertyOf: URIRef # The subject is a subproperty of a property. # http://www.w3.org/2000/01/rdf-schema#Class Class: URIRef # The class of classes. Container: URIRef # The class of RDF containers. ContainerMembershipProperty: URIRef # The class of container membership properties, rdf:_1, rdf:_2, ..., all of which are sub-properties of 'member'. Datatype: URIRef # The class of RDF datatypes. Literal: URIRef # The class of literal values, eg. textual strings and integers. Resource: URIRef # The class resource, everything. _NS = Namespace("http://www.w3.org/2000/01/rdf-schema#") rdflib-6.1.1/rdflib/namespace/_SDO.py000066400000000000000000014645751415774155300173570ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class SDO(DefinedNamespace): """ schema.org namespace elements 3DModel, True, False & yield are not available as they collde with Python terms Generated from: https://schema.org/version/latest/schemaorg-current-https.jsonld Date: 2021-12-01 By: Nicholas J. Car """ _NS = Namespace("https://schema.org/") # 3DModel: URIRef # A 3D model represents some kind of 3D content, which may have [[encoding]]s in one or more [[MediaObject]]s. Many 3D formats are available (e.g. see [Wikipedia](https://en.wikipedia.org/wiki/Category:3D_graphics_file_formats)); specific encoding formats can be represented using the [[encodingFormat]] property applied to the relevant [[MediaObject]]. For the case of a single file published after Zip compression, the convention of appending '+zip' to the [[encodingFormat]] can be used. Geospatial, AR/VR, artistic/animation, gaming, engineering and scientific content can all be represented using [[3DModel]]. AMRadioChannel: URIRef # A radio channel that uses AM. APIReference: URIRef # Reference documentation for application programming interfaces (APIs). Abdomen: URIRef # Abdomen clinical examination. 
AboutPage: URIRef # Web page type: About page. AcceptAction: URIRef # The act of committing to/adopting an object.\n\nRelated actions:\n\n* [[RejectAction]]: The antonym of AcceptAction. Accommodation: URIRef # An accommodation is a place that can accommodate human beings, e.g. a hotel room, a camping pitch, or a meeting room. Many accommodations are for overnight stays, but this is not a mandatory requirement. For more specific types of accommodations not defined in schema.org, one can use additionalType with external vocabularies.

See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations. AccountingService: URIRef # Accountancy business.\n\nAs a [[LocalBusiness]] it can be described as a [[provider]] of one or more [[Service]]\(s). AchieveAction: URIRef # The act of accomplishing something via previous efforts. It is an instantaneous action rather than an ongoing process. Action: URIRef # An action performed by a direct agent and indirect participants upon a direct object. Optionally happens at a location with the help of an inanimate instrument. The execution of the action may produce a result. Specific action sub-type documentation specifies the exact expectation of each argument/role.\n\nSee also [blog post](http://blog.schema.org/2014/04/announcing-schemaorg-actions.html) and [Actions overview document](https://schema.org/docs/actions.html). ActionAccessSpecification: URIRef # A set of requirements that a must be fulfilled in order to perform an Action. ActionStatusType: URIRef # The status of an Action. ActivateAction: URIRef # The act of starting or activating a device or application (e.g. starting a timer or turning on a flashlight). ActivationFee: URIRef # Represents the activation fee part of the total price for an offered product, for example a cellphone contract. ActiveActionStatus: URIRef # An in-progress action (e.g, while watching the movie, or driving to a location). ActiveNotRecruiting: URIRef # Active, but not recruiting new participants. AddAction: URIRef # The act of editing by adding an object to a collection. AdministrativeArea: URIRef # A geographical region, typically under the jurisdiction of a particular government. AdultEntertainment: URIRef # An adult entertainment establishment. AdvertiserContentArticle: URIRef # An [[Article]] that an external entity has paid to place or to produce to its specifications. 
Includes [advertorials](https://en.wikipedia.org/wiki/Advertorial), sponsored content, native advertising and other paid content. AerobicActivity: URIRef # Physical activity of relatively low intensity that depends primarily on the aerobic energy-generating process; during activity, the aerobic metabolism uses oxygen to adequately meet energy demands during exercise. AggregateOffer: URIRef # When a single product is associated with multiple offers (for example, the same pair of shoes is offered by different merchants), then AggregateOffer can be used.\n\nNote: AggregateOffers are normally expected to associate multiple offers that all share the same defined [[businessFunction]] value, or default to http://purl.org/goodrelations/v1#Sell if businessFunction is not explicitly defined. AggregateRating: URIRef # The average rating based on multiple ratings or reviews. AgreeAction: URIRef # The act of expressing a consistency of opinion with the object. An agent agrees to/about an object (a proposition, topic or theme) with participants. Airline: URIRef # An organization that provides flights for passengers. Airport: URIRef # An airport. AlbumRelease: URIRef # AlbumRelease. AlignmentObject: URIRef # An intangible item that describes an alignment between a learning resource and a node in an educational framework. Should not be used where the nature of the alignment can be described using a simple property, for example to express that a resource [[teaches]] or [[assesses]] a competency. AllWheelDriveConfiguration: URIRef # All-wheel Drive is a transmission layout where the engine drives all four wheels. AllergiesHealthAspect: URIRef # Content about the allergy-related aspects of a health topic. AllocateAction: URIRef # The act of organizing tasks/objects/events by associating resources to it. AmpStory: URIRef # A creative work with a visual storytelling format intended to be viewed online, particularly on mobile devices. AmusementPark: URIRef # An amusement park. 
AnaerobicActivity: URIRef # Physical activity that is of high-intensity which utilizes the anaerobic metabolism of the body. AnalysisNewsArticle: URIRef # An AnalysisNewsArticle is a [[NewsArticle]] that, while based on factual reporting, incorporates the expertise of the author/producer, offering interpretations and conclusions. AnatomicalStructure: URIRef # Any part of the human body, typically a component of an anatomical system. Organs, tissues, and cells are all anatomical structures. AnatomicalSystem: URIRef # An anatomical system is a group of anatomical structures that work together to perform a certain task. Anatomical systems, such as organ systems, are one organizing principle of anatomy, and can includes circulatory, digestive, endocrine, integumentary, immune, lymphatic, muscular, nervous, reproductive, respiratory, skeletal, urinary, vestibular, and other systems. Anesthesia: URIRef # A specific branch of medical science that pertains to study of anesthetics and their application. AnimalShelter: URIRef # Animal shelter. Answer: URIRef # An answer offered to a question; perhaps correct, perhaps opinionated or wrong. Apartment: URIRef # An apartment (in American English) or flat (in British English) is a self-contained housing unit (a type of residential real estate) that occupies only part of a building (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Apartment). ApartmentComplex: URIRef # Residence type: Apartment complex. Appearance: URIRef # Appearance assessment with clinical examination. AppendAction: URIRef # The act of inserting at the end if an ordered collection. ApplyAction: URIRef # The act of registering to an organization/service without the guarantee to receive it.\n\nRelated actions:\n\n* [[RegisterAction]]: Unlike RegisterAction, ApplyAction has no guarantees that the application will be accepted. 
ApprovedIndication: URIRef # An indication for a medical therapy that has been formally specified or approved by a regulatory body that regulates use of the therapy; for example, the US FDA approves indications for most drugs in the US. Aquarium: URIRef # Aquarium. ArchiveComponent: URIRef # An intangible type to be applied to any archive content, carrying with it a set of properties required to describe archival items and collections. ArchiveOrganization: URIRef # An organization with archival holdings. An organization which keeps and preserves archival material and typically makes it accessible to the public. ArriveAction: URIRef # The act of arriving at a place. An agent arrives at a destination from a fromLocation, optionally with participants. ArtGallery: URIRef # An art gallery. Artery: URIRef # A type of blood vessel that specifically carries blood away from the heart. Article: URIRef # An article, such as a news article or piece of investigative report. Newspapers and magazines have articles of many different types and this is intended to cover them all.\n\nSee also [blog post](http://blog.schema.org/2014/09/schemaorg-support-for-bibliographic_2.html). AskAction: URIRef # The act of posing a question / favor to someone.\n\nRelated actions:\n\n* [[ReplyAction]]: Appears generally as a response to AskAction. AskPublicNewsArticle: URIRef # A [[NewsArticle]] expressing an open call by a [[NewsMediaOrganization]] asking the public for input, insights, clarifications, anecdotes, documentation, etc., on an issue, for reporting purposes. AssessAction: URIRef # The act of forming one's opinion, reaction or sentiment. AssignAction: URIRef # The act of allocating an action/event/task to some destination (someone or something). Atlas: URIRef # A collection or bound volume of maps, charts, plates or tables, physical or in media form illustrating any subject. Attorney: URIRef # Professional service: Attorney. 
\n\nThis type is deprecated - [[LegalService]] is more inclusive and less ambiguous. Audience: URIRef # Intended audience for an item, i.e. the group for whom the item was created. AudioObject: URIRef # An audio file. AudioObjectSnapshot: URIRef # A specific and exact (byte-for-byte) version of an [[AudioObject]]. Two byte-for-byte identical files, for the purposes of this type, considered identical. If they have different embedded metadata the files will differ. Different external facts about the files, e.g. creator or dateCreated that aren't represented in their actual content, do not affect this notion of identity. Audiobook: URIRef # An audiobook. AudiobookFormat: URIRef # Book format: Audiobook. This is an enumerated value for use with the bookFormat property. There is also a type 'Audiobook' in the bib extension which includes Audiobook specific properties. AuthoritativeLegalValue: URIRef # Indicates that the publisher gives some special status to the publication of the document. ("The Queens Printer" version of a UK Act of Parliament, or the PDF version of a Directive published by the EU Office of Publications). Something "Authoritative" is considered to be also [[OfficialLegalValue]]". AuthorizeAction: URIRef # The act of granting permission to an object. AutoBodyShop: URIRef # Auto body shop. AutoDealer: URIRef # An car dealership. AutoPartsStore: URIRef # An auto parts store. AutoRental: URIRef # A car rental business. AutoRepair: URIRef # Car repair business. AutoWash: URIRef # A car wash business. AutomatedTeller: URIRef # ATM/cash machine. AutomotiveBusiness: URIRef # Car repair, sales, or parts. Ayurvedic: URIRef # A system of medicine that originated in India over thousands of years and that focuses on integrating and balancing the body, mind, and spirit. BackOrder: URIRef # Indicates that the item is available on back order. 
BackgroundNewsArticle: URIRef # A [[NewsArticle]] providing historical context, definition and detail on a specific topic (aka "explainer" or "backgrounder"). For example, an in-depth article or frequently-asked-questions ([FAQ](https://en.wikipedia.org/wiki/FAQ)) document on topics such as Climate Change or the European Union. Other kinds of background material from a non-news setting are often described using [[Book]] or [[Article]], in particular [[ScholarlyArticle]]. See also [[NewsArticle]] for related vocabulary from a learning/education perspective. Bacteria: URIRef # Pathogenic bacteria that cause bacterial infection. Bakery: URIRef # A bakery. Balance: URIRef # Physical activity that is engaged to help maintain posture and balance. BankAccount: URIRef # A product or service offered by a bank whereby one may deposit, withdraw or transfer money and in some cases be paid interest. BankOrCreditUnion: URIRef # Bank or credit union. BarOrPub: URIRef # A bar or pub. Barcode: URIRef # An image of a visual machine-readable code such as a barcode or QR code. BasicIncome: URIRef # BasicIncome: this is a benefit for basic income. Beach: URIRef # Beach. BeautySalon: URIRef # Beauty salon. BedAndBreakfast: URIRef # Bed and breakfast.

See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations. BedDetails: URIRef # An entity holding detailed information about the available bed types, e.g. the quantity of twin beds for a hotel room. For the single case of just one bed of a certain type, you can use bed directly with a text. See also [[BedType]] (under development). BedType: URIRef # A type of bed. This is used for indicating the bed or beds available in an accommodation. BefriendAction: URIRef # The act of forming a personal connection with someone (object) mutually/bidirectionally/symmetrically.\n\nRelated actions:\n\n* [[FollowAction]]: Unlike FollowAction, BefriendAction implies that the connection is reciprocal. BenefitsHealthAspect: URIRef # Content about the benefits and advantages of usage or utilization of topic. BikeStore: URIRef # A bike store. BioChemEntity: URIRef # Any biological, chemical, or biochemical thing. For example: a protein; a gene; a chemical; a synthetic chemical. Blog: URIRef # A [blog](https://en.wikipedia.org/wiki/Blog), sometimes known as a "weblog". Note that the individual posts ([[BlogPosting]]s) in a [[Blog]] are often colloqually referred to by the same term. BlogPosting: URIRef # A blog post. BloodTest: URIRef # A medical test performed on a sample of a patient's blood. BoardingPolicyType: URIRef # A type of boarding policy used by an airline. BoatReservation: URIRef # A reservation for boat travel. Note: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. BoatTerminal: URIRef # A terminal for boats, ships, and other water vessels. BoatTrip: URIRef # A trip on a commercial ferry line. BodyMeasurementArm: URIRef # Arm length (measured between arms/shoulder line intersection and the prominent wrist bone). Used, for example, to fit shirts. 
BodyMeasurementBust: URIRef # Maximum girth of bust. Used, for example, to fit women's suits. BodyMeasurementChest: URIRef # Maximum girth of chest. Used, for example, to fit men's suits. BodyMeasurementFoot: URIRef # Foot length (measured between end of the most prominent toe and the most prominent part of the heel). Used, for example, to measure socks. BodyMeasurementHand: URIRef # Maximum hand girth (measured over the knuckles of the open right hand excluding thumb, fingers together). Used, for example, to fit gloves. BodyMeasurementHead: URIRef # Maximum girth of head above the ears. Used, for example, to fit hats. BodyMeasurementHeight: URIRef # Body height (measured between crown of head and soles of feet). Used, for example, to fit jackets. BodyMeasurementHips: URIRef # Girth of hips (measured around the buttocks). Used, for example, to fit skirts. BodyMeasurementInsideLeg: URIRef # Inside leg (measured between crotch and soles of feet). Used, for example, to fit pants. BodyMeasurementNeck: URIRef # Girth of neck. Used, for example, to fit shirts. BodyMeasurementTypeEnumeration: URIRef # Enumerates types (or dimensions) of a person's body measurements, for example for fitting of clothes. BodyMeasurementUnderbust: URIRef # Girth of body just below the bust. Used, for example, to fit women's swimwear. BodyMeasurementWaist: URIRef # Girth of natural waistline (between hip bones and lower ribs). Used, for example, to fit pants. BodyMeasurementWeight: URIRef # Body weight. Used, for example, to measure pantyhose. BodyOfWater: URIRef # A body of water, such as a sea, ocean, or lake. Bone: URIRef # Rigid connective tissue that comprises up the skeletal structure of the human body. Book: URIRef # A book. BookFormatType: URIRef # The publication format of the book. BookSeries: URIRef # A series of books. Included books can be indicated with the hasPart property. BookStore: URIRef # A bookstore. 
BookmarkAction: URIRef # An agent bookmarks/flags/labels/tags/marks an object. Boolean: URIRef # Boolean: True or False. BorrowAction: URIRef # The act of obtaining an object under an agreement to return it at a later date. Reciprocal of LendAction.\n\nRelated actions:\n\n* [[LendAction]]: Reciprocal of BorrowAction. BowlingAlley: URIRef # A bowling alley. BrainStructure: URIRef # Any anatomical structure which pertains to the soft nervous tissue functioning as the coordinating center of sensation and intellectual and nervous activity. Brand: URIRef # A brand is a name used by an organization or business person for labeling a product, product group, or similar. BreadcrumbList: URIRef # A BreadcrumbList is an ItemList consisting of a chain of linked Web pages, typically described using at least their URL and their name, and typically ending with the current page.\n\nThe [[position]] property is used to reconstruct the order of the items in a BreadcrumbList The convention is that a breadcrumb list has an [[itemListOrder]] of [[ItemListOrderAscending]] (lower values listed first), and that the first items in this list correspond to the "top" or beginning of the breadcrumb trail, e.g. with a site or section homepage. The specific values of 'position' are not assigned meaning for a BreadcrumbList, but they should be integers, e.g. beginning with '1' for the first item in the list. Brewery: URIRef # Brewery. Bridge: URIRef # A bridge. BroadcastChannel: URIRef # A unique instance of a BroadcastService on a CableOrSatelliteService lineup. BroadcastEvent: URIRef # An over the air or online broadcast event. BroadcastFrequencySpecification: URIRef # The frequency in MHz and the modulation used for a particular BroadcastService. BroadcastRelease: URIRef # BroadcastRelease. BroadcastService: URIRef # A delivery service through which content is provided via broadcast over the air or online. 
BrokerageAccount: URIRef # An account that allows an investor to deposit funds and place investment orders with a licensed broker or brokerage firm. BuddhistTemple: URIRef # A Buddhist temple. BusOrCoach: URIRef # A bus (also omnibus or autobus) is a road vehicle designed to carry passengers. Coaches are luxury busses, usually in service for long distance travel. BusReservation: URIRef # A reservation for bus travel. \n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. BusStation: URIRef # A bus station. BusStop: URIRef # A bus stop. BusTrip: URIRef # A trip on a commercial bus line. BusinessAudience: URIRef # A set of characteristics belonging to businesses, e.g. who compose an item's target audience. BusinessEntityType: URIRef # A business entity type is a conceptual entity representing the legal form, the size, the main line of business, the position in the value chain, or any combination thereof, of an organization or business person.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#Business\n* http://purl.org/goodrelations/v1#Enduser\n* http://purl.org/goodrelations/v1#PublicInstitution\n* http://purl.org/goodrelations/v1#Reseller BusinessEvent: URIRef # Event type: Business event. BusinessFunction: URIRef # The business function specifies the type of activity or access (i.e., the bundle of rights) offered by the organization or business person through the offer. Typical are sell, rental or lease, maintenance or repair, manufacture / produce, recycle / dispose, engineering / construction, or installation. 
Proprietary specifications of access rights are also instances of this class.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#ConstructionInstallation\n* http://purl.org/goodrelations/v1#Dispose\n* http://purl.org/goodrelations/v1#LeaseOut\n* http://purl.org/goodrelations/v1#Maintain\n* http://purl.org/goodrelations/v1#ProvideService\n* http://purl.org/goodrelations/v1#Repair\n* http://purl.org/goodrelations/v1#Sell\n* http://purl.org/goodrelations/v1#Buy BusinessSupport: URIRef # BusinessSupport: this is a benefit for supporting businesses. BuyAction: URIRef # The act of giving money to a seller in exchange for goods or services rendered. An agent buys an object, product, or service from a seller for a price. Reciprocal of SellAction. CDCPMDRecord: URIRef # A CDCPMDRecord is a data structure representing a record in a CDC tabular data format used for hospital data reporting. See [documentation](/docs/cdc-covid.html) for details, and the linked CDC materials for authoritative definitions used as the source here. CDFormat: URIRef # CDFormat. CT: URIRef # X-ray computed tomography imaging. CableOrSatelliteService: URIRef # A service which provides access to media programming like TV or radio. Access may be via cable or satellite. CafeOrCoffeeShop: URIRef # A cafe or coffee shop. Campground: URIRef # A camping site, campsite, or [[Campground]] is a place used for overnight stay in the outdoors, typically containing individual [[CampingPitch]] locations. \n\n In British English a campsite is an area, usually divided into a number of pitches, where people can camp overnight using tents or camper vans or caravans; this British English use of the word is synonymous with the American English expression campground. 
In American English the term campsite generally means an area where an individual, family, group, or military unit can pitch a tent or park a camper; a campground may contain many campsites (Source: Wikipedia see [https://en.wikipedia.org/wiki/Campsite](https://en.wikipedia.org/wiki/Campsite)).\n\n See also the dedicated [document on the use of schema.org for marking up hotels and other forms of accommodations](/docs/hotels.html). CampingPitch: URIRef # A [[CampingPitch]] is an individual place for overnight stay in the outdoors, typically being part of a larger camping site, or [[Campground]].\n\n In British English a campsite, or campground, is an area, usually divided into a number of pitches, where people can camp overnight using tents or camper vans or caravans; this British English use of the word is synonymous with the American English expression campground. In American English the term campsite generally means an area where an individual, family, group, or military unit can pitch a tent or park a camper; a campground may contain many campsites. (Source: Wikipedia see [https://en.wikipedia.org/wiki/Campsite](https://en.wikipedia.org/wiki/Campsite)).\n\n See also the dedicated [document on the use of schema.org for marking up hotels and other forms of accommodations](/docs/hotels.html). Canal: URIRef # A canal, like the Panama Canal. CancelAction: URIRef # The act of asserting that a future event/action is no longer going to happen.\n\nRelated actions:\n\n* [[ConfirmAction]]: The antonym of CancelAction. Car: URIRef # A car is a wheeled, self-powered motor vehicle used for transportation. CarUsageType: URIRef # A value indicating a special usage of a car, e.g. commercial rental, driving school, or as a taxi. Cardiovascular: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of heart and vasculature. CardiovascularExam: URIRef # Cardiovascular system assessment withclinical examination. 
CaseSeries: URIRef # A case series (also known as a clinical series) is a medical research study that tracks patients with a known exposure given similar treatment or examines their medical records for exposure and outcome. A case series can be retrospective or prospective and usually involves a smaller number of patients than the more powerful case-control studies or randomized controlled trials. Case series may be consecutive or non-consecutive, depending on whether all cases presenting to the reporting authors over a period of time were included, or only a selection. Casino: URIRef # A casino. CassetteFormat: URIRef # CassetteFormat. CategoryCode: URIRef # A Category Code. CategoryCodeSet: URIRef # A set of Category Code values. CatholicChurch: URIRef # A Catholic church. CausesHealthAspect: URIRef # Information about the causes and main actions that gave rise to the topic. Cemetery: URIRef # A graveyard. Chapter: URIRef # One of the sections into which a book is divided. A chapter usually has a section number or a name. CharitableIncorporatedOrganization: URIRef # CharitableIncorporatedOrganization: Non-profit type referring to a Charitable Incorporated Organization (UK). CheckAction: URIRef # An agent inspects, determines, investigates, inquires, or examines an object's accuracy, quality, condition, or state. CheckInAction: URIRef # The act of an agent communicating (service provider, social media, etc) their arrival by registering/confirming for a previously reserved service (e.g. flight check in) or at a place (e.g. 
hotel), possibly resulting in a result (boarding pass, etc).\n\nRelated actions:\n\n* [[CheckOutAction]]: The antonym of CheckInAction.\n* [[ArriveAction]]: Unlike ArriveAction, CheckInAction implies that the agent is informing/confirming the start of a previously reserved service.\n* [[ConfirmAction]]: Unlike ConfirmAction, CheckInAction implies that the agent is informing/confirming the *start* of a previously reserved service rather than its validity/existence. CheckOutAction: URIRef # The act of an agent communicating (service provider, social media, etc) their departure of a previously reserved service (e.g. flight check in) or place (e.g. hotel).\n\nRelated actions:\n\n* [[CheckInAction]]: The antonym of CheckOutAction.\n* [[DepartAction]]: Unlike DepartAction, CheckOutAction implies that the agent is informing/confirming the end of a previously reserved service.\n* [[CancelAction]]: Unlike CancelAction, CheckOutAction implies that the agent is informing/confirming the end of a previously reserved service. CheckoutPage: URIRef # Web page type: Checkout page. ChemicalSubstance: URIRef # A chemical substance is 'a portion of matter of constant composition, composed of molecular entities of the same type or of different types' (source: [ChEBI:59999](https://www.ebi.ac.uk/chebi/searchId.do?chebiId=59999)). ChildCare: URIRef # A Childcare center. ChildrensEvent: URIRef # Event type: Children's event. Chiropractic: URIRef # A system of medicine focused on the relationship between the body's structure, mainly the spine, and its functioning. ChooseAction: URIRef # The act of expressing a preference from a set of options or a large or unbounded set of choices/options. Church: URIRef # A church. City: URIRef # A city or town. CityHall: URIRef # A city hall. CivicStructure: URIRef # A public structure, such as a town hall or concert hall. 
Claim: URIRef # A [[Claim]] in Schema.org represents a specific, factually-oriented claim that could be the [[itemReviewed]] in a [[ClaimReview]]. The content of a claim can be summarized with the [[text]] property. Variations on well known claims can have their common identity indicated via [[sameAs]] links, and summarized with a [[name]]. Ideally, a [[Claim]] description includes enough contextual information to minimize the risk of ambiguity or inclarity. In practice, many claims are better understood in the context in which they appear or the interpretations provided by claim reviews. Beyond [[ClaimReview]], the Claim type can be associated with related creative works - for example a [[ScholarlyArticle]] or [[Question]] might be [[about]] some [[Claim]]. At this time, Schema.org does not define any types of relationship between claims. This is a natural area for future exploration. ClaimReview: URIRef # A fact-checking review of claims made (or reported) in some creative work (referenced via itemReviewed). Class: URIRef # A class, also often called a 'Type'; equivalent to rdfs:Class. CleaningFee: URIRef # Represents the cleaning fee part of the total price for an offered product, for example a vacation rental. Clinician: URIRef # Medical clinicians, including practicing physicians and other medical professionals involved in clinical practice. Clip: URIRef # A short TV or radio program or a segment/part of a program. ClothingStore: URIRef # A clothing store. CoOp: URIRef # Play mode: CoOp. Co-operative games, where you play on the same team with friends. Code: URIRef # Computer programming source code. Example: Full (compile ready) solutions, code snippet samples, scripts, templates. CohortStudy: URIRef # Also known as a panel study. A cohort study is a form of longitudinal study used in medicine and social science. It is one type of study design and should be compared with a cross-sectional study. 
A cohort is a group of people who share a common characteristic or experience within a defined period (e.g., are born, leave school, lose their job, are exposed to a drug or a vaccine, etc.). The comparison group may be the general population from which the cohort is drawn, or it may be another cohort of persons thought to have had little or no exposure to the substance under investigation, but otherwise similar. Alternatively, subgroups within the cohort may be compared with each other. Collection: URIRef # A collection of items e.g. creative works or products. CollectionPage: URIRef # Web page type: Collection page. CollegeOrUniversity: URIRef # A college, university, or other third-level educational institution. ComedyClub: URIRef # A comedy club. ComedyEvent: URIRef # Event type: Comedy event. ComicCoverArt: URIRef # The artwork on the cover of a comic. ComicIssue: URIRef # Individual comic issues are serially published as part of a larger series. For the sake of consistency, even one-shot issues belong to a series comprised of a single issue. All comic issues can be uniquely identified by: the combination of the name and volume number of the series to which the issue belongs; the issue number; and the variant description of the issue (if any). ComicSeries: URIRef # A sequential publication of comic stories under a unifying title, for example "The Amazing Spider-Man" or "Groo the Wanderer". ComicStory: URIRef # The term "story" is any indivisible, re-printable unit of a comic, including the interior stories, covers, and backmatter. Most comics have at least two stories: a cover (ComicCoverArt) and an interior story. Comment: URIRef # A comment on an item - for example, a comment on a blog post. The comment's content is expressed via the [[text]] property, and its topic via [[about]], properties shared with all CreativeWorks. CommentAction: URIRef # The act of generating a comment about a subject. 
CommentPermission: URIRef # Permission to add comments to the document. CommunicateAction: URIRef # The act of conveying information to another person via a communication medium (instrument) such as speech, email, or telephone conversation. CommunityHealth: URIRef # A field of public health focusing on improving health characteristics of a defined population in relation with their geographical or environment areas. CompilationAlbum: URIRef # CompilationAlbum. CompleteDataFeed: URIRef # A [[CompleteDataFeed]] is a [[DataFeed]] whose standard representation includes content for every item currently in the feed. This is the equivalent of Atom's element as defined in Feed Paging and Archiving [RFC 5005](https://tools.ietf.org/html/rfc5005), For example (and as defined for Atom), when using data from a feed that represents a collection of items that varies over time (e.g. "Top Twenty Records") there is no need to have newer entries mixed in alongside older, obsolete entries. By marking this feed as a CompleteDataFeed, old entries can be safely discarded when the feed is refreshed, since we can assume the feed has provided descriptions for all current items. Completed: URIRef # Completed. CompletedActionStatus: URIRef # An action that has already taken place. CompoundPriceSpecification: URIRef # A compound price specification is one that bundles multiple prices that all apply in combination for different dimensions of consumption. Use the name property of the attached unit price specification for indicating the dimension of a price component (e.g. "electricity" or "final cleaning"). ComputerLanguage: URIRef # This type covers computer programming languages such as Scheme and Lisp, as well as other language-like computer representations. Natural languages are best represented with the [[Language]] type. ComputerStore: URIRef # A computer store. 
ConfirmAction: URIRef # The act of notifying someone that a future event/action is going to happen as expected.\n\nRelated actions:\n\n* [[CancelAction]]: The antonym of ConfirmAction. Consortium: URIRef # A Consortium is a membership [[Organization]] whose members are typically Organizations. ConsumeAction: URIRef # The act of ingesting information/resources/food. ContactPage: URIRef # Web page type: Contact page. ContactPoint: URIRef # A contact point—for example, a Customer Complaints department. ContactPointOption: URIRef # Enumerated options related to a ContactPoint. ContagiousnessHealthAspect: URIRef # Content about contagion mechanisms and contagiousness information over the topic. Continent: URIRef # One of the continents (for example, Europe or Africa). ControlAction: URIRef # An agent controls a device or application. ConvenienceStore: URIRef # A convenience store. Conversation: URIRef # One or more messages between organizations or people on a particular topic. Individual messages can be linked to the conversation with isPartOf or hasPart properties. CookAction: URIRef # The act of producing/preparing food. Corporation: URIRef # Organization: A business corporation. CorrectionComment: URIRef # A [[comment]] that corrects [[CreativeWork]]. Country: URIRef # A country. Course: URIRef # A description of an educational course which may be offered as distinct instances at which take place at different times or take place at different locations, or be offered through different media or modes of study. An educational course is a sequence of one or more educational events and/or creative works which aims to build knowledge, competence or ability of learners. CourseInstance: URIRef # An instance of a [[Course]] which is distinct from other instances because it is offered at a different time or location or through different media or modes of study or to a specific section of students. Courthouse: URIRef # A courthouse. 
CoverArt: URIRef # The artwork on the outer surface of a CreativeWork. CovidTestingFacility: URIRef # A CovidTestingFacility is a [[MedicalClinic]] where testing for the COVID-19 Coronavirus disease is available. If the facility is being made available from an established [[Pharmacy]], [[Hotel]], or other non-medical organization, multiple types can be listed. This makes it easier to re-use existing schema.org information about that place e.g. contact info, address, opening hours. Note that in an emergency, such information may not always be reliable. CreateAction: URIRef # The act of deliberately creating/producing/generating/building a result out of the agent. CreativeWork: URIRef # The most generic kind of creative work, including books, movies, photographs, software programs, etc. CreativeWorkSeason: URIRef # A media season e.g. tv, radio, video game etc. CreativeWorkSeries: URIRef # A CreativeWorkSeries in schema.org is a group of related items, typically but not necessarily of the same kind. CreativeWorkSeries are usually organized into some order, often chronological. Unlike [[ItemList]] which is a general purpose data structure for lists of things, the emphasis with CreativeWorkSeries is on published materials (written e.g. books and periodicals, or media such as tv, radio and games).\n\nSpecific subtypes are available for describing [[TVSeries]], [[RadioSeries]], [[MovieSeries]], [[BookSeries]], [[Periodical]] and [[VideoGameSeries]]. In each case, the [[hasPart]] / [[isPartOf]] properties can be used to relate the CreativeWorkSeries to its parts. The general CreativeWorkSeries type serves largely just to organize these more specific and practical subtypes.\n\nIt is common for properties applicable to an item from the series to be usefully applied to the containing group. Schema.org attempts to anticipate some of these cases, but publishers should be free to apply properties of the series parts to the series as a whole wherever they seem appropriate. 
CreditCard: URIRef # A card payment method of a particular brand or name. Used to mark up a particular payment method and/or the financial product/service that supplies the card account.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#AmericanExpress\n* http://purl.org/goodrelations/v1#DinersClub\n* http://purl.org/goodrelations/v1#Discover\n* http://purl.org/goodrelations/v1#JCB\n* http://purl.org/goodrelations/v1#MasterCard\n* http://purl.org/goodrelations/v1#VISA Crematorium: URIRef # A crematorium. CriticReview: URIRef # A [[CriticReview]] is a more specialized form of Review written or published by a source that is recognized for its reviewing activities. These can include online columns, travel and food guides, TV and radio shows, blogs and other independent Web sites. [[CriticReview]]s are typically more in-depth and professionally written. For simpler, casually written user/visitor/viewer/customer reviews, it is more appropriate to use the [[UserReview]] type. Review aggregator sites such as Metacritic already separate out the site's user reviews from selected critic reviews that originate from third-party sources. CrossSectional: URIRef # Studies carried out on pre-existing data (usually from 'snapshot' surveys), such as that collected by the Census Bureau. Sometimes called Prevalence Studies. CssSelectorType: URIRef # Text representing a CSS selector. CurrencyConversionService: URIRef # A service to convert funds from one currency to another currency. DDxElement: URIRef # An alternative, closely-related condition typically considered later in the differential diagnosis process along with the signs that are used to distinguish it. DJMixAlbum: URIRef # DJMixAlbum. DVDFormat: URIRef # DVDFormat. DamagedCondition: URIRef # Indicates that the item is damaged. DanceEvent: URIRef # Event type: A social dance. DanceGroup: URIRef # A dance group—for example, the Alvin Ailey Dance Theater or Riverdance. DataCatalog: URIRef # A collection of datasets. 
DataDownload: URIRef # A dataset in downloadable form. DataFeed: URIRef # A single feed providing structured information about one or more entities or topics. DataFeedItem: URIRef # A single item within a larger data feed. DataType: URIRef # The basic data types such as Integers, Strings, etc. Dataset: URIRef # A body of structured information describing some topic(s) of interest. Date: URIRef # A date value in [ISO 8601 date format](http://en.wikipedia.org/wiki/ISO_8601). DateTime: URIRef # A combination of date and time of day in the form [-]CCYY-MM-DDThh:mm:ss[Z|(+|-)hh:mm] (see Chapter 5.4 of ISO 8601). DatedMoneySpecification: URIRef # A DatedMoneySpecification represents monetary values with optional start and end dates. For example, this could represent an employee's salary over a specific period of time. __Note:__ This type has been superseded by [[MonetaryAmount]] use of that type is recommended DayOfWeek: URIRef # The day of the week, e.g. used to specify to which day the opening hours of an OpeningHoursSpecification refer. Originally, URLs from [GoodRelations](http://purl.org/goodrelations/v1) were used (for [[Monday]], [[Tuesday]], [[Wednesday]], [[Thursday]], [[Friday]], [[Saturday]], [[Sunday]] plus a special entry for [[PublicHolidays]]); these have now been integrated directly into schema.org. DaySpa: URIRef # A day spa. DeactivateAction: URIRef # The act of stopping or deactivating a device or application (e.g. stopping a timer or turning off a flashlight). DecontextualizedContent: URIRef # Content coded 'missing context' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'missing context': Presenting unaltered video in an inaccurate manner that misrepresents the footage. For example, using incorrect dates or locations, altering the transcript or sharing brief clips from a longer video to mislead viewers. (A video rated 'original' can also be missing context.) 
For an [[ImageObject]] to be 'missing context': Presenting unaltered images in an inaccurate manner to misrepresent the image and mislead the viewer. For example, a common tactic is using an unaltered image but saying it came from a different time or place. (An image rated 'original' can also be missing context.) For an [[ImageObject]] with embedded text to be 'missing context': An unaltered image presented in an inaccurate manner to misrepresent the image and mislead the viewer. For example, a common tactic is using an unaltered image but saying it came from a different time or place. (An 'original' image with inaccurate text would generally fall in this category.) For an [[AudioObject]] to be 'missing context': Unaltered audio presented in an inaccurate manner that misrepresents it. For example, using incorrect dates or locations, or sharing brief clips from a longer recording to mislead viewers. (Audio rated “original” can also be missing context.) DefenceEstablishment: URIRef # A defence establishment, such as an army or navy base. DefinedRegion: URIRef # A DefinedRegion is a geographic area defined by potentially arbitrary (rather than political, administrative or natural geographical) criteria. Properties are provided for defining a region by reference to sets of postal codes. Examples: a delivery destination when shopping. Region where regional pricing is configured. Requirement 1: Country: US States: "NY", "CA" Requirement 2: Country: US PostalCode Set: { [94000-94585], [97000, 97999], [13000, 13599]} { [12345, 12345], [78945, 78945], } Region = state, canton, prefecture, autonomous community... DefinedTerm: URIRef # A word, name, acronym, phrase, etc. with a formal definition. Often used in the context of category or subject classification, glossaries or dictionaries, product or creative work types, etc. 
Use the name property for the term being defined, use termCode if the term has an alpha-numeric code allocated, use description to provide the definition of the term. DefinedTermSet: URIRef # A set of defined terms for example a set of categories or a classification scheme, a glossary, dictionary or enumeration. DefinitiveLegalValue: URIRef # Indicates a document for which the text is conclusively what the law says and is legally binding. (e.g. The digitally signed version of an Official Journal.) Something "Definitive" is considered to be also [[AuthoritativeLegalValue]]. DeleteAction: URIRef # The act of editing a recipient by removing one of its objects. DeliveryChargeSpecification: URIRef # The price for the delivery of an offer using a particular delivery method. DeliveryEvent: URIRef # An event involving the delivery of an item. DeliveryMethod: URIRef # A delivery method is a standardized procedure for transferring the product or service to the destination of fulfillment chosen by the customer. Delivery methods are characterized by the means of transportation used, and by the organization or group that is the contracting party for the sending organization or person.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#DeliveryModeDirectDownload\n* http://purl.org/goodrelations/v1#DeliveryModeFreight\n* http://purl.org/goodrelations/v1#DeliveryModeMail\n* http://purl.org/goodrelations/v1#DeliveryModeOwnFleet\n* http://purl.org/goodrelations/v1#DeliveryModePickUp\n* http://purl.org/goodrelations/v1#DHL\n* http://purl.org/goodrelations/v1#FederalExpress\n* http://purl.org/goodrelations/v1#UPS DeliveryTimeSettings: URIRef # A DeliveryTimeSettings represents re-usable pieces of shipping information, relating to timing. It is designed for publication on an URL that may be referenced via the [[shippingSettingsLink]] property of a [[OfferShippingDetails]]. 
Several occurrences can be published, distinguished (and identified/referenced) by their different values for [[transitTimeLabel]]. Demand: URIRef # A demand entity represents the public, not necessarily binding, not necessarily exclusive, announcement by an organization or person to seek a certain type of goods or services. For describing demand using this type, the very same properties used for Offer apply. DemoAlbum: URIRef # DemoAlbum. Dentist: URIRef # A dentist. Dentistry: URIRef # A branch of medicine that is involved in the dental care. DepartAction: URIRef # The act of departing from a place. An agent departs from an fromLocation for a destination, optionally with participants. DepartmentStore: URIRef # A department store. DepositAccount: URIRef # A type of Bank Account with a main purpose of depositing funds to gain interest or other benefits. Dermatologic: URIRef # Something relating to or practicing dermatology. Dermatology: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of skin. DiabeticDiet: URIRef # A diet appropriate for people with diabetes. Diagnostic: URIRef # A medical device used for diagnostic purposes. DiagnosticLab: URIRef # A medical laboratory that offers on-site or off-site diagnostic services. DiagnosticProcedure: URIRef # A medical procedure intended primarily for diagnostic, as opposed to therapeutic, purposes. Diet: URIRef # A strategy of regulating the intake of food to achieve or maintain a specific health-related goal. DietNutrition: URIRef # Dietetic and nutrition as a medical specialty. DietarySupplement: URIRef # A product taken by mouth that contains a dietary ingredient intended to supplement the diet. Dietary ingredients may include vitamins, minerals, herbs or other botanicals, amino acids, and substances such as enzymes, organ tissues, glandulars and metabolites. DigitalAudioTapeFormat: URIRef # DigitalAudioTapeFormat. 
DigitalDocument: URIRef # An electronic file or document. DigitalDocumentPermission: URIRef # A permission for a particular person or group to access a particular file. DigitalDocumentPermissionType: URIRef # A type of permission which can be granted for accessing a digital document. DigitalFormat: URIRef # DigitalFormat. DisabilitySupport: URIRef # DisabilitySupport: this is a benefit for disability support. DisagreeAction: URIRef # The act of expressing a difference of opinion with the object. An agent disagrees to/about an object (a proposition, topic or theme) with participants. Discontinued: URIRef # Indicates that the item has been discontinued. DiscoverAction: URIRef # The act of discovering/finding an object. DiscussionForumPosting: URIRef # A posting to a discussion forum. DislikeAction: URIRef # The act of expressing a negative sentiment about the object. An agent dislikes an object (a proposition, topic or theme) with participants. Distance: URIRef # Properties that take Distances as values are of the form '<Number> <Length unit of measure>'. E.g., '7 ft'. DistanceFee: URIRef # Represents the distance fee (e.g., price per km or mile) part of the total price for an offered product, for example a car rental. Distillery: URIRef # A distillery. DonateAction: URIRef # The act of providing goods, services, or money without compensation, often for philanthropic reasons. DoseSchedule: URIRef # A specific dosing schedule for a drug or supplement. DoubleBlindedTrial: URIRef # A trial design in which neither the researcher nor the patient knows the details of the treatment the patient was randomly assigned to. DownloadAction: URIRef # The act of downloading an object. Downpayment: URIRef # Represents the downpayment (up-front payment) price component of the total price for an offered product that has additional installment payments. 
DrawAction: URIRef # The act of producing a visual/graphical representation of an object, typically with a pen/pencil and paper as instruments. Drawing: URIRef # A picture or diagram made with a pencil, pen, or crayon rather than paint. DrinkAction: URIRef # The act of swallowing liquids. DriveWheelConfigurationValue: URIRef # A value indicating which roadwheels will receive torque. DrivingSchoolVehicleUsage: URIRef # Indicates the usage of the vehicle for driving school. Drug: URIRef # A chemical or biologic substance, used as a medical therapy, that has a physiological effect on an organism. Here the term drug is used interchangeably with the term medicine although clinical knowledge make a clear difference between them. DrugClass: URIRef # A class of medical drugs, e.g., statins. Classes can represent general pharmacological class, common mechanisms of action, common physiological effects, etc. DrugCost: URIRef # The cost per unit of a medical drug. Note that this type is not meant to represent the price in an offer of a drug for sale; see the Offer type for that. This type will typically be used to tag wholesale or average retail cost of a drug, or maximum reimbursable cost. Costs of medical drugs vary widely depending on how and where they are paid for, so while this type captures some of the variables, costs should be used with caution by consumers of this schema's markup. DrugCostCategory: URIRef # Enumerated categories of medical drug costs. DrugLegalStatus: URIRef # The legal availability status of a medical drug. DrugPregnancyCategory: URIRef # Categories that represent an assessment of the risk of fetal injury due to a drug or pharmaceutical used as directed by the mother during pregnancy. DrugPrescriptionStatus: URIRef # Indicates whether this drug is available by prescription or over-the-counter. DrugStrength: URIRef # A specific strength in which a medical drug is available in a specific country. DryCleaningOrLaundry: URIRef # A dry-cleaning business. 
Duration: URIRef # Quantity: Duration (use [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601)). EBook: URIRef # Book format: Ebook. EPRelease: URIRef # EPRelease. EUEnergyEfficiencyCategoryA: URIRef # Represents EU Energy Efficiency Class A as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryA1Plus: URIRef # Represents EU Energy Efficiency Class A+ as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryA2Plus: URIRef # Represents EU Energy Efficiency Class A++ as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryA3Plus: URIRef # Represents EU Energy Efficiency Class A+++ as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryB: URIRef # Represents EU Energy Efficiency Class B as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryC: URIRef # Represents EU Energy Efficiency Class C as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryD: URIRef # Represents EU Energy Efficiency Class D as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryE: URIRef # Represents EU Energy Efficiency Class E as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryF: URIRef # Represents EU Energy Efficiency Class F as defined in EU energy labeling regulations. EUEnergyEfficiencyCategoryG: URIRef # Represents EU Energy Efficiency Class G as defined in EU energy labeling regulations. EUEnergyEfficiencyEnumeration: URIRef # Enumerates the EU energy efficiency classes A-G as well as A+, A++, and A+++ as defined in EU directive 2017/1369. Ear: URIRef # Ear function assessment with clinical examination. EatAction: URIRef # The act of swallowing solid objects. EditedOrCroppedContent: URIRef # Content coded 'edited or cropped content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'edited or cropped content': The video has been edited or rearranged. 
This category applies to time edits, including editing multiple videos together to alter the story being told or editing out large portions from a video. For an [[ImageObject]] to be 'edited or cropped content': Presenting a part of an image from a larger whole to mislead the viewer. For an [[ImageObject]] with embedded text to be 'edited or cropped content': Presenting a part of an image from a larger whole to mislead the viewer. For an [[AudioObject]] to be 'edited or cropped content': The audio has been edited or rearranged. This category applies to time edits, including editing multiple audio clips together to alter the story being told or editing out large portions from the recording. EducationEvent: URIRef # Event type: Education event. EducationalAudience: URIRef # An EducationalAudience. EducationalOccupationalCredential: URIRef # An educational or occupational credential. A diploma, academic degree, certification, qualification, badge, etc., that may be awarded to a person or other entity that meets the requirements defined by the credentialer. EducationalOccupationalProgram: URIRef # A program offered by an institution which determines the learning progress to achieve an outcome, usually a credential like a degree or certificate. This would define a discrete set of opportunities (e.g., job, courses) that together constitute a program with a clear start, end, set of requirements, and transition to a new occupational opportunity (e.g., a job), or sometimes a higher educational opportunity (e.g., an advanced degree). EducationalOrganization: URIRef # An educational organization. EffectivenessHealthAspect: URIRef # Content about the effectiveness-related aspects of a health topic. Electrician: URIRef # An electrician. ElectronicsStore: URIRef # An electronics store. ElementarySchool: URIRef # An elementary school. EmailMessage: URIRef # An email message. Embassy: URIRef # An embassy. 
Emergency: URIRef # A specific branch of medical science that deals with the evaluation and initial treatment of medical conditions caused by trauma or sudden illness. EmergencyService: URIRef # An emergency service, such as a fire station or ER. EmployeeRole: URIRef # A subclass of OrganizationRole used to describe employee relationships. EmployerAggregateRating: URIRef # An aggregate rating of an Organization related to its role as an employer. EmployerReview: URIRef # An [[EmployerReview]] is a review of an [[Organization]] regarding its role as an employer, written by a current or former employee of that organization. EmploymentAgency: URIRef # An employment agency. Endocrine: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of endocrine glands and their secretions. EndorseAction: URIRef # An agent approves/certifies/likes/supports/sanction an object. EndorsementRating: URIRef # An EndorsementRating is a rating that expresses some level of endorsement, for example inclusion in a "critic's pick" blog, a "Like" or "+1" on a social network. It can be considered the [[result]] of an [[EndorseAction]] in which the [[object]] of the action is rated positively by some [[agent]]. As is common elsewhere in schema.org, it is sometimes more useful to describe the results of such an action without explicitly describing the [[Action]]. An [[EndorsementRating]] may be part of a numeric scale or organized system, but this is not required: having an explicit type for indicating a positive, endorsement rating is particularly useful in the absence of numeric scales as it helps consumers understand that the rating is broadly positive. Energy: URIRef # Properties that take Energy as values are of the form '<Number> <Energy unit of measure>'. EnergyConsumptionDetails: URIRef # EnergyConsumptionDetails represents information related to the energy efficiency of a product that consumes energy. 
The information that can be provided is based on international regulations such as for example [EU directive 2017/1369](https://eur-lex.europa.eu/eli/reg/2017/1369/oj) for energy labeling and the [Energy labeling rule](https://www.ftc.gov/enforcement/rules/rulemaking-regulatory-reform-proceedings/energy-water-use-labeling-consumer) under the Energy Policy and Conservation Act (EPCA) in the US. EnergyEfficiencyEnumeration: URIRef # Enumerates energy efficiency levels (also known as "classes" or "ratings") and certifications that are part of several international energy efficiency standards. EnergyStarCertified: URIRef # Represents EnergyStar certification. EnergyStarEnergyEfficiencyEnumeration: URIRef # Used to indicate whether a product is EnergyStar certified. EngineSpecification: URIRef # Information about the engine of the vehicle. A vehicle can have multiple engines represented by multiple engine specification entities. EnrollingByInvitation: URIRef # Enrolling participants by invitation only. EntertainmentBusiness: URIRef # A business providing entertainment. EntryPoint: URIRef # An entry point, within some Web-based protocol. Enumeration: URIRef # Lists or enumerations—for example, a list of cuisines or music genres, etc. Episode: URIRef # A media episode (e.g. TV, radio, video game) which can be part of a series or season. Event: URIRef # An event happening at a certain time and location, such as a concert, lecture, or festival. Ticketing information may be added via the [[offers]] property. Repeated events may be structured as separate Event objects. EventAttendanceModeEnumeration: URIRef # An EventAttendanceModeEnumeration value is one of potentially several modes of organising an event, relating to whether it is online or offline. EventCancelled: URIRef # The event has been cancelled. If the event has multiple startDate values, all are assumed to be cancelled. Either startDate or previousStartDate may be used to specify the event's cancelled date(s). 
EventMovedOnline: URIRef # Indicates that the event was changed to allow online participation. See [[eventAttendanceMode]] for specifics of whether it is now fully or partially online. EventPostponed: URIRef # The event has been postponed and no new date has been set. The event's previousStartDate should be set. EventRescheduled: URIRef # The event has been rescheduled. The event's previousStartDate should be set to the old date and the startDate should be set to the event's new date. (If the event has been rescheduled multiple times, the previousStartDate property may be repeated). EventReservation: URIRef # A reservation for an event like a concert, sporting event, or lecture.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. EventScheduled: URIRef # The event is taking place or has taken place on the startDate as scheduled. Use of this value is optional, as it is assumed by default. EventSeries: URIRef # A series of [[Event]]s. Included events can relate with the series using the [[superEvent]] property. An EventSeries is a collection of events that share some unifying characteristic. For example, "The Olympic Games" is a series, which is repeated regularly. The "2012 London Olympics" can be presented both as an [[Event]] in the series "Olympic Games", and as an [[EventSeries]] that included a number of sporting competitions as Events. The nature of the association between the events in an [[EventSeries]] can vary, but typical examples could include a thematic event series (e.g. topical meetups or classes), or a series of regular events that share a location, attendee group and/or organizers. EventSeries has been defined as a kind of Event to make it easy for publishers to use it in an Event context without worrying about which kinds of series are really event-like enough to call an Event. 
In general an EventSeries may seem more Event-like when the period of time is compact and when aspects such as location are fixed, but it may also sometimes prove useful to describe a longer-term series as an Event. EventStatusType: URIRef # EventStatusType is an enumeration type whose instances represent several states that an Event may be in. EventVenue: URIRef # An event venue. EvidenceLevelA: URIRef # Data derived from multiple randomized clinical trials or meta-analyses. EvidenceLevelB: URIRef # Data derived from a single randomized trial, or nonrandomized studies. EvidenceLevelC: URIRef # Only consensus opinion of experts, case studies, or standard-of-care. ExchangeRateSpecification: URIRef # A structured value representing exchange rate. ExchangeRefund: URIRef # Specifies that a refund can be done as an exchange for the same product. ExerciseAction: URIRef # The act of participating in exertive activity for the purposes of improving health and fitness. ExerciseGym: URIRef # A gym. ExercisePlan: URIRef # Fitness-related activity designed for a specific health-related purpose, including defined exercise routines as well as activity prescribed by a clinician. ExhibitionEvent: URIRef # Event type: Exhibition event, e.g. at a museum, library, archive, tradeshow, ... Eye: URIRef # Eye or ophtalmological function assessment with clinical examination. FAQPage: URIRef # A [[FAQPage]] is a [[WebPage]] presenting one or more "[Frequently asked questions](https://en.wikipedia.org/wiki/FAQ)" (see also [[QAPage]]). FDAcategoryA: URIRef # A designation by the US FDA signifying that adequate and well-controlled studies have failed to demonstrate a risk to the fetus in the first trimester of pregnancy (and there is no evidence of risk in later trimesters). FDAcategoryB: URIRef # A designation by the US FDA signifying that animal reproduction studies have failed to demonstrate a risk to the fetus and there are no adequate and well-controlled studies in pregnant women. 
FDAcategoryC: URIRef # A designation by the US FDA signifying that animal reproduction studies have shown an adverse effect on the fetus and there are no adequate and well-controlled studies in humans, but potential benefits may warrant use of the drug in pregnant women despite potential risks. FDAcategoryD: URIRef # A designation by the US FDA signifying that there is positive evidence of human fetal risk based on adverse reaction data from investigational or marketing experience or studies in humans, but potential benefits may warrant use of the drug in pregnant women despite potential risks. FDAcategoryX: URIRef # A designation by the US FDA signifying that studies in animals or humans have demonstrated fetal abnormalities and/or there is positive evidence of human fetal risk based on adverse reaction data from investigational or marketing experience, and the risks involved in use of the drug in pregnant women clearly outweigh potential benefits. FDAnotEvaluated: URIRef # A designation that the drug in question has not been assigned a pregnancy category designation by the US FDA. FMRadioChannel: URIRef # A radio channel that uses FM. FailedActionStatus: URIRef # An action that failed to complete. The action's error property and the HTTP return code contain more information about the failure. # False: URIRef # The boolean value false. FastFoodRestaurant: URIRef # A fast-food restaurant. Female: URIRef # The female gender. Festival: URIRef # Event type: Festival. FilmAction: URIRef # The act of capturing sound and moving images on film, video, or digitally. FinancialProduct: URIRef # A product provided to consumers and businesses by financial institutions such as banks, insurance companies, brokerage firms, consumer finance companies, and investment companies which comprise the financial services industry. FinancialService: URIRef # Financial services business. 
FindAction: URIRef # The act of finding an object.\n\nRelated actions:\n\n* [[SearchAction]]: FindAction is generally lead by a SearchAction, but not necessarily. FireStation: URIRef # A fire station. With firemen. Flexibility: URIRef # Physical activity that is engaged in to improve joint and muscle flexibility. Flight: URIRef # An airline flight. FlightReservation: URIRef # A reservation for air travel.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. Float: URIRef # Data type: Floating number. FloorPlan: URIRef # A FloorPlan is an explicit representation of a collection of similar accommodations, allowing the provision of common information (room counts, sizes, layout diagrams) and offers for rental or sale. In typical use, some [[ApartmentComplex]] has an [[accommodationFloorPlan]] which is a [[FloorPlan]]. A FloorPlan is always in the context of a particular place, either a larger [[ApartmentComplex]] or a single [[Apartment]]. The visual/spatial aspects of a floor plan (i.e. room layout, [see wikipedia](https://en.wikipedia.org/wiki/Floor_plan)) can be indicated using [[image]]. Florist: URIRef # A florist. 
FollowAction: URIRef # The act of forming a personal connection with someone/something (object) unidirectionally/asymmetrically to get updates polled from.\n\nRelated actions:\n\n* [[BefriendAction]]: Unlike BefriendAction, FollowAction implies that the connection is *not* necessarily reciprocal.\n* [[SubscribeAction]]: Unlike SubscribeAction, FollowAction implies that the follower acts as an active agent constantly/actively polling for updates.\n* [[RegisterAction]]: Unlike RegisterAction, FollowAction implies that the agent is interested in continuing receiving updates from the object.\n* [[JoinAction]]: Unlike JoinAction, FollowAction implies that the agent is interested in getting updates from the object.\n* [[TrackAction]]: Unlike TrackAction, FollowAction refers to the polling of updates of all aspects of animate objects rather than the location of inanimate objects (e.g. you track a package, but you don't follow it). FoodEstablishment: URIRef # A food-related business. FoodEstablishmentReservation: URIRef # A reservation to dine at a food-related business.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. FoodEvent: URIRef # Event type: Food event. FoodService: URIRef # A food service, like breakfast, lunch, or dinner. FourWheelDriveConfiguration: URIRef # Four-wheel drive is a transmission layout where the engine primarily drives two wheels with a part-time four-wheel drive capability. FreeReturn: URIRef # Specifies that product returns are free of charge for the customer. Friday: URIRef # The day of the week between Thursday and Saturday. FrontWheelDriveConfiguration: URIRef # Front-wheel drive is a transmission layout where the engine drives the front wheels. 
# One declaration per line (line breaks restored from the fused original).
FullRefund: URIRef  # Specifies that a refund can be done in the full amount the customer paid for the product
FundingAgency: URIRef  # A FundingAgency is an organization that implements one or more [[FundingScheme]]s and manages the granting process (via [[Grant]]s, typically [[MonetaryGrant]]s). A funding agency is not always required for grant funding, e.g. philanthropic giving, corporate sponsorship etc. Examples of funding agencies include ERC, REA, NIH, Bill and Melinda Gates Foundation...
FundingScheme: URIRef  # A FundingScheme combines organizational, project and policy aspects of grant-based funding that sets guidelines, principles and mechanisms to support other kinds of projects and activities. Funding is typically organized via [[Grant]] funding. Examples of funding schemes: Swiss Priority Programmes (SPPs); EU Framework 7 (FP7); Horizon 2020; the NIH-R01 Grant Program; Wellcome institutional strategic support fund. For large scale public sector funding, the management and administration of grant awards is often handled by other, dedicated, organizations - [[FundingAgency]]s such as ERC, REA, ...
Fungus: URIRef  # Pathogenic fungus.
FurnitureStore: URIRef  # A furniture store.
Game: URIRef  # The Game type represents things which are games. These are typically rule-governed recreational activities, e.g. role-playing games in which players assume the role of characters in a fictional setting.
GamePlayMode: URIRef  # Indicates whether this game is multi-player, co-op or single-player.
GameServer: URIRef  # Server that provides game interaction in a multiplayer game.
GameServerStatus: URIRef  # Status of a game server.
GardenStore: URIRef  # A garden store.
GasStation: URIRef  # A gas station.
Gastroenterologic: URIRef  # A specific branch of medical science that pertains to diagnosis and treatment of disorders of digestive system.
GatedResidenceCommunity: URIRef  # Residence type: Gated community.
GenderType: URIRef  # An enumeration of genders.
# One declaration per line (line breaks restored). GiveAction's description
# was split across two physical lines in the mangled original; the two
# fragments are rejoined into one comment here.
Gene: URIRef  # A discrete unit of inheritance which affects one or more biological traits (Source: [https://en.wikipedia.org/wiki/Gene](https://en.wikipedia.org/wiki/Gene)). Examples include FOXP2 (Forkhead box protein P2), SCARNA21 (small Cajal body-specific RNA 21), A- (agouti genotype).
GeneralContractor: URIRef  # A general contractor.
Genetic: URIRef  # A specific branch of medical science that pertains to hereditary transmission and the variation of inherited characteristics and disorders.
Genitourinary: URIRef  # Genitourinary system function assessment with clinical examination.
GeoCircle: URIRef  # A GeoCircle is a GeoShape representing a circular geographic area. As it is a GeoShape it provides the simple textual property 'circle', but also allows the combination of postalCode alongside geoRadius. The center of the circle can be indicated via the 'geoMidpoint' property, or more approximately using 'address', 'postalCode'.
GeoCoordinates: URIRef  # The geographic coordinates of a place or event.
GeoShape: URIRef  # The geographic shape of a place. A GeoShape can be described using several properties whose values are based on latitude/longitude pairs. Either whitespace or commas can be used to separate latitude and longitude; whitespace should be used when writing a list of several such points.
GeospatialGeometry: URIRef  # (Eventually to be defined as) a supertype of GeoShape designed to accommodate definitions from Geo-Spatial best practices.
Geriatric: URIRef  # A specific branch of medical science that is concerned with the diagnosis and treatment of diseases, debilities and provision of care to the aged.
GettingAccessHealthAspect: URIRef  # Content that discusses practical and policy aspects for getting access to specific kinds of healthcare (e.g. distribution mechanisms for vaccines).
GiveAction: URIRef  # The act of transferring ownership of an object to a destination. Reciprocal of TakeAction.\n\nRelated actions:\n\n* [[TakeAction]]: Reciprocal of GiveAction.\n* [[SendAction]]: Unlike SendAction, GiveAction implies that ownership is being transferred (e.g. I may send my laptop to you, but that doesn't mean I'm giving it to you).
GlutenFreeDiet: URIRef  # A diet exclusive of gluten.
GolfCourse: URIRef  # A golf course.
GovernmentBenefitsType: URIRef  # GovernmentBenefitsType enumerates several kinds of government benefits to support the COVID-19 situation. Note that this structure may not capture all benefits offered.
GovernmentBuilding: URIRef  # A government building.
GovernmentOffice: URIRef  # A government office—for example, an IRS or DMV office.
GovernmentOrganization: URIRef  # A governmental organization or agency.
GovernmentPermit: URIRef  # A permit issued by a government agency.
GovernmentService: URIRef  # A service provided by a government organization, e.g. food stamps, veterans benefits, etc.
Grant: URIRef  # A grant, typically financial or otherwise quantifiable, of resources. Typically a [[funder]] sponsors some [[MonetaryAmount]] to an [[Organization]] or [[Person]], sometimes not necessarily via a dedicated or long-lived [[Project]], resulting in one or more outputs, or [[fundedItem]]s. For financial sponsorship, indicate the [[funder]] of a [[MonetaryGrant]]. For non-financial support, indicate [[sponsor]] of [[Grant]]s of resources (e.g. office space). Grants support activities directed towards some agreed collective goals, often but not always organized as [[Project]]s. Long-lived projects are sometimes sponsored by a variety of grants over time, but it is also common for a project to be associated with a single grant. The amount of a [[Grant]] is represented using [[amount]] as a [[MonetaryAmount]].
GraphicNovel: URIRef  # Book format: GraphicNovel. May represent a bound collection of ComicIssue instances.
GroceryStore: URIRef  # A grocery store.
# One declaration per line (line breaks restored from the fused original).
GroupBoardingPolicy: URIRef  # The airline boards by groups based on check-in time, priority, etc.
Guide: URIRef  # [[Guide]] is a page or article that recommend specific products or services, or aspects of a thing for a user to consider. A [[Guide]] may represent a Buying Guide and detail aspects of products or services for a user to consider. A [[Guide]] may represent a Product Guide and recommend specific products or services. A [[Guide]] may represent a Ranked List and recommend specific products or services with ranking.
Gynecologic: URIRef  # A specific branch of medical science that pertains to the health care of women, particularly in the diagnosis and treatment of disorders affecting the female reproductive system.
HVACBusiness: URIRef  # A business that provide Heating, Ventilation and Air Conditioning services.
Hackathon: URIRef  # A [hackathon](https://en.wikipedia.org/wiki/Hackathon) event.
HairSalon: URIRef  # A hair salon.
HalalDiet: URIRef  # A diet conforming to Islamic dietary practices.
Hardcover: URIRef  # Book format: Hardcover.
HardwareStore: URIRef  # A hardware store.
Head: URIRef  # Head assessment with clinical examination.
HealthAndBeautyBusiness: URIRef  # Health and beauty.
HealthAspectEnumeration: URIRef  # HealthAspectEnumeration enumerates several aspects of health content online, each of which might be described using [[hasHealthAspect]] and [[HealthTopicContent]].
HealthCare: URIRef  # HealthCare: this is a benefit for health care.
HealthClub: URIRef  # A health club.
HealthInsurancePlan: URIRef  # A US-style health insurance plan, including PPOs, EPOs, and HMOs.
HealthPlanCostSharingSpecification: URIRef  # A description of costs to the patient under a given network or formulary.
HealthPlanFormulary: URIRef  # For a given health insurance plan, the specification for costs and coverage of prescription drugs.
HealthPlanNetwork: URIRef  # A US-style health insurance plan network.
# One declaration per line. In the mangled original the Hostel, Hotel and
# HotelRoom descriptions were split across physical lines, leaving bare
# prose lines ("See also the dedicated document...") that are syntax errors;
# those fragments are rejoined into their owning comments here.
# NOTE(review): the fragments are joined with a single space — the exact
# separator in the upstream generated file may differ (e.g. a literal \n);
# confirm against rdflib's generated _SDO.py if byte-identity matters.
HealthTopicContent: URIRef  # [[HealthTopicContent]] is [[WebContent]] that is about some aspect of a health topic, e.g. a condition, its symptoms or treatments. Such content may be comprised of several parts or sections and use different types of media. Multiple instances of [[WebContent]] (and hence [[HealthTopicContent]]) can be related using [[hasPart]] / [[isPartOf]] where there is some kind of content hierarchy, and their content described with [[about]] and [[mentions]] e.g. building upon the existing [[MedicalCondition]] vocabulary.
HearingImpairedSupported: URIRef  # Uses devices to support users with hearing impairments.
Hematologic: URIRef  # A specific branch of medical science that pertains to diagnosis and treatment of disorders of blood and blood producing organs.
HighSchool: URIRef  # A high school.
HinduDiet: URIRef  # A diet conforming to Hindu dietary practices, in particular, beef-free.
HinduTemple: URIRef  # A Hindu temple.
HobbyShop: URIRef  # A store that sells materials useful or necessary for various hobbies.
HomeAndConstructionBusiness: URIRef  # A construction business.\n\nA HomeAndConstructionBusiness is a [[LocalBusiness]] that provides services around homes and buildings.\n\nAs a [[LocalBusiness]] it can be described as a [[provider]] of one or more [[Service]]\(s).
HomeGoodsStore: URIRef  # A home goods store.
Homeopathic: URIRef  # A system of medicine based on the principle that a disease can be cured by a substance that produces similar symptoms in healthy people.
Hospital: URIRef  # A hospital.
Hostel: URIRef  # A hostel - cheap accommodation, often in shared dormitories. See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
Hotel: URIRef  # A hotel is an establishment that provides lodging paid on a short-term basis (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Hotel). See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
HotelRoom: URIRef  # A hotel room is a single room in a hotel. See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
House: URIRef  # A house is a building or structure that has the ability to be occupied for habitation by humans or other creatures (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/House).
HousePainter: URIRef  # A house painting service.
HowItWorksHealthAspect: URIRef  # Content that discusses and explains how a particular health-related topic works, e.g. in terms of mechanisms and underlying science.
HowOrWhereHealthAspect: URIRef  # Information about how or where to find a topic. Also may contain location data that can be used for where to look for help if the topic is observed.
HowTo: URIRef  # Instructions that explain how to achieve a result by performing a sequence of steps.
HowToDirection: URIRef  # A direction indicating a single action to do in the instructions for how to achieve a result.
HowToItem: URIRef  # An item used as either a tool or supply when performing the instructions for how to to achieve a result.
HowToSection: URIRef  # A sub-grouping of steps in the instructions for how to achieve a result (e.g. steps for making a pie crust within a pie recipe).
HowToStep: URIRef  # A step in the instructions for how to achieve a result. It is an ordered list with HowToDirection and/or HowToTip items.
HowToSupply: URIRef  # A supply consumed when performing the instructions for how to achieve a result.
HowToTip: URIRef  # An explanation in the instructions for how to achieve a result. It provides supplementary information about a technique, supply, author's preference, etc. It can explain what could be done, or what should not be done, but doesn't specify what should be done (see HowToDirection).
HowToTool: URIRef  # A tool used (but not consumed) when performing instructions for how to achieve a result.
# One declaration per line (line breaks restored from the fused original).
HyperToc: URIRef  # A HyperToc represents a hypertext table of contents for complex media objects, such as [[VideoObject]], [[AudioObject]]. Items in the table of contents are indicated using the [[tocEntry]] property, and typed [[HyperTocEntry]]. For cases where the same larger work is split into multiple files, [[associatedMedia]] can be used on individual [[HyperTocEntry]] items.
HyperTocEntry: URIRef  # A HyperToEntry is an item within a [[HyperToc]], which represents a hypertext table of contents for complex media objects, such as [[VideoObject]], [[AudioObject]]. The media object itself is indicated using [[associatedMedia]]. Each section of interest within that content can be described with a [[HyperTocEntry]], with associated [[startOffset]] and [[endOffset]]. When several entries are all from the same file, [[associatedMedia]] is used on the overarching [[HyperTocEntry]]; if the content has been split into multiple files, they can be referenced using [[associatedMedia]] on each [[HyperTocEntry]].
IceCreamShop: URIRef  # An ice cream shop.
IgnoreAction: URIRef  # The act of intentionally disregarding the object. An agent ignores an object.
ImageGallery: URIRef  # Web page type: Image gallery page.
ImageObject: URIRef  # An image file.
ImageObjectSnapshot: URIRef  # A specific and exact (byte-for-byte) version of an [[ImageObject]]. Two byte-for-byte identical files, for the purposes of this type, considered identical. If they have different embedded metadata (e.g. XMP, EXIF) the files will differ. Different external facts about the files, e.g. creator or dateCreated that aren't represented in their actual content, do not affect this notion of identity.
ImagingTest: URIRef  # Any medical imaging modality typically used for diagnostic purposes.
InForce: URIRef  # Indicates that a legislation is in force.
InStock: URIRef  # Indicates that the item is in stock.
InStoreOnly: URIRef  # Indicates that the item is available only at physical locations.
# One declaration per line (line breaks restored from the fused original).
IndividualProduct: URIRef  # A single, identifiable product instance (e.g. a laptop with a particular serial number).
Infectious: URIRef  # Something in medical science that pertains to infectious diseases i.e caused by bacterial, viral, fungal or parasitic infections.
InfectiousAgentClass: URIRef  # Classes of agents or pathogens that transmit infectious diseases. Enumerated type.
InfectiousDisease: URIRef  # An infectious disease is a clinically evident human disease resulting from the presence of pathogenic microbial agents, like pathogenic viruses, pathogenic bacteria, fungi, protozoa, multicellular parasites, and prions. To be considered an infectious disease, such pathogens are known to be able to cause this disease.
InformAction: URIRef  # The act of notifying someone of information pertinent to them, with no expectation of a response.
IngredientsHealthAspect: URIRef  # Content discussing ingredients-related aspects of a health topic.
InsertAction: URIRef  # The act of adding at a specific location in an ordered collection.
InstallAction: URIRef  # The act of installing an application.
Installment: URIRef  # Represents the installment pricing component of the total price for an offered product.
InsuranceAgency: URIRef  # An Insurance agency.
Intangible: URIRef  # A utility class that serves as the umbrella for a number of 'intangible' things such as quantities, structured values, etc.
Integer: URIRef  # Data type: Integer.
InteractAction: URIRef  # The act of interacting with another person or organization.
InteractionCounter: URIRef  # A summary of how users have interacted with this CreativeWork. In most cases, authors will use a subtype to specify the specific type of interaction.
InternationalTrial: URIRef  # An international trial.
InternetCafe: URIRef  # An internet cafe.
InvestmentFund: URIRef  # A company or fund that gathers capital from a number of investors to create a pool of money that is then re-invested into stocks, bonds and other assets.
# One declaration per line (line breaks restored from the fused original).
InvestmentOrDeposit: URIRef  # A type of financial product that typically requires the client to transfer funds to a financial service in return for potential beneficial financial return.
InviteAction: URIRef  # The act of asking someone to attend an event. Reciprocal of RsvpAction.
Invoice: URIRef  # A statement of the money due for goods or services; a bill.
InvoicePrice: URIRef  # Represents the invoice price of an offered product.
ItemAvailability: URIRef  # A list of possible product availability options.
ItemList: URIRef  # A list of items of any sort—for example, Top 10 Movies About Weathermen, or Top 100 Party Songs. Not to be confused with HTML lists, which are often used only for formatting.
ItemListOrderAscending: URIRef  # An ItemList ordered with lower values listed first.
ItemListOrderDescending: URIRef  # An ItemList ordered with higher values listed first.
ItemListOrderType: URIRef  # Enumerated for values for itemListOrder for indicating how an ordered ItemList is organized.
ItemListUnordered: URIRef  # An ItemList ordered with no explicit order.
ItemPage: URIRef  # A page devoted to a single item, such as a particular product or hotel.
JewelryStore: URIRef  # A jewelry store.
JobPosting: URIRef  # A listing that describes a job opening in a certain organization.
JoinAction: URIRef  # An agent joins an event/group with participants/friends at a location.\n\nRelated actions:\n\n* [[RegisterAction]]: Unlike RegisterAction, JoinAction refers to joining a group/team of people.\n* [[SubscribeAction]]: Unlike SubscribeAction, JoinAction does not imply that you'll be receiving updates.\n* [[FollowAction]]: Unlike FollowAction, JoinAction does not imply that you'll be polling for updates.
Joint: URIRef  # The anatomical location at which two or more bones make contact.
KosherDiet: URIRef  # A diet conforming to Jewish dietary practices.
# One declaration per line (line breaks restored from the fused original).
LaboratoryScience: URIRef  # A medical science pertaining to chemical, hematological, immunologic, microscopic, or bacteriological diagnostic analyses or research.
LakeBodyOfWater: URIRef  # A lake (for example, Lake Pontrachain).
Landform: URIRef  # A landform or physical feature. Landform elements include mountains, plains, lakes, rivers, seascape and oceanic waterbody interface features such as bays, peninsulas, seas and so forth, including sub-aqueous terrain features such as submersed mountain ranges, volcanoes, and the great ocean basins.
LandmarksOrHistoricalBuildings: URIRef  # An historical landmark or building.
Language: URIRef  # Natural languages such as Spanish, Tamil, Hindi, English, etc. Formal language code tags expressed in [BCP 47](https://en.wikipedia.org/wiki/IETF_language_tag) can be used via the [[alternateName]] property. The Language type previously also covered programming languages such as Scheme and Lisp, which are now best represented using [[ComputerLanguage]].
LaserDiscFormat: URIRef  # LaserDiscFormat.
LearningResource: URIRef  # The LearningResource type can be used to indicate [[CreativeWork]]s (whether physical or digital) that have a particular and explicit orientation towards learning, education, skill acquisition, and other educational purposes. [[LearningResource]] is expected to be used as an addition to a primary type such as [[Book]], [[VideoObject]], [[Product]] etc. [[EducationEvent]] serves a similar purpose for event-like things (e.g. a [[Trip]]). A [[LearningResource]] may be created as a result of an [[EducationEvent]], for example by recording one.
LeaveAction: URIRef  # An agent leaves an event / group with participants/friends at a location.\n\nRelated actions:\n\n* [[JoinAction]]: The antonym of LeaveAction.\n* [[UnRegisterAction]]: Unlike UnRegisterAction, LeaveAction implies leaving a group/team of people rather than a service.
# One declaration per line (line breaks restored from the fused original).
LeftHandDriving: URIRef  # The steering position is on the left side of the vehicle (viewed from the main direction of driving).
LegalForceStatus: URIRef  # A list of possible statuses for the legal force of a legislation.
LegalService: URIRef  # A LegalService is a business that provides legally-oriented services, advice and representation, e.g. law firms.\n\nAs a [[LocalBusiness]] it can be described as a [[provider]] of one or more [[Service]]\(s).
LegalValueLevel: URIRef  # A list of possible levels for the legal validity of a legislation.
Legislation: URIRef  # A legal document such as an act, decree, bill, etc. (enforceable or not) or a component of a legal act (like an article).
LegislationObject: URIRef  # A specific object or file containing a Legislation. Note that the same Legislation can be published in multiple files. For example, a digitally signed PDF, a plain PDF and an HTML version.
LegislativeBuilding: URIRef  # A legislative building—for example, the state capitol.
LeisureTimeActivity: URIRef  # Any physical activity engaged in for recreational purposes. Examples may include ballroom dancing, roller skating, canoeing, fishing, etc.
LendAction: URIRef  # The act of providing an object under an agreement that it will be returned at a later date. Reciprocal of BorrowAction.\n\nRelated actions:\n\n* [[BorrowAction]]: Reciprocal of LendAction.
Library: URIRef  # A library.
LibrarySystem: URIRef  # A [[LibrarySystem]] is a collaborative system amongst several libraries.
LifestyleModification: URIRef  # A process of care involving exercise, changes to diet, fitness routines, and other lifestyle changes aimed at improving a health condition.
Ligament: URIRef  # A short band of tough, flexible, fibrous connective tissue that functions to connect multiple bones, cartilages, and structurally support joints.
LikeAction: URIRef  # The act of expressing a positive sentiment about the object. An agent likes an object (a proposition, topic or theme) with participants.
# One declaration per line (line breaks restored from the fused original).
LimitedAvailability: URIRef  # Indicates that the item has limited availability.
LimitedByGuaranteeCharity: URIRef  # LimitedByGuaranteeCharity: Non-profit type referring to a charitable company that is limited by guarantee (UK).
LinkRole: URIRef  # A Role that represents a Web link e.g. as expressed via the 'url' property. Its linkRelationship property can indicate URL-based and plain textual link types e.g. those in IANA link registry or others such as 'amphtml'. This structure provides a placeholder where details from HTML's link element can be represented outside of HTML, e.g. in JSON-LD feeds.
LiquorStore: URIRef  # A shop that sells alcoholic drinks such as wine, beer, whisky and other spirits.
ListItem: URIRef  # An list item, e.g. a step in a checklist or how-to description.
ListPrice: URIRef  # Represents the list price (the price a product is actually advertised for) of an offered product.
ListenAction: URIRef  # The act of consuming audio content.
LiteraryEvent: URIRef  # Event type: Literary event.
LiveAlbum: URIRef  # LiveAlbum.
LiveBlogPosting: URIRef  # A [[LiveBlogPosting]] is a [[BlogPosting]] intended to provide a rolling textual coverage of an ongoing event through continuous updates.
LivingWithHealthAspect: URIRef  # Information about coping or life related to the topic.
LoanOrCredit: URIRef  # A financial product for the loaning of an amount of money, or line of credit, under agreed terms and charges.
LocalBusiness: URIRef  # A particular physical business or branch of an organization. Examples of LocalBusiness include a restaurant, a particular branch of a restaurant chain, a branch of a bank, a medical practice, a club, a bowling alley, etc.
LocationFeatureSpecification: URIRef  # Specifies a location feature by providing a structured value representing a feature of an accommodation as a property-value pair of varying degrees of formality.
LockerDelivery: URIRef  # A DeliveryMethod in which an item is made available via locker.
# One declaration per line (line breaks restored from the fused original).
Locksmith: URIRef  # A locksmith.
LodgingBusiness: URIRef  # A lodging business, such as a motel, hotel, or inn.
LodgingReservation: URIRef  # A reservation for lodging at a hotel, motel, inn, etc.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations.
Longitudinal: URIRef  # Unlike cross-sectional studies, longitudinal studies track the same people, and therefore the differences observed in those people are less likely to be the result of cultural differences across generations. Longitudinal studies are also used in medicine to uncover predictors of certain diseases.
LoseAction: URIRef  # The act of being defeated in a competitive activity.
LowCalorieDiet: URIRef  # A diet focused on reduced calorie intake.
LowFatDiet: URIRef  # A diet focused on reduced fat and cholesterol intake.
LowLactoseDiet: URIRef  # A diet appropriate for people with lactose intolerance.
LowSaltDiet: URIRef  # A diet focused on reduced sodium intake.
Lung: URIRef  # Lung and respiratory system clinical examination.
LymphaticVessel: URIRef  # A type of blood vessel that specifically carries lymph fluid unidirectionally toward the heart.
MRI: URIRef  # Magnetic resonance imaging.
MSRP: URIRef  # Represents the manufacturer suggested retail price ("MSRP") of an offered product.
Male: URIRef  # The male gender.
Manuscript: URIRef  # A book, document, or piece of music written by hand rather than typed or printed.
Map: URIRef  # A map.
MapCategoryType: URIRef  # An enumeration of several kinds of Map.
MarryAction: URIRef  # The act of marrying a person.
Mass: URIRef  # Properties that take Mass as values are of the form '<Number> <Mass unit of measure>'. E.g., '7 kg'.
MathSolver: URIRef  # A math solver which is capable of solving a subset of mathematical problems.
# One declaration per line. The MediaReview and MedicalCause descriptions
# were split across physical line breaks in the mangled original; their
# fragments are rejoined into single comments here.
MaximumDoseSchedule: URIRef  # The maximum dosing schedule considered safe for a drug or supplement as recommended by an authority or by the drug/supplement's manufacturer. Capture the recommending authority in the recognizingAuthority property of MedicalEntity.
MayTreatHealthAspect: URIRef  # Related topics may be treated by a Topic.
MeasurementTypeEnumeration: URIRef  # Enumeration of common measurement types (or dimensions), for example "chest" for a person, "inseam" for pants, "gauge" for screws, or "wheel" for bicycles.
MediaGallery: URIRef  # Web page type: Media gallery page. A mixed-media page that can contains media such as images, videos, and other multimedia.
MediaManipulationRatingEnumeration: URIRef  # Codes for use with the [[mediaAuthenticityCategory]] property, indicating the authenticity of a media object (in the context of how it was published or shared). In general these codes are not mutually exclusive, although some combinations (such as 'original' versus 'transformed', 'edited' and 'staged') would be contradictory if applied in the same [[MediaReview]]. Note that the application of these codes is with regard to a piece of media shared or published in a particular context.
MediaObject: URIRef  # A media object, such as an image, video, or audio object embedded in a web page or a downloadable dataset i.e. DataDownload. Note that a creative work may have many media objects associated with it on the same web page. For example, a page about a single song (MusicRecording) may have a music video (VideoObject), and a high and low bandwidth audio stream (2 AudioObject's).
MediaReview: URIRef  # A [[MediaReview]] is a more specialized form of Review dedicated to the evaluation of media content online, typically in the context of fact-checking and misinformation. For more general reviews of media in the broader sense, use [[UserReview]], [[CriticReview]] or other [[Review]] types. This definition is a work in progress. While the [[MediaManipulationRatingEnumeration]] list reflects significant community review amongst fact-checkers and others working to combat misinformation, the specific structures for representing media objects, their versions and publication context, is still evolving. Similarly, best practices for the relationship between [[MediaReview]] and [[ClaimReview]] markup has not yet been finalized.
MediaReviewItem: URIRef  # Represents an item or group of closely related items treated as a unit for the sake of evaluation in a [[MediaReview]]. Authorship etc. apply to the items rather than to the curation/grouping or reviewing party.
MediaSubscription: URIRef  # A subscription which allows a user to access media including audio, video, books, etc.
MedicalAudience: URIRef  # Target audiences for medical web pages.
MedicalAudienceType: URIRef  # Target audiences types for medical web pages. Enumerated type.
MedicalBusiness: URIRef  # A particular physical or virtual business of an organization for medical purposes. Examples of MedicalBusiness include differents business run by health professionals.
MedicalCause: URIRef  # The causative agent(s) that are responsible for the pathophysiologic process that eventually results in a medical condition, symptom or sign. In this schema, unless otherwise specified this is meant to be the proximate cause of the medical condition, symptom or sign. The proximate cause is defined as the causative agent that most directly results in the medical condition, symptom or sign. For example, the HIV virus could be considered a cause of AIDS. Or in a diagnostic context, if a patient fell and sustained a hip fracture and two days later sustained a pulmonary embolism which eventuated in a cardiac arrest, the cause of the cardiac arrest (the proximate cause) would be the pulmonary embolism and not the fall. Medical causes can include cardiovascular, chemical, dermatologic, endocrine, environmental, gastroenterologic, genetic, hematologic, gynecologic, iatrogenic, infectious, musculoskeletal, neurologic, nutritional, obstetric, oncologic, otolaryngologic, pharmacologic, psychiatric, pulmonary, renal, rheumatologic, toxic, traumatic, or urologic causes; medical conditions can be causes as well.
MedicalClinic: URIRef  # A facility, often associated with a hospital or medical school, that is devoted to the specific diagnosis and/or healthcare. Previously limited to outpatients but with evolution it may be open to inpatients as well.
MedicalCode: URIRef  # A code for a medical entity.
MedicalCondition: URIRef  # Any condition of the human body that affects the normal functioning of a person, whether physically or mentally. Includes diseases, injuries, disabilities, disorders, syndromes, etc.
MedicalConditionStage: URIRef  # A stage of a medical condition, such as 'Stage IIIa'.
MedicalContraindication: URIRef  # A condition or factor that serves as a reason to withhold a certain medical therapy. Contraindications can be absolute (there are no reasonable circumstances for undertaking a course of action) or relative (the patient is at higher risk of complications, but that these risks may be outweighed by other considerations or mitigated by other measures).
MedicalDevice: URIRef  # Any object used in a medical capacity, such as to diagnose or treat a patient.
MedicalDevicePurpose: URIRef  # Categories of medical devices, organized by the purpose or intended use of the device.
MedicalEntity: URIRef  # The most generic type of entity related to health and the practice of medicine.
MedicalEnumeration: URIRef  # Enumerations related to health and the practice of medicine: A concept that is used to attribute a quality to another concept, as a qualifier, a collection of items or a listing of all of the elements of a set in medicine practice.
# One declaration per line (line breaks restored from the fused original).
MedicalEvidenceLevel: URIRef  # Level of evidence for a medical guideline. Enumerated type.
MedicalGuideline: URIRef  # Any recommendation made by a standard society (e.g. ACC/AHA) or consensus statement that denotes how to diagnose and treat a particular condition. Note: this type should be used to tag the actual guideline recommendation; if the guideline recommendation occurs in a larger scholarly article, use MedicalScholarlyArticle to tag the overall article, not this type. Note also: the organization making the recommendation should be captured in the recognizingAuthority base property of MedicalEntity.
MedicalGuidelineContraindication: URIRef  # A guideline contraindication that designates a process as harmful and where quality of the data supporting the contraindication is sound.
MedicalGuidelineRecommendation: URIRef  # A guideline recommendation that is regarded as efficacious and where quality of the data supporting the recommendation is sound.
MedicalImagingTechnique: URIRef  # Any medical imaging modality typically used for diagnostic purposes. Enumerated type.
MedicalIndication: URIRef  # A condition or factor that indicates use of a medical therapy, including signs, symptoms, risk factors, anatomical states, etc.
MedicalIntangible: URIRef  # A utility class that serves as the umbrella for a number of 'intangible' things in the medical space.
MedicalObservationalStudy: URIRef  # An observational study is a type of medical study that attempts to infer the possible effect of a treatment through observation of a cohort of subjects over a period of time. In an observational study, the assignment of subjects into treatment groups versus control groups is outside the control of the investigator. This is in contrast with controlled studies, such as the randomized controlled trials represented by MedicalTrial, where each subject is randomly assigned to a treatment group or a control group before the start of the treatment.
MedicalObservationalStudyDesign: URIRef # Design models for observational medical studies. Enumerated type. MedicalOrganization: URIRef # A medical organization (physical or not), such as hospital, institution or clinic. MedicalProcedure: URIRef # A process of care used in either a diagnostic, therapeutic, preventive or palliative capacity that relies on invasive (surgical), non-invasive, or other techniques. MedicalProcedureType: URIRef # An enumeration that describes different types of medical procedures. MedicalResearcher: URIRef # Medical researchers. MedicalRiskCalculator: URIRef # A complex mathematical calculation requiring an online calculator, used to assess prognosis. Note: use the url property of Thing to record any URLs for online calculators. MedicalRiskEstimator: URIRef # Any rule set or interactive tool for estimating the risk of developing a complication or condition. MedicalRiskFactor: URIRef # A risk factor is anything that increases a person's likelihood of developing or contracting a disease, medical condition, or complication. MedicalRiskScore: URIRef # A simple system that adds up the number of risk factors to yield a score that is associated with prognosis, e.g. CHAD score, TIMI risk score. MedicalScholarlyArticle: URIRef # A scholarly article in the medical domain. MedicalSign: URIRef # Any physical manifestation of a person's medical condition discoverable by objective diagnostic tests or physical examination. MedicalSignOrSymptom: URIRef # Any feature associated or not with a medical condition. In medicine a symptom is generally subjective while a sign is objective. MedicalSpecialty: URIRef # Any specific branch of medical science or practice. Medical specialities include clinical specialties that pertain to particular organ systems and their respective disease states, as well as allied health specialties. Enumerated type. 
MedicalStudy: URIRef # A medical study is an umbrella type covering all kinds of research studies relating to human medicine or health, including observational studies and interventional trials and registries, randomized, controlled or not. When the specific type of study is known, use one of the extensions of this type, such as MedicalTrial or MedicalObservationalStudy. Also, note that this type should be used to mark up data that describes the study itself; to tag an article that publishes the results of a study, use MedicalScholarlyArticle. Note: use the code property of MedicalEntity to store study IDs, e.g. clinicaltrials.gov ID. MedicalStudyStatus: URIRef # The status of a medical study. Enumerated type. MedicalSymptom: URIRef # Any complaint sensed and expressed by the patient (therefore defined as subjective) like stomachache, lower-back pain, or fatigue. MedicalTest: URIRef # Any medical test, typically performed for diagnostic purposes. MedicalTestPanel: URIRef # Any collection of tests commonly ordered together. MedicalTherapy: URIRef # Any medical intervention designed to prevent, treat, and cure human diseases and medical conditions, including both curative and palliative therapies. Medical therapies are typically processes of care relying upon pharmacotherapy, behavioral therapy, supportive therapy (with fluid or nutrition for example), or detoxification (e.g. hemodialysis) aimed at improving or preventing a health condition. MedicalTrial: URIRef # A medical trial is a type of medical study that uses scientific process used to compare the safety and efficacy of medical therapies or medical procedures. In general, medical trials are controlled and subjects are allocated at random to the different treatment and/or control groups. MedicalTrialDesign: URIRef # Design models for medical trials. Enumerated type. MedicalWebPage: URIRef # A web page that provides medical information. MedicineSystem: URIRef # Systems of medical practice. 
# NOTE(review): restored from line-joined corruption; MeetingRoom's two-paragraph
# description (the "See also ..." sentence had spilled onto a bare, un-commented
# line, a syntax error) is rejoined into a single trailing comment.
MeetingRoom: URIRef  # A meeting room, conference room, or conference hall is a room provided for singular events such as business conferences and meetings (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Conference_hall). See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
MensClothingStore: URIRef  # A men's clothing store.
Menu: URIRef  # A structured representation of food or drink items available from a FoodEstablishment.
MenuItem: URIRef  # A food or drink item listed in a menu or menu section.
MenuSection: URIRef  # A sub-grouping of food or drink items in a menu. E.g. courses (such as 'Dinner', 'Breakfast', etc.), specific type of dishes (such as 'Meat', 'Vegan', 'Drinks', etc.), or some other classification made by the menu provider.
MerchantReturnEnumeration: URIRef  # Enumerates several kinds of product return policies.
MerchantReturnFiniteReturnWindow: URIRef  # Specifies that there is a finite window for product returns.
MerchantReturnNotPermitted: URIRef  # Specifies that product returns are not permitted.
MerchantReturnPolicy: URIRef  # A MerchantReturnPolicy provides information about product return policies associated with an [[Organization]], [[Product]], or [[Offer]].
MerchantReturnPolicySeasonalOverride: URIRef  # A seasonal override of a return policy, for example used for holidays.
MerchantReturnUnlimitedWindow: URIRef  # Specifies that there is an unlimited window for product returns.
MerchantReturnUnspecified: URIRef  # Specifies that a product return policy is not provided.
Message: URIRef  # A single message from a sender to one or more organizations or people.
MiddleSchool: URIRef  # A middle school (typically for children aged around 11-14, although this varies somewhat).
Midwifery: URIRef  # A nurse-like health profession that deals with pregnancy, childbirth, and the postpartum period (including care of the newborn), besides sexual and reproductive health of women throughout their lives.
MinimumAdvertisedPrice: URIRef  # Represents the minimum advertised price ("MAP") (as dictated by the manufacturer) of an offered product.
# NOTE(review): restored from line-joined corruption; Motel's two-paragraph
# description (the "See also ..." sentence had spilled onto a bare,
# un-commented line, a syntax error) is rejoined into a single trailing comment.
MisconceptionsHealthAspect: URIRef  # Content about common misconceptions and myths that are related to a topic.
MixedEventAttendanceMode: URIRef  # MixedEventAttendanceMode - an event that is conducted as a combination of both offline and online modes.
MixtapeAlbum: URIRef  # MixtapeAlbum.
MobileApplication: URIRef  # A software application designed specifically to work well on a mobile device such as a telephone.
MobilePhoneStore: URIRef  # A store that sells mobile phones and related accessories.
MolecularEntity: URIRef  # Any constitutionally or isotopically distinct atom, molecule, ion, ion pair, radical, radical ion, complex, conformer etc., identifiable as a separately distinguishable entity.
Monday: URIRef  # The day of the week between Sunday and Tuesday.
MonetaryAmount: URIRef  # A monetary value or range. This type can be used to describe an amount of money such as $50 USD, or a range as in describing a bank account being suitable for a balance between £1,000 and £1,000,000 GBP, or the value of a salary, etc. It is recommended to use [[PriceSpecification]] Types to describe the price of an Offer, Invoice, etc.
MonetaryAmountDistribution: URIRef  # A statistical distribution of monetary amounts.
MonetaryGrant: URIRef  # A monetary grant.
MoneyTransfer: URIRef  # The act of transferring money from one place to another place. This may occur electronically or physically.
MortgageLoan: URIRef  # A loan in which property or real estate is used as collateral. (A loan securitized against some real estate).
Mosque: URIRef  # A mosque.
Motel: URIRef  # A motel. See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
Motorcycle: URIRef  # A motorcycle or motorbike is a single-track, two-wheeled motor vehicle.
MotorcycleDealer: URIRef  # A motorcycle dealer.
MotorcycleRepair: URIRef  # A motorcycle repair shop.
MotorizedBicycle: URIRef  # A motorized bicycle is a bicycle with an attached motor used to power the vehicle, or to assist with pedaling.
Mountain: URIRef  # A mountain, like Mount Whitney or Mount Everest.
MoveAction: URIRef  # The act of an agent relocating to a place.\n\nRelated actions:\n\n* [[TransferAction]]: Unlike TransferAction, the subject of the move is a living Person or Organization rather than an inanimate object.
Movie: URIRef  # A movie.
MovieClip: URIRef  # A short segment/part of a movie.
MovieRentalStore: URIRef  # A movie rental store.
MovieSeries: URIRef  # A series of movies. Included movies can be indicated with the hasPart property.
MovieTheater: URIRef  # A movie theater.
MovingCompany: URIRef  # A moving company.
MultiCenterTrial: URIRef  # A trial that takes place at multiple centers.
MultiPlayer: URIRef  # Play mode: MultiPlayer. Requiring or allowing multiple human players to play simultaneously.
MulticellularParasite: URIRef  # Multicellular parasite that causes an infection.
Muscle: URIRef  # A muscle is an anatomical structure consisting of a contractile form of tissue that animals use to effect movement.
Musculoskeletal: URIRef  # A specific branch of medical science that pertains to diagnosis and treatment of disorders of muscles, ligaments and skeletal system.
MusculoskeletalExam: URIRef  # Musculoskeletal system clinical examination.
Museum: URIRef  # A museum.
MusicAlbum: URIRef  # A collection of music tracks.
MusicAlbumProductionType: URIRef  # Classification of the album by it's type of content: soundtrack, live album, studio album, etc.
MusicAlbumReleaseType: URIRef  # The kind of release which this album is: single, EP or album.
# NOTE(review): restored from line-joined corruption — one declaration per line.
MusicComposition: URIRef  # A musical composition.
MusicEvent: URIRef  # Event type: Music event.
MusicGroup: URIRef  # A musical group, such as a band, an orchestra, or a choir. Can also be a solo musician.
MusicPlaylist: URIRef  # A collection of music tracks in playlist form.
MusicRecording: URIRef  # A music recording (track), usually a single song.
MusicRelease: URIRef  # A MusicRelease is a specific release of a music album.
MusicReleaseFormatType: URIRef  # Format of this release (the type of recording media used, ie. compact disc, digital media, LP, etc.).
MusicStore: URIRef  # A music store.
MusicVenue: URIRef  # A music venue.
MusicVideoObject: URIRef  # A music video file.
NGO: URIRef  # Organization: Non-governmental Organization.
NLNonprofitType: URIRef  # NLNonprofitType: Non-profit organization type originating from the Netherlands.
NailSalon: URIRef  # A nail salon.
Neck: URIRef  # Neck assessment with clinical examination.
Nerve: URIRef  # A common pathway for the electrochemical nerve impulses that are transmitted along each of the axons.
Neuro: URIRef  # Neurological system clinical examination.
Neurologic: URIRef  # A specific branch of medical science that studies the nerves and nervous system and its respective disease states.
NewCondition: URIRef  # Indicates that the item is new.
NewsArticle: URIRef  # A NewsArticle is an article whose content reports news, or provides background context and supporting materials for understanding the news. A more detailed overview of [schema.org News markup](/docs/news.html) is also available.
NewsMediaOrganization: URIRef  # A News/Media organization such as a newspaper or TV station.
Newspaper: URIRef  # A publication containing information about varied topics that are pertinent to general information, a geographic area, or a specific subject matter (i.e. business, culture, education). Often published daily.
NightClub: URIRef  # A nightclub or discotheque.
NoninvasiveProcedure: URIRef  # A type of medical procedure that involves noninvasive techniques.
Nonprofit501a: URIRef  # Nonprofit501a: Non-profit type referring to Farmers’ Cooperative Associations.
Nonprofit501c1: URIRef  # Nonprofit501c1: Non-profit type referring to Corporations Organized Under Act of Congress, including Federal Credit Unions and National Farm Loan Associations.
Nonprofit501c10: URIRef  # Nonprofit501c10: Non-profit type referring to Domestic Fraternal Societies and Associations.
Nonprofit501c11: URIRef  # Nonprofit501c11: Non-profit type referring to Teachers' Retirement Fund Associations.
Nonprofit501c12: URIRef  # Nonprofit501c12: Non-profit type referring to Benevolent Life Insurance Associations, Mutual Ditch or Irrigation Companies, Mutual or Cooperative Telephone Companies.
Nonprofit501c13: URIRef  # Nonprofit501c13: Non-profit type referring to Cemetery Companies.
Nonprofit501c14: URIRef  # Nonprofit501c14: Non-profit type referring to State-Chartered Credit Unions, Mutual Reserve Funds.
Nonprofit501c15: URIRef  # Nonprofit501c15: Non-profit type referring to Mutual Insurance Companies or Associations.
Nonprofit501c16: URIRef  # Nonprofit501c16: Non-profit type referring to Cooperative Organizations to Finance Crop Operations.
Nonprofit501c17: URIRef  # Nonprofit501c17: Non-profit type referring to Supplemental Unemployment Benefit Trusts.
Nonprofit501c18: URIRef  # Nonprofit501c18: Non-profit type referring to Employee Funded Pension Trust (created before 25 June 1959).
Nonprofit501c19: URIRef  # Nonprofit501c19: Non-profit type referring to Post or Organization of Past or Present Members of the Armed Forces.
Nonprofit501c2: URIRef  # Nonprofit501c2: Non-profit type referring to Title-holding Corporations for Exempt Organizations.
Nonprofit501c20: URIRef  # Nonprofit501c20: Non-profit type referring to Group Legal Services Plan Organizations.
Nonprofit501c21: URIRef  # Nonprofit501c21: Non-profit type referring to Black Lung Benefit Trusts.
Nonprofit501c22: URIRef  # Nonprofit501c22: Non-profit type referring to Withdrawal Liability Payment Funds.
Nonprofit501c23: URIRef  # Nonprofit501c23: Non-profit type referring to Veterans Organizations.
Nonprofit501c24: URIRef  # Nonprofit501c24: Non-profit type referring to Section 4049 ERISA Trusts.
Nonprofit501c25: URIRef  # Nonprofit501c25: Non-profit type referring to Real Property Title-Holding Corporations or Trusts with Multiple Parents.
Nonprofit501c26: URIRef  # Nonprofit501c26: Non-profit type referring to State-Sponsored Organizations Providing Health Coverage for High-Risk Individuals.
Nonprofit501c27: URIRef  # Nonprofit501c27: Non-profit type referring to State-Sponsored Workers' Compensation Reinsurance Organizations.
Nonprofit501c28: URIRef  # Nonprofit501c28: Non-profit type referring to National Railroad Retirement Investment Trusts.
Nonprofit501c3: URIRef  # Nonprofit501c3: Non-profit type referring to Religious, Educational, Charitable, Scientific, Literary, Testing for Public Safety, to Foster National or International Amateur Sports Competition, or Prevention of Cruelty to Children or Animals Organizations.
Nonprofit501c4: URIRef  # Nonprofit501c4: Non-profit type referring to Civic Leagues, Social Welfare Organizations, and Local Associations of Employees.
Nonprofit501c5: URIRef  # Nonprofit501c5: Non-profit type referring to Labor, Agricultural and Horticultural Organizations.
Nonprofit501c6: URIRef  # Nonprofit501c6: Non-profit type referring to Business Leagues, Chambers of Commerce, Real Estate Boards.
Nonprofit501c7: URIRef  # Nonprofit501c7: Non-profit type referring to Social and Recreational Clubs.
Nonprofit501c8: URIRef  # Nonprofit501c8: Non-profit type referring to Fraternal Beneficiary Societies and Associations.
Nonprofit501c9: URIRef  # Nonprofit501c9: Non-profit type referring to Voluntary Employee Beneficiary Associations.
Nonprofit501d: URIRef  # Nonprofit501d: Non-profit type referring to Religious and Apostolic Associations.
Nonprofit501e: URIRef  # Nonprofit501e: Non-profit type referring to Cooperative Hospital Service Organizations.
Nonprofit501f: URIRef  # Nonprofit501f: Non-profit type referring to Cooperative Service Organizations.
Nonprofit501k: URIRef  # Nonprofit501k: Non-profit type referring to Child Care Organizations.
Nonprofit501n: URIRef  # Nonprofit501n: Non-profit type referring to Charitable Risk Pools.
Nonprofit501q: URIRef  # Nonprofit501q: Non-profit type referring to Credit Counseling Organizations.
Nonprofit527: URIRef  # Nonprofit527: Non-profit type referring to Political organizations.
NonprofitANBI: URIRef  # NonprofitANBI: Non-profit type referring to a Public Benefit Organization (NL).
NonprofitSBBI: URIRef  # NonprofitSBBI: Non-profit type referring to a Social Interest Promoting Institution (NL).
NonprofitType: URIRef  # NonprofitType enumerates several kinds of official non-profit types of which a non-profit organization can be.
Nose: URIRef  # Nose function assessment with clinical examination.
NotInForce: URIRef  # Indicates that a legislation is currently not in force.
NotYetRecruiting: URIRef  # Not yet recruiting.
Notary: URIRef  # A notary.
NoteDigitalDocument: URIRef  # A file containing a note, primarily for the author.
Number: URIRef  # Data type: Number.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator.
Nursing: URIRef  # A health profession of a person formally educated and trained in the care of the sick or infirm person.
NutritionInformation: URIRef  # Nutritional information about the recipe.
OTC: URIRef  # The character of a medical substance, typically a medicine, of being available over the counter or not.
# NOTE(review): restored from line-joined corruption; Offer's description was
# split mid-sentence across a physical line break and is rejoined here.
Observation: URIRef  # Instances of the class [[Observation]] are used to specify observations about an entity (which may or may not be an instance of a [[StatisticalPopulation]]), at a particular time. The principal properties of an [[Observation]] are [[observedNode]], [[measuredProperty]], [[measuredValue]] (or [[median]], etc.) and [[observationDate]] ([[measuredProperty]] properties can, but need not always, be W3C RDF Data Cube "measure properties", as in the [lifeExpectancy example](https://www.w3.org/TR/vocab-data-cube/#dsd-example)). See also [[StatisticalPopulation]], and the [data and datasets](/docs/data-and-datasets.html) overview for more details.
Observational: URIRef  # An observational study design.
Obstetric: URIRef  # A specific branch of medical science that specializes in the care of women during the prenatal and postnatal care and with the delivery of the child.
Occupation: URIRef  # A profession, may involve prolonged training and/or a formal qualification.
OccupationalActivity: URIRef  # Any physical activity engaged in for job-related purposes. Examples may include waiting tables, maid service, carrying a mailbag, picking fruits or vegetables, construction work, etc.
OccupationalExperienceRequirements: URIRef  # Indicates employment-related experience requirements, e.g. [[monthsOfExperience]].
OccupationalTherapy: URIRef  # A treatment of people with physical, emotional, or social problems, using purposeful activity to help them overcome or learn to deal with their problems.
OceanBodyOfWater: URIRef  # An ocean (for example, the Pacific).
Offer: URIRef  # An offer to transfer some rights to an item or to provide a service — for example, an offer to sell tickets to an event, to rent the DVD of a movie, to stream a TV show over the internet, to repair a motorcycle, or to loan a book.\n\nNote: As the [[businessFunction]] property, which identifies the form of offer (e.g. sell, lease, repair, dispose), defaults to http://purl.org/goodrelations/v1#Sell; an Offer without a defined businessFunction value can be assumed to be an offer to sell.\n\nFor [GTIN](http://www.gs1.org/barcodes/technical/idkeys/gtin)-related fields, see [Check Digit calculator](http://www.gs1.org/barcodes/support/check_digit_calculator) and [validation guide](http://www.gs1us.org/resources/standards/gtin-validation-guide) from [GS1](http://www.gs1.org/).
OfferCatalog: URIRef  # An OfferCatalog is an ItemList that contains related Offers and/or further OfferCatalogs that are offeredBy the same provider.
OfferForLease: URIRef  # An [[OfferForLease]] in Schema.org represents an [[Offer]] to lease out something, i.e. an [[Offer]] whose [[businessFunction]] is [lease out](http://purl.org/goodrelations/v1#LeaseOut.). See [Good Relations](https://en.wikipedia.org/wiki/GoodRelations) for background on the underlying concepts.
OfferForPurchase: URIRef  # An [[OfferForPurchase]] in Schema.org represents an [[Offer]] to sell something, i.e. an [[Offer]] whose [[businessFunction]] is [sell](http://purl.org/goodrelations/v1#Sell.). See [Good Relations](https://en.wikipedia.org/wiki/GoodRelations) for background on the underlying concepts.
OfferItemCondition: URIRef  # A list of possible conditions for the item.
OfferShippingDetails: URIRef  # OfferShippingDetails represents information about shipping destinations. Multiple of these entities can be used to represent different shipping rates for different destinations: One entity for Alaska/Hawaii. A different one for continental US.A different one for all France. Multiple of these entities can be used to represent different shipping costs and delivery times. Two entities that are identical but differ in rate and time: e.g. Cheaper and slower: $5 in 5-7days or Fast and expensive: $15 in 1-2 days.
OfficeEquipmentStore: URIRef  # An office equipment store.
# NOTE(review): restored from line-joined corruption — one declaration per line.
OfficialLegalValue: URIRef  # All the documents published by an official publisher should have at least the legal value level "OfficialLegalValue". This indicates that the document was published by an organisation with the public task of making it available (e.g. a consolidated version of a EU directive published by the EU Office of Publications).
OfflineEventAttendanceMode: URIRef  # OfflineEventAttendanceMode - an event that is primarily conducted offline.
OfflinePermanently: URIRef  # Game server status: OfflinePermanently. Server is offline and not available.
OfflineTemporarily: URIRef  # Game server status: OfflineTemporarily. Server is offline now but it can be online soon.
OnDemandEvent: URIRef  # A publication event e.g. catch-up TV or radio podcast, during which a program is available on-demand.
OnSitePickup: URIRef  # A DeliveryMethod in which an item is collected on site, e.g. in a store or at a box office.
Oncologic: URIRef  # A specific branch of medical science that deals with benign and malignant tumors, including the study of their development, diagnosis, treatment and prevention.
OneTimePayments: URIRef  # OneTimePayments: this is a benefit for one-time payments for individuals.
Online: URIRef  # Game server status: Online. Server is available.
OnlineEventAttendanceMode: URIRef  # OnlineEventAttendanceMode - an event that is primarily conducted online.
OnlineFull: URIRef  # Game server status: OnlineFull. Server is online but unavailable. The maximum number of players has reached.
OnlineOnly: URIRef  # Indicates that the item is available only online.
OpenTrial: URIRef  # A trial design in which the researcher knows the full details of the treatment, and so does the patient.
OpeningHoursSpecification: URIRef  # A structured value providing information about the opening hours of a place or a certain service inside a place.\n\n The place is __open__ if the [[opens]] property is specified, and __closed__ otherwise.\n\nIf the value for the [[closes]] property is less than the value for the [[opens]] property then the hour range is assumed to span over the next day.
OpinionNewsArticle: URIRef  # An [[OpinionNewsArticle]] is a [[NewsArticle]] that primarily expresses opinions rather than journalistic reporting of news and events. For example, a [[NewsArticle]] consisting of a column or [[Blog]]/[[BlogPosting]] entry in the Opinions section of a news publication.
Optician: URIRef  # A store that sells reading glasses and similar devices for improving vision.
Optometric: URIRef  # The science or practice of testing visual acuity and prescribing corrective lenses.
Order: URIRef  # An order is a confirmation of a transaction (a receipt), which can contain multiple line items, each represented by an Offer that has been accepted by the customer.
OrderAction: URIRef  # An agent orders an object/product/service to be delivered/sent.
OrderCancelled: URIRef  # OrderStatus representing cancellation of an order.
OrderDelivered: URIRef  # OrderStatus representing successful delivery of an order.
OrderInTransit: URIRef  # OrderStatus representing that an order is in transit.
OrderItem: URIRef  # An order item is a line of an order. It includes the quantity and shipping details of a bought offer.
OrderPaymentDue: URIRef  # OrderStatus representing that payment is due on an order.
OrderPickupAvailable: URIRef  # OrderStatus representing availability of an order for pickup.
OrderProblem: URIRef  # OrderStatus representing that there is a problem with the order.
OrderProcessing: URIRef  # OrderStatus representing that an order is being processed.
OrderReturned: URIRef  # OrderStatus representing that an order has been returned.
OrderStatus: URIRef  # Enumerated status values for Order.
Organization: URIRef  # An organization such as a school, NGO, corporation, club, etc.
OrganizationRole: URIRef  # A subclass of Role used to describe roles within organizations.
OrganizeAction: URIRef  # The act of manipulating/administering/supervising/controlling one or more objects.
OriginalMediaContent: URIRef  # Content coded 'as original media content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'original': No evidence the footage has been misleadingly altered or manipulated, though it may contain false or misleading claims. For an [[ImageObject]] to be 'original': No evidence the image has been misleadingly altered or manipulated, though it may still contain false or misleading claims. For an [[ImageObject]] with embedded text to be 'original': No evidence the image has been misleadingly altered or manipulated, though it may still contain false or misleading claims. For an [[AudioObject]] to be 'original': No evidence the audio has been misleadingly altered or manipulated, though it may contain false or misleading claims.
OriginalShippingFees: URIRef  # Specifies that the customer must pay the original shipping costs when returning a product.
Osteopathic: URIRef  # A system of medicine focused on promoting the body's innate ability to heal itself.
Otolaryngologic: URIRef  # A specific branch of medical science that is concerned with the ear, nose and throat and their respective disease states.
OutOfStock: URIRef  # Indicates that the item is out of stock.
OutletStore: URIRef  # An outlet store.
OverviewHealthAspect: URIRef  # Overview of the content. Contains a summarized view of the topic with the most relevant information for an introduction.
OwnershipInfo: URIRef  # A structured value providing information about when a certain organization or person owned a certain product.
PET: URIRef  # Positron emission tomography imaging.
PaidLeave: URIRef  # PaidLeave: this is a benefit for paid leave.
PaintAction: URIRef  # The act of producing a painting, typically with paint and canvas as instruments.
Painting: URIRef  # A painting.
PalliativeProcedure: URIRef  # A medical procedure intended primarily for palliative purposes, aimed at relieving the symptoms of an underlying health condition.
Paperback: URIRef  # Book format: Paperback.
ParcelDelivery: URIRef  # The delivery of a parcel either via the postal service or a commercial service.
ParcelService: URIRef  # A private parcel service as the delivery mode available for a certain offer.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#DHL\n* http://purl.org/goodrelations/v1#FederalExpress\n* http://purl.org/goodrelations/v1#UPS
ParentAudience: URIRef  # A set of characteristics describing parents, who can be interested in viewing some content.
ParentalSupport: URIRef  # ParentalSupport: this is a benefit for parental support.
Park: URIRef  # A park.
ParkingFacility: URIRef  # A parking lot or other parking facility.
ParkingMap: URIRef  # A parking map.
PartiallyInForce: URIRef  # Indicates that parts of the legislation are in force, and parts are not.
Pathology: URIRef  # A specific branch of medical science that is concerned with the study of the cause, origin and nature of a disease state, including its consequences as a result of manifestation of the disease. In clinical care, the term is used to designate a branch of medicine using laboratory tests to diagnose and determine the prognostic significance of illness.
PathologyTest: URIRef  # A medical test performed by a laboratory that typically involves examination of a tissue sample by a pathologist.
Patient: URIRef  # A patient is any person recipient of health care services.
PatientExperienceHealthAspect: URIRef  # Content about the real life experience of patients or people that have lived a similar experience about the topic. May be forums, topics, Q-and-A and related material.
PawnShop: URIRef # A shop that will buy, or lend money against the security of, personal possessions. PayAction: URIRef # An agent pays a price to a participant. PaymentAutomaticallyApplied: URIRef # An automatic payment system is in place and will be used. PaymentCard: URIRef # A payment method using a credit, debit, store or other card to associate the payment with an account. PaymentChargeSpecification: URIRef # The costs of settling the payment using a particular payment method. PaymentComplete: URIRef # The payment has been received and processed. PaymentDeclined: URIRef # The payee received the payment, but it was declined for some reason. PaymentDue: URIRef # The payment is due, but still within an acceptable time to be received. PaymentMethod: URIRef # A payment method is a standardized procedure for transferring the monetary amount for a purchase. Payment methods are characterized by the legal and technical structures used, and by the organization or group carrying out the transaction.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#ByBankTransferInAdvance\n* http://purl.org/goodrelations/v1#ByInvoice\n* http://purl.org/goodrelations/v1#Cash\n* http://purl.org/goodrelations/v1#CheckInAdvance\n* http://purl.org/goodrelations/v1#COD\n* http://purl.org/goodrelations/v1#DirectDebit\n* http://purl.org/goodrelations/v1#GoogleCheckout\n* http://purl.org/goodrelations/v1#PayPal\n* http://purl.org/goodrelations/v1#PaySwarm PaymentPastDue: URIRef # The payment is due and considered late. PaymentService: URIRef # A Service to transfer funds from a person or organization to a beneficiary person or organization. PaymentStatusType: URIRef # A specific payment status. For example, PaymentDue, PaymentComplete, etc. Pediatric: URIRef # A specific branch of medical science that specializes in the care of infants, children and adolescents. PeopleAudience: URIRef # A set of characteristics belonging to people, e.g. who compose an item's target audience. 
PercutaneousProcedure: URIRef # A type of medical procedure that involves percutaneous techniques, where access to organs or tissue is achieved via needle-puncture of the skin. For example, catheter-based procedures like stent delivery. PerformAction: URIRef # The act of participating in performance arts. PerformanceRole: URIRef # A PerformanceRole is a Role that some entity places with regard to a theatrical performance, e.g. in a Movie, TVSeries etc. PerformingArtsTheater: URIRef # A theater or other performing art center. PerformingGroup: URIRef # A performance group, such as a band, an orchestra, or a circus. Periodical: URIRef # A publication in any medium issued in successive parts bearing numerical or chronological designations and intended, such as a magazine, scholarly journal, or newspaper to continue indefinitely.\n\nSee also [blog post](http://blog.schema.org/2014/09/schemaorg-support-for-bibliographic_2.html). Permit: URIRef # A permit issued by an organization, e.g. a parking pass. Person: URIRef # A person (alive, dead, undead, or fictional). PetStore: URIRef # A pet store. Pharmacy: URIRef # A pharmacy or drugstore. PharmacySpecialty: URIRef # The practice or art and science of preparing and dispensing drugs and medicines. Photograph: URIRef # A photograph. PhotographAction: URIRef # The act of capturing still images of objects using a camera. PhysicalActivity: URIRef # Any bodily activity that enhances or maintains physical fitness and overall health and wellness. Includes activity that is part of daily living and routine, structured exercise, and exercise prescribed as part of a medical treatment or recovery plan. PhysicalActivityCategory: URIRef # Categories of physical activity, organized by physiologic classification. PhysicalExam: URIRef # A type of physical examination of a patient performed by a physician. PhysicalTherapy: URIRef # A process of progressive physical care and rehabilitation aimed at improving a health condition. 
# Schema.org terms (Physician .. PodcastSeries); one URIRef annotation per term.
Physician: URIRef  # A doctor's office.
Physiotherapy: URIRef  # The practice of treatment of disease, injury, or deformity by physical methods such as massage, heat treatment, and exercise rather than by drugs or surgery..
Place: URIRef  # Entities that have a somewhat fixed, physical extension.
PlaceOfWorship: URIRef  # Place of worship, such as a church, synagogue, or mosque.
PlaceboControlledTrial: URIRef  # A placebo-controlled trial design.
PlanAction: URIRef  # The act of planning the execution of an event/task/action/reservation/plan to a future date.
PlasticSurgery: URIRef  # A specific branch of medical science that pertains to therapeutic or cosmetic repair or re-formation of missing, injured or malformed tissues or body parts by manual and instrumental means.
Play: URIRef  # A play is a form of literature, usually consisting of dialogue between characters, intended for theatrical performance rather than just reading. Note: A performance of a Play would be a [[TheaterEvent]] or [[BroadcastEvent]] - the *Play* being the [[workPerformed]].
PlayAction: URIRef  # The act of playing/exercising/training/performing for enjoyment, leisure, recreation, Competition or exercise.\n\nRelated actions:\n\n* [[ListenAction]]: Unlike ListenAction (which is under ConsumeAction), PlayAction refers to performing for an audience or at an event, rather than consuming music.\n* [[WatchAction]]: Unlike WatchAction (which is under ConsumeAction), PlayAction refers to showing/displaying for an audience or at an event, rather than consuming visual content.
Playground: URIRef  # A playground.
Plumber: URIRef  # A plumbing service.
PodcastEpisode: URIRef  # A single episode of a podcast series.
PodcastSeason: URIRef  # A single season of a podcast. Many podcasts do not break down into separate seasons. In that case, PodcastSeries should be used.
PodcastSeries: URIRef  # A podcast is an episodic series of digital audio or video files which a user can download and listen to.
# Schema.org terms (Podiatric .. PublicationEvent); one URIRef annotation per term.
# Descriptions that were split across physical lines have been rejoined onto
# the declaration they belong to.
Podiatric: URIRef  # Podiatry is the care of the human foot, especially the diagnosis and treatment of foot disorders.
PoliceStation: URIRef  # A police station.
Pond: URIRef  # A pond.
PostOffice: URIRef  # A post office.
PostalAddress: URIRef  # The mailing address.
PostalCodeRangeSpecification: URIRef  # Indicates a range of postalcodes, usually defined as the set of valid codes between [[postalCodeBegin]] and [[postalCodeEnd]], inclusively.
Poster: URIRef  # A large, usually printed placard, bill, or announcement, often illustrated, that is posted to advertise or publicize something.
PotentialActionStatus: URIRef  # A description of an action that is supported.
PreOrder: URIRef  # Indicates that the item is available for pre-order.
PreOrderAction: URIRef  # An agent orders a (not yet released) object/product/service to be delivered/sent.
PreSale: URIRef  # Indicates that the item is available for ordering and delivery before general availability.
PregnancyHealthAspect: URIRef  # Content discussing pregnancy-related aspects of a health topic.
PrependAction: URIRef  # The act of inserting at the beginning if an ordered collection.
Preschool: URIRef  # A preschool.
PrescriptionOnly: URIRef  # Available by prescription only.
PresentationDigitalDocument: URIRef  # A file containing slides or used for a presentation.
PreventionHealthAspect: URIRef  # Information about actions or measures that can be taken to avoid getting the topic or reaching a critical situation related to the topic.
PreventionIndication: URIRef  # An indication for preventing an underlying condition, symptom, etc.
PriceComponentTypeEnumeration: URIRef  # Enumerates different price components that together make up the total price for an offered product.
PriceSpecification: URIRef  # A structured value representing a price or price range. Typically, only the subclasses of this type are used for markup. It is recommended to use [[MonetaryAmount]] to describe independent amounts of money such as a salary, credit card limits, etc.
PriceTypeEnumeration: URIRef  # Enumerates different price types, for example list price, invoice price, and sale price.
PrimaryCare: URIRef  # The medical care by a physician, or other health-care professional, who is the patient's first contact with the health-care system and who may recommend a specialist if necessary.
Prion: URIRef  # A prion is an infectious agent composed of protein in a misfolded form.
Product: URIRef  # Any offered product or service. For example: a pair of shoes; a concert ticket; the rental of a car; a haircut; or an episode of a TV show streamed online.
ProductCollection: URIRef  # A set of products (either [[ProductGroup]]s or specific variants) that are listed together e.g. in an [[Offer]].
ProductGroup: URIRef  # A ProductGroup represents a group of [[Product]]s that vary only in certain well-described ways, such as by [[size]], [[color]], [[material]] etc. While a ProductGroup itself is not directly offered for sale, the various varying products that it represents can be. The ProductGroup serves as a prototype or template, standing in for all of the products who have an [[isVariantOf]] relationship to it. As such, properties (including additional types) can be applied to the ProductGroup to represent characteristics shared by each of the (possibly very many) variants. Properties that reference a ProductGroup are not included in this mechanism; neither are the following specific properties [[variesBy]], [[hasVariant]], [[url]].
ProductModel: URIRef  # A datasheet or vendor specification of a product (in the sense of a prototypical description).
ProfessionalService: URIRef  # Original definition: "provider of professional services."\n\nThe general [[ProfessionalService]] type for local businesses was deprecated due to confusion with [[Service]]. For reference, the types that it included were: [[Dentist]], [[AccountingService]], [[Attorney]], [[Notary]], as well as types for several kinds of [[HomeAndConstructionBusiness]]: [[Electrician]], [[GeneralContractor]], [[HousePainter]], [[Locksmith]], [[Plumber]], [[RoofingContractor]]. [[LegalService]] was introduced as a more inclusive supertype of [[Attorney]].
ProfilePage: URIRef  # Web page type: Profile page.
PrognosisHealthAspect: URIRef  # Typical progression and happenings of life course of the topic.
ProgramMembership: URIRef  # Used to describe membership in a loyalty programs (e.g. "StarAliance"), traveler clubs (e.g. "AAA"), purchase clubs ("Safeway Club"), etc.
Project: URIRef  # An enterprise (potentially individual but typically collaborative), planned to achieve a particular aim. Use properties from [[Organization]], [[subOrganization]]/[[parentOrganization]] to indicate project sub-structures.
PronounceableText: URIRef  # Data type: PronounceableText.
Property: URIRef  # A property, used to indicate attributes and relationships of some Thing; equivalent to rdf:Property.
PropertyValue: URIRef  # A property-value pair, e.g. representing a feature of a product or place. Use the 'name' property for the name of the property. If there is an additional human-readable version of the value, put that into the 'description' property.\n\n Always use specific schema.org properties when a) they exist and b) you can populate them. Using PropertyValue as a substitute will typically not trigger the same effect as using the original, specific property.
PropertyValueSpecification: URIRef  # A Property value specification.
Protein: URIRef  # Protein is here used in its widest possible definition, as classes of amino acid based molecules. Amyloid-beta Protein in human (UniProt P05067), eukaryota (e.g. an OrthoDB group) or even a single molecule that one can point to are all of type schema:Protein. A protein can thus be a subclass of another protein, e.g. schema:Protein as a UniProt record can have multiple isoforms inside it which would also be schema:Protein. They can be imagined, synthetic, hypothetical or naturally occurring.
Protozoa: URIRef  # Single-celled organism that causes an infection.
Psychiatric: URIRef  # A specific branch of medical science that is concerned with the study, treatment, and prevention of mental illness, using both medical and psychological therapies.
PsychologicalTreatment: URIRef  # A process of care relying upon counseling, dialogue and communication aimed at improving a mental health condition without use of drugs.
PublicHealth: URIRef  # Branch of medicine that pertains to the health services to improve and protect community health, especially epidemiology, sanitation, immunization, and preventive medicine.
PublicHolidays: URIRef  # This stands for any day that is a public holiday; it is a placeholder for all official public holidays in some particular location. While not technically a "day of the week", it can be used with [[OpeningHoursSpecification]]. In the context of an opening hours specification it can be used to indicate opening hours on public holidays, overriding general opening hours for the day of the week on which a public holiday occurs.
PublicSwimmingPool: URIRef  # A public swimming pool.
PublicToilet: URIRef  # A public toilet is a room or small building containing one or more toilets (and possibly also urinals) which is available for use by the general public, or by customers or employees of certain businesses.
PublicationEvent: URIRef  # A PublicationEvent corresponds indifferently to the event of publication for a CreativeWork of any type e.g. a broadcast event, an on-demand event, a book/journal publication via a variety of delivery media.
# Schema.org terms (PublicationIssue .. Quotation); one URIRef annotation per term.
PublicationIssue: URIRef  # A part of a successively published publication such as a periodical or publication volume, often numbered, usually containing a grouping of works such as articles.\n\nSee also [blog post](http://blog.schema.org/2014/09/schemaorg-support-for-bibliographic_2.html).
PublicationVolume: URIRef  # A part of a successively published publication such as a periodical or multi-volume work, often numbered. It may represent a time span, such as a year.\n\nSee also [blog post](http://blog.schema.org/2014/09/schemaorg-support-for-bibliographic_2.html).
Pulmonary: URIRef  # A specific branch of medical science that pertains to the study of the respiratory system and its respective disease states.
QAPage: URIRef  # A QAPage is a WebPage focussed on a specific Question and its Answer(s), e.g. in a question answering site or documenting Frequently Asked Questions (FAQs).
QualitativeValue: URIRef  # A predefined value for a product characteristic, e.g. the power cord plug type 'US' or the garment sizes 'S', 'M', 'L', and 'XL'.
QuantitativeValue: URIRef  # A point value or interval for product characteristics and other purposes.
QuantitativeValueDistribution: URIRef  # A statistical distribution of values.
Quantity: URIRef  # Quantities such as distance, time, mass, weight, etc. Particular instances of say Mass are entities like '3 Kg' or '4 milligrams'.
Question: URIRef  # A specific question - e.g. from a user seeking answers online, or collected in a Frequently Asked Questions (FAQ) document.
Quiz: URIRef  # Quiz: A test of knowledge, skills and abilities.
Quotation: URIRef  # A quotation. Often but not necessarily from some written work, attributable to a real world author and - if associated with a fictional character - to any fictional Person. Use [[isBasedOn]] to link to source/origin. The [[recordedIn]] property can be used to reference a Quotation from an [[Event]].
# Schema.org terms (QuoteAction .. RealEstateListing); one URIRef annotation per term.
QuoteAction: URIRef  # An agent quotes/estimates/appraises an object/product/service with a price at a location/store.
RVPark: URIRef  # A place offering space for "Recreational Vehicles", Caravans, mobile homes and the like.
RadiationTherapy: URIRef  # A process of care using radiation aimed at improving a health condition.
RadioBroadcastService: URIRef  # A delivery service through which radio content is provided via broadcast over the air or online.
RadioChannel: URIRef  # A unique instance of a radio BroadcastService on a CableOrSatelliteService lineup.
RadioClip: URIRef  # A short radio program or a segment/part of a radio program.
RadioEpisode: URIRef  # A radio episode which can be part of a series or season.
RadioSeason: URIRef  # Season dedicated to radio broadcast and associated online delivery.
RadioSeries: URIRef  # CreativeWorkSeries dedicated to radio broadcast and associated online delivery.
RadioStation: URIRef  # A radio station.
Radiography: URIRef  # Radiography is an imaging technique that uses electromagnetic radiation other than visible light, especially X-rays, to view the internal structure of a non-uniformly composed and opaque object such as the human body.
RandomizedTrial: URIRef  # A randomized trial design.
Rating: URIRef  # A rating is an evaluation on a numeric scale, such as 1 to 5 stars.
ReactAction: URIRef  # The act of responding instinctively and emotionally to an object, expressing a sentiment.
ReadAction: URIRef  # The act of consuming written content.
ReadPermission: URIRef  # Permission to read or view the document.
RealEstateAgent: URIRef  # A real-estate agent.
RealEstateListing: URIRef  # A [[RealEstateListing]] is a listing that describes one or more real-estate [[Offer]]s (whose [[businessFunction]] is typically to lease out, or to sell). The [[RealEstateListing]] type itself represents the overall listing, as manifested in some [[WebPage]].
# Schema.org terms (RearWheelDriveConfiguration .. RefurbishedCondition); one URIRef annotation per term.
RearWheelDriveConfiguration: URIRef  # Real-wheel drive is a transmission layout where the engine drives the rear wheels.
ReceiveAction: URIRef  # The act of physically/electronically taking delivery of an object that has been transferred from an origin to a destination. Reciprocal of SendAction.\n\nRelated actions:\n\n* [[SendAction]]: The reciprocal of ReceiveAction.\n* [[TakeAction]]: Unlike TakeAction, ReceiveAction does not imply that the ownership has been transfered (e.g. I can receive a package, but it does not mean the package is now mine).
Recipe: URIRef  # A recipe. For dietary restrictions covered by the recipe, a few common restrictions are enumerated via [[suitableForDiet]]. The [[keywords]] property can also be used to add more detail.
Recommendation: URIRef  # [[Recommendation]] is a type of [[Review]] that suggests or proposes something as the best option or best course of action. Recommendations may be for products or services, or other concrete things, as in the case of a ranked list or product guide. A [[Guide]] may list multiple recommendations for different categories. For example, in a [[Guide]] about which TVs to buy, the author may have several [[Recommendation]]s.
RecommendedDoseSchedule: URIRef  # A recommended dosing schedule for a drug or supplement as prescribed or recommended by an authority or by the drug/supplement's manufacturer. Capture the recommending authority in the recognizingAuthority property of MedicalEntity.
Recruiting: URIRef  # Recruiting participants.
RecyclingCenter: URIRef  # A recycling center.
RefundTypeEnumeration: URIRef  # Enumerates several kinds of product return refund types.
RefurbishedCondition: URIRef  # Indicates that the item is refurbished.
# Schema.org terms (RegisterAction .. ReplyAction); one URIRef annotation per term.
RegisterAction: URIRef  # The act of registering to be a user of a service, product or web page.\n\nRelated actions:\n\n* [[JoinAction]]: Unlike JoinAction, RegisterAction implies you are registering to be a user of a service, *not* a group/team of people.\n* [FollowAction]]: Unlike FollowAction, RegisterAction doesn't imply that the agent is expecting to poll for updates from the object.\n* [[SubscribeAction]]: Unlike SubscribeAction, RegisterAction doesn't imply that the agent is expecting updates from the object.
Registry: URIRef  # A registry-based study design.
ReimbursementCap: URIRef  # The drug's cost represents the maximum reimbursement paid by an insurer for the drug.
RejectAction: URIRef  # The act of rejecting to/adopting an object.\n\nRelated actions:\n\n* [[AcceptAction]]: The antonym of RejectAction.
RelatedTopicsHealthAspect: URIRef  # Other prominent or relevant topics tied to the main topic.
RemixAlbum: URIRef  # RemixAlbum.
Renal: URIRef  # A specific branch of medical science that pertains to the study of the kidneys and its respective disease states.
RentAction: URIRef  # The act of giving money in return for temporary use, but not ownership, of an object such as a vehicle or property. For example, an agent rents a property from a landlord in exchange for a periodic payment.
RentalCarReservation: URIRef  # A reservation for a rental car.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations.
RentalVehicleUsage: URIRef  # Indicates the usage of the vehicle as a rental car.
RepaymentSpecification: URIRef  # A structured value representing repayment.
ReplaceAction: URIRef  # The act of editing a recipient by replacing an old object with a new object.
ReplyAction: URIRef  # The act of responding to a question/message asked/sent by the object. Related to [[AskAction]]\n\nRelated actions:\n\n* [[AskAction]]: Appears generally as an origin of a ReplyAction.
# Schema.org terms (Report .. ReservationCancelled); one URIRef annotation per term.
Report: URIRef  # A Report generated by governmental or non-governmental organization.
ReportageNewsArticle: URIRef  # The [[ReportageNewsArticle]] type is a subtype of [[NewsArticle]] representing news articles which are the result of journalistic news reporting conventions. In practice many news publishers produce a wide variety of article types, many of which might be considered a [[NewsArticle]] but not a [[ReportageNewsArticle]]. For example, opinion pieces, reviews, analysis, sponsored or satirical articles, or articles that combine several of these elements. The [[ReportageNewsArticle]] type is based on a stricter ideal for "news" as a work of journalism, with articles based on factual information either observed or verified by the author, or reported and verified from knowledgeable sources. This often includes perspectives from multiple viewpoints on a particular issue (distinguishing news reports from public relations or propaganda). News reports in the [[ReportageNewsArticle]] sense de-emphasize the opinion of the author, with commentary and value judgements typically expressed elsewhere. A [[ReportageNewsArticle]] which goes deeper into analysis can also be marked with an additional type of [[AnalysisNewsArticle]].
ReportedDoseSchedule: URIRef  # A patient-reported or observed dosing schedule for a drug or supplement.
ResearchOrganization: URIRef  # A Research Organization (e.g. scientific institute, research company).
ResearchProject: URIRef  # A Research project.
Researcher: URIRef  # Researchers.
Reservation: URIRef  # Describes a reservation for travel, dining or an event. Some reservations require tickets. \n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, restaurant reservations, flights, or rental cars, use [[Offer]].
ReservationCancelled: URIRef  # The status for a previously confirmed reservation that is now cancelled.
# Schema.org terms (ReservationConfirmed .. ReturnLabelSourceEnumeration); one
# URIRef annotation per term. The Resort description, which was split across
# physical lines, has been rejoined onto its declaration.
ReservationConfirmed: URIRef  # The status of a confirmed reservation.
ReservationHold: URIRef  # The status of a reservation on hold pending an update like credit card number or flight changes.
ReservationPackage: URIRef  # A group of multiple reservations with common values for all sub-reservations.
ReservationPending: URIRef  # The status of a reservation when a request has been sent, but not confirmed.
ReservationStatusType: URIRef  # Enumerated status values for Reservation.
ReserveAction: URIRef  # Reserving a concrete object.\n\nRelated actions:\n\n* [[ScheduleAction]]: Unlike ScheduleAction, ReserveAction reserves concrete objects (e.g. a table, a hotel) towards a time slot / spatial allocation.
Reservoir: URIRef  # A reservoir of water, typically an artificially created lake, like the Lake Kariba reservoir.
Residence: URIRef  # The place where a person lives.
Resort: URIRef  # A resort is a place used for relaxation or recreation, attracting visitors for holidays or vacations. Resorts are places, towns or sometimes commercial establishment operated by a single company (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Resort). See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
RespiratoryTherapy: URIRef  # The therapy that is concerned with the maintenance or improvement of respiratory function (as in patients with pulmonary disease).
Restaurant: URIRef  # A restaurant.
RestockingFees: URIRef  # Specifies that the customer must pay a restocking fee when returning a product
RestrictedDiet: URIRef  # A diet restricted to certain foods or preparations for cultural, religious, health or lifestyle reasons.
ResultsAvailable: URIRef  # Results are available.
ResultsNotAvailable: URIRef  # Results are not available.
ResumeAction: URIRef  # The act of resuming a device or application which was formerly paused (e.g. resume music playback or resume a timer).
Retail: URIRef  # The drug's cost represents the retail cost of the drug.
ReturnAction: URIRef  # The act of returning to the origin that which was previously received (concrete objects) or taken (ownership).
ReturnAtKiosk: URIRef  # Specifies that product returns must be made at a kiosk.
ReturnByMail: URIRef  # Specifies that product returns must to be done by mail.
ReturnFeesCustomerResponsibility: URIRef  # Specifies that product returns must be paid for, and are the responsibility of, the customer.
ReturnFeesEnumeration: URIRef  # Enumerates several kinds of policies for product return fees.
ReturnInStore: URIRef  # Specifies that product returns must be made in a store.
ReturnLabelCustomerResponsibility: URIRef  # Indicated that creating a return label is the responsibility of the customer.
ReturnLabelDownloadAndPrint: URIRef  # Indicated that a return label must be downloaded and printed by the customer.
ReturnLabelInBox: URIRef  # Specifies that a return label will be provided by the seller in the shipping box.
ReturnLabelSourceEnumeration: URIRef  # Enumerates several types of return labels for product returns.
# Schema.org terms (ReturnMethodEnumeration .. SeaBodyOfWater); one URIRef
# annotation per term. The Room and SatireOrParodyContent descriptions, which
# were split across physical lines, have been rejoined onto their declarations.
ReturnMethodEnumeration: URIRef  # Enumerates several types of product return methods.
ReturnShippingFees: URIRef  # Specifies that the customer must pay the return shipping costs when returning a product
Review: URIRef  # A review of an item - for example, of a restaurant, movie, or store.
ReviewAction: URIRef  # The act of producing a balanced opinion about the object for an audience. An agent reviews an object with participants resulting in a review.
ReviewNewsArticle: URIRef  # A [[NewsArticle]] and [[CriticReview]] providing a professional critic's assessment of a service, product, performance, or artistic or literary work.
Rheumatologic: URIRef  # A specific branch of medical science that deals with the study and treatment of rheumatic, autoimmune or joint diseases.
RightHandDriving: URIRef  # The steering position is on the right side of the vehicle (viewed from the main direction of driving).
RisksOrComplicationsHealthAspect: URIRef  # Information about the risk factors and possible complications that may follow a topic.
RiverBodyOfWater: URIRef  # A river (for example, the broad majestic Shannon).
Role: URIRef  # Represents additional information about a relationship or property. For example a Role can be used to say that a 'member' role linking some SportsTeam to a player occurred during a particular time period. Or that a Person's 'actor' role in a Movie was for some particular characterName. Such properties can be attached to a Role entity, which is then associated with the main entities using ordinary properties like 'member' or 'actor'.\n\nSee also [blog post](http://blog.schema.org/2014/06/introducing-role.html).
RoofingContractor: URIRef  # A roofing contractor.
Room: URIRef  # A room is a distinguishable space within a structure, usually separated from other spaces by interior walls. (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Room). See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations.
RsvpAction: URIRef  # The act of notifying an event organizer as to whether you expect to attend the event.
RsvpResponseMaybe: URIRef  # The invitee may or may not attend.
RsvpResponseNo: URIRef  # The invitee will not attend.
RsvpResponseType: URIRef  # RsvpResponseType is an enumeration type whose instances represent responding to an RSVP request.
RsvpResponseYes: URIRef  # The invitee will attend.
SRP: URIRef  # Represents the suggested retail price ("SRP") of an offered product.
SafetyHealthAspect: URIRef  # Content about the safety-related aspects of a health topic.
SaleEvent: URIRef  # Event type: Sales event.
SalePrice: URIRef  # Represents a sale price (usually active for a limited period) of an offered product.
SatireOrParodyContent: URIRef  # Content coded 'satire or parody content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'satire or parody content': A video that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[ImageObject]] to be 'satire or parody content': An image that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[ImageObject]] with embedded text to be 'satire or parody content': An image that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[AudioObject]] to be 'satire or parody content': Audio that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.)
SatiricalArticle: URIRef  # An [[Article]] whose content is primarily [[satirical]](https://en.wikipedia.org/wiki/Satire) in nature, i.e. unlikely to be literally true. A satirical article is sometimes but not necessarily also a [[NewsArticle]]. [[ScholarlyArticle]]s are also sometimes satirized.
Saturday: URIRef  # The day of the week between Friday and Sunday.
Schedule: URIRef  # A schedule defines a repeating time period used to describe a regularly occurring [[Event]]. At a minimum a schedule will specify [[repeatFrequency]] which describes the interval between occurences of the event. Additional information can be provided to specify the schedule more precisely. This includes identifying the day(s) of the week or month when the recurring event will take place, in addition to its start and end time. Schedules may also have start and end dates to indicate when they are active, e.g. to define a limited calendar of events.
ScheduleAction: URIRef  # Scheduling future actions, events, or tasks.\n\nRelated actions:\n\n* [[ReserveAction]]: Unlike ReserveAction, ScheduleAction allocates future actions (e.g. an event, a task, etc) towards a time slot / spatial allocation.
ScholarlyArticle: URIRef  # A scholarly article.
School: URIRef  # A school.
SchoolDistrict: URIRef  # A School District is an administrative area for the administration of schools.
ScreeningEvent: URIRef  # A screening of a movie or other video.
ScreeningHealthAspect: URIRef  # Content about how to screen or further filter a topic.
Sculpture: URIRef  # A piece of sculpture.
SeaBodyOfWater: URIRef  # A sea (for example, the Caspian sea).
# Schema.org terms (SearchAction .. ServiceChannel); one URIRef annotation per term.
SearchAction: URIRef  # The act of searching for an object.\n\nRelated actions:\n\n* [[FindAction]]: SearchAction generally leads to a FindAction, but not necessarily.
SearchResultsPage: URIRef  # Web page type: Search results page.
Season: URIRef  # A media season e.g. tv, radio, video game etc.
Seat: URIRef  # Used to describe a seat, such as a reserved seat in an event reservation.
SeatingMap: URIRef  # A seating map.
SeeDoctorHealthAspect: URIRef  # Information about questions that may be asked, when to see a professional, measures before seeing a doctor or content about the first consultation.
SeekToAction: URIRef  # This is the [[Action]] of navigating to a specific [[startOffset]] timestamp within a [[VideoObject]], typically represented with a URL template structure.
SelfCareHealthAspect: URIRef  # Self care actions or measures that can be taken to sooth, health or avoid a topic. This may be carried at home and can be carried/managed by the person itself.
SelfStorage: URIRef  # A self-storage facility.
SellAction: URIRef  # The act of taking money from a buyer in exchange for goods or services rendered. An agent sells an object, product, or service to a buyer for a price. Reciprocal of BuyAction.
SendAction: URIRef  # The act of physically/electronically dispatching an object for transfer from an origin to a destination.Related actions:\n\n* [[ReceiveAction]]: The reciprocal of SendAction.\n* [[GiveAction]]: Unlike GiveAction, SendAction does not imply the transfer of ownership (e.g. I can send you my laptop, but I'm not necessarily giving it to you).
Series: URIRef  # A Series in schema.org is a group of related items, typically but not necessarily of the same kind. See also [[CreativeWorkSeries]], [[EventSeries]].
Service: URIRef  # A service provided by an organization, e.g. delivery service, print services, etc.
ServiceChannel: URIRef  # A means for accessing a service, e.g. a government office location, web site, or phone number.
# Schema.org terms (ShareAction .. SizeSpecification); one URIRef annotation per term.
ShareAction: URIRef  # The act of distributing content to people for their amusement or edification.
SheetMusic: URIRef  # Printed music, as opposed to performed or recorded music.
ShippingDeliveryTime: URIRef  # ShippingDeliveryTime provides various pieces of information about delivery times for shipping.
ShippingRateSettings: URIRef  # A ShippingRateSettings represents re-usable pieces of shipping information. It is designed for publication on an URL that may be referenced via the [[shippingSettingsLink]] property of an [[OfferShippingDetails]]. Several occurrences can be published, distinguished and matched (i.e. identified/referenced) by their different values for [[shippingLabel]].
ShoeStore: URIRef  # A shoe store.
ShoppingCenter: URIRef  # A shopping center or mall.
ShortStory: URIRef  # Short story or tale. A brief work of literature, usually written in narrative prose.
SideEffectsHealthAspect: URIRef  # Side effects that can be observed from the usage of the topic.
SingleBlindedTrial: URIRef  # A trial design in which the researcher knows which treatment the patient was randomly assigned to but the patient does not.
SingleCenterTrial: URIRef  # A trial that takes place at a single center.
SingleFamilyResidence: URIRef  # Residence type: Single-family home.
SinglePlayer: URIRef  # Play mode: SinglePlayer. Which is played by a lone player.
SingleRelease: URIRef  # SingleRelease.
SiteNavigationElement: URIRef  # A navigation element of the page.
SizeGroupEnumeration: URIRef  # Enumerates common size groups for various product categories.
SizeSpecification: URIRef  # Size related properties of a product, typically a size code ([[name]]) and optionally a [[sizeSystem]], [[sizeGroup]], and product measurements ([[hasMeasurement]]). In addition, the intended audience can be defined through [[suggestedAge]], [[suggestedGender]], and suggested body measurements ([[suggestedMeasurement]]).
# Schema.org terms (SizeSystemEnumeration .. Specialty); one URIRef annotation
# per term. The very long SpecialAnnouncement description, which was split
# across three physical lines, has been rejoined onto its declaration.
SizeSystemEnumeration: URIRef  # Enumerates common size systems for different categories of products, for example "EN-13402" or "UK" for wearables or "Imperial" for screws.
SizeSystemImperial: URIRef  # Imperial size system.
SizeSystemMetric: URIRef  # Metric size system.
SkiResort: URIRef  # A ski resort.
Skin: URIRef  # Skin assessment with clinical examination.
SocialEvent: URIRef  # Event type: Social event.
SocialMediaPosting: URIRef  # A post to a social media platform, including blog posts, tweets, Facebook posts, etc.
SoftwareApplication: URIRef  # A software application.
SoftwareSourceCode: URIRef  # Computer programming source code. Example: Full (compile ready) solutions, code snippet samples, scripts, templates.
SoldOut: URIRef  # Indicates that the item has sold out.
SolveMathAction: URIRef  # The action that takes in a math expression and directs users to a page potentially capable of solving/simplifying that expression.
SomeProducts: URIRef  # A placeholder for multiple similar products of the same kind.
SoundtrackAlbum: URIRef  # SoundtrackAlbum.
SpeakableSpecification: URIRef  # A SpeakableSpecification indicates (typically via [[xpath]] or [[cssSelector]]) sections of a document that are highlighted as particularly [[speakable]]. Instances of this type are expected to be used primarily as values of the [[speakable]] property.
SpecialAnnouncement: URIRef  # A SpecialAnnouncement combines a simple date-stamped textual information update with contextualized Web links and other structured data. It represents an information update made by a locally-oriented organization, for example schools, pharmacies, healthcare providers, community groups, police, local government. For work in progress guidelines on Coronavirus-related markup see [this doc](https://docs.google.com/document/d/14ikaGCKxo50rRM7nvKSlbUpjyIk2WMQd3IkB1lItlrM/edit#). The motivating scenario for SpecialAnnouncement is the [Coronavirus pandemic](https://en.wikipedia.org/wiki/2019%E2%80%9320_coronavirus_pandemic), and the initial vocabulary is oriented to this urgent situation. Schema.org expect to improve the markup iteratively as it is deployed and as feedback emerges from use. In addition to our usual [Github entry](https://github.com/schemaorg/schemaorg/issues/2490), feedback comments can also be provided in [this document](https://docs.google.com/document/d/1fpdFFxk8s87CWwACs53SGkYv3aafSxz_DTtOQxMrBJQ/edit#). While this schema is designed to communicate urgent crisis-related information, it is not the same as an emergency warning technology like [CAP](https://en.wikipedia.org/wiki/Common_Alerting_Protocol), although there may be overlaps. The intent is to cover the kinds of everyday practical information being posted to existing websites during an emergency situation. Several kinds of information can be provided: We encourage the provision of "name", "text", "datePosted", "expires" (if appropriate), "category" and "url" as a simple baseline. It is important to provide a value for "category" where possible, most ideally as a well known URL from Wikipedia or Wikidata. In the case of the 2019-2020 Coronavirus pandemic, this should be "https://en.wikipedia.org/w/index.php?title=2019-20\_coronavirus\_pandemic" or "https://www.wikidata.org/wiki/Q81068910". For many of the possible properties, values can either be simple links or an inline description, depending on whether a summary is available. For a link, provide just the URL of the appropriate page as the property's value. For an inline description, use a [[WebContent]] type, and provide the url as a property of that, alongside at least a simple "[[text]]" summary of the page. It is unlikely that a single SpecialAnnouncement will need all of the possible properties simultaneously. We expect that in many cases the page referenced might contain more specialized structured data, e.g. contact info, [[openingHours]], [[Event]], [[FAQPage]] etc. By linking to those pages from a [[SpecialAnnouncement]] you can help make it clearer that the events are related to the situation (e.g. Coronavirus) indicated by the [[category]] property of the [[SpecialAnnouncement]]. Many [[SpecialAnnouncement]]s will relate to particular regions and to identifiable local organizations. Use [[spatialCoverage]] for the region, and [[announcementLocation]] to indicate specific [[LocalBusiness]]es and [[CivicStructure]]s. If the announcement affects both a particular region and a specific location (for example, a library closure that serves an entire region), use both [[spatialCoverage]] and [[announcementLocation]]. The [[about]] property can be used to indicate entities that are the focus of the announcement. We now recommend using [[about]] only for representing non-location entities (e.g. a [[Course]] or a [[RadioStation]]). For places, use [[announcementLocation]] and [[spatialCoverage]]. Consumers of this markup should be aware that the initial design encouraged the use of /about for locations too. The basic content of [[SpecialAnnouncement]] is similar to that of an [RSS](https://en.wikipedia.org/wiki/RSS) or [Atom](https://en.wikipedia.org/wiki/Atom_(Web_standard)) feed. For publishers without such feeds, basic feed-like information can be shared by posting [[SpecialAnnouncement]] updates in a page, e.g. using JSON-LD. For sites with Atom/RSS functionality, you can point to a feed with the [[webFeed]] property. This can be a simple URL, or an inline [[DataFeed]] object, with [[encodingFormat]] providing media type information e.g. "application/rss+xml" or "application/atom+xml".
Specialty: URIRef  # Any branch of a field in which people typically develop specific expertise, usually after significant study, time, and effort.
SpeechPathology: URIRef # The scientific study and treatment of defects, disorders, and malfunctions of speech and voice, as stuttering, lisping, or lalling, and of language disturbances, as aphasia or delayed language acquisition. SpokenWordAlbum: URIRef # SpokenWordAlbum. SportingGoodsStore: URIRef # A sporting goods store. SportsActivityLocation: URIRef # A sports location, such as a playing field. SportsClub: URIRef # A sports club. SportsEvent: URIRef # Event type: Sports event. SportsOrganization: URIRef # Represents the collection of all sports organizations, including sports teams, governing bodies, and sports associations. SportsTeam: URIRef # Organization: Sports team. SpreadsheetDigitalDocument: URIRef # A spreadsheet file. StadiumOrArena: URIRef # A stadium. StagedContent: URIRef # Content coded 'staged content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'staged content': A video that has been created using actors or similarly contrived. For an [[ImageObject]] to be 'staged content': An image that was created using actors or similarly contrived, such as a screenshot of a fake tweet. For an [[ImageObject]] with embedded text to be 'staged content': An image that was created using actors or similarly contrived, such as a screenshot of a fake tweet. For an [[AudioObject]] to be 'staged content': Audio that has been created using actors or similarly contrived. StagesHealthAspect: URIRef # Stages that can be observed from a topic. State: URIRef # A state or province of a country. Statement: URIRef # A statement about something, for example a fun or interesting fact. If known, the main entity this statement is about, can be indicated using mainEntity. For more formal claims (e.g. in Fact Checking), consider using [[Claim]] instead. Use the [[text]] property to capture the text of the statement. 
StatisticalPopulation: URIRef # A StatisticalPopulation is a set of instances of a certain given type that satisfy some set of constraints. The property [[populationType]] is used to specify the type. Any property that can be used on instances of that type can appear on the statistical population. For example, a [[StatisticalPopulation]] representing all [[Person]]s with a [[homeLocation]] of East Podunk California, would be described by applying the appropriate [[homeLocation]] and [[populationType]] properties to a [[StatisticalPopulation]] item that stands for that set of people. The properties [[numConstraints]] and [[constrainingProperty]] are used to specify which of the populations properties are used to specify the population. Note that the sense of "population" used here is the general sense of a statistical population, and does not imply that the population consists of people. For example, a [[populationType]] of [[Event]] or [[NewsArticle]] could be used. See also [[Observation]], and the [data and datasets](/docs/data-and-datasets.html) overview for more details. StatusEnumeration: URIRef # Lists or enumerations dealing with status types. SteeringPositionValue: URIRef # A value indicating a steering position. Store: URIRef # A retail good store. StoreCreditRefund: URIRef # Specifies that the customer receives a store credit as refund when returning a product StrengthTraining: URIRef # Physical activity that is engaged in to improve muscle and bone strength. Also referred to as resistance training. StructuredValue: URIRef # Structured values are used when the value of a property has a more complex structure than simply being a textual value or a reference to another thing. StudioAlbum: URIRef # StudioAlbum. 
SubscribeAction: URIRef # The act of forming a personal connection with someone/something (object) unidirectionally/asymmetrically to get updates pushed to.\n\nRelated actions:\n\n* [[FollowAction]]: Unlike FollowAction, SubscribeAction implies that the subscriber acts as a passive agent being constantly/actively pushed for updates.\n* [[RegisterAction]]: Unlike RegisterAction, SubscribeAction implies that the agent is interested in continuing receiving updates from the object.\n* [[JoinAction]]: Unlike JoinAction, SubscribeAction implies that the agent is interested in continuing receiving updates from the object. Subscription: URIRef # Represents the subscription pricing component of the total price for an offered product. Substance: URIRef # Any matter of defined composition that has discrete existence, whose origin may be biological, mineral or chemical. SubwayStation: URIRef # A subway station. Suite: URIRef # A suite in a hotel or other public accommodation, denotes a class of luxury accommodations, the key feature of which is multiple rooms (Source: Wikipedia, the free encyclopedia, see http://en.wikipedia.org/wiki/Suite_(hotel)).

See also the dedicated document on the use of schema.org for marking up hotels and other forms of accommodations. Sunday: URIRef # The day of the week between Saturday and Monday. SuperficialAnatomy: URIRef # Anatomical features that can be observed by sight (without dissection), including the form and proportions of the human body as well as surface landmarks that correspond to deeper subcutaneous structures. Superficial anatomy plays an important role in sports medicine, phlebotomy, and other medical specialties as underlying anatomical structures can be identified through surface palpation. For example, during back surgery, superficial anatomy can be used to palpate and count vertebrae to find the site of incision. Or in phlebotomy, superficial anatomy can be used to locate an underlying vein; for example, the median cubital vein can be located by palpating the borders of the cubital fossa (such as the epicondyles of the humerus) and then looking for the superficial signs of the vein, such as size, prominence, ability to refill after depression, and feel of surrounding tissue support. As another example, in a subluxation (dislocation) of the glenohumeral joint, the bony structure becomes pronounced with the deltoid muscle failing to cover the glenohumeral joint allowing the edges of the scapula to be superficially visible. Here, the superficial anatomy is the visible edges of the scapula, implying the underlying dislocation of the joint (the related anatomical structure). Surgical: URIRef # A specific branch of medical science that pertains to treating diseases, injuries and deformities by manual and instrumental means. SurgicalProcedure: URIRef # A medical procedure involving an incision with instruments; performed for diagnose, or therapeutic purposes. SuspendAction: URIRef # The act of momentarily pausing a device or application (e.g. pause music playback or pause a timer). Suspended: URIRef # Suspended. 
SymptomsHealthAspect: URIRef # Symptoms or related symptoms of a Topic. Synagogue: URIRef # A synagogue. TVClip: URIRef # A short TV program or a segment/part of a TV program. TVEpisode: URIRef # A TV episode which can be part of a series or season. TVSeason: URIRef # Season dedicated to TV broadcast and associated online delivery. TVSeries: URIRef # CreativeWorkSeries dedicated to TV broadcast and associated online delivery. Table: URIRef # A table on a Web page. TakeAction: URIRef # The act of gaining ownership of an object from an origin. Reciprocal of GiveAction.\n\nRelated actions:\n\n* [[GiveAction]]: The reciprocal of TakeAction.\n* [[ReceiveAction]]: Unlike ReceiveAction, TakeAction implies that ownership has been transfered. TattooParlor: URIRef # A tattoo parlor. Taxi: URIRef # A taxi. TaxiReservation: URIRef # A reservation for a taxi.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. TaxiService: URIRef # A service for a vehicle for hire with a driver for local travel. Fares are usually calculated based on distance traveled. TaxiStand: URIRef # A taxi stand. TaxiVehicleUsage: URIRef # Indicates the usage of the car as a taxi. Taxon: URIRef # A set of organisms asserted to represent a natural cohesive biological unit. TechArticle: URIRef # A technical article - Example: How-to (task) topics, step-by-step, procedural troubleshooting, specifications, etc. TelevisionChannel: URIRef # A unique instance of a television BroadcastService on a CableOrSatelliteService lineup. TelevisionStation: URIRef # A television station. TennisComplex: URIRef # A tennis complex. Terminated: URIRef # Terminated. Text: URIRef # Data type: Text. TextDigitalDocument: URIRef # A file composed primarily of text. TheaterEvent: URIRef # Event type: Theater performance. 
TheaterGroup: URIRef # A theater group or company, for example, the Royal Shakespeare Company or Druid Theatre. Therapeutic: URIRef # A medical device used for therapeutic purposes. TherapeuticProcedure: URIRef # A medical procedure intended primarily for therapeutic purposes, aimed at improving a health condition. Thesis: URIRef # A thesis or dissertation document submitted in support of candidature for an academic degree or professional qualification. Thing: URIRef # The most generic type of item. Throat: URIRef # Throat assessment with clinical examination. Thursday: URIRef # The day of the week between Wednesday and Friday. Ticket: URIRef # Used to describe a ticket to an event, a flight, a bus ride, etc. TieAction: URIRef # The act of reaching a draw in a competitive activity. Time: URIRef # A point in time recurring on multiple days in the form hh:mm:ss[Z|(+|-)hh:mm] (see [XML schema for details](http://www.w3.org/TR/xmlschema-2/#time)). TipAction: URIRef # The act of giving money voluntarily to a beneficiary in recognition of services rendered. TireShop: URIRef # A tire shop. TollFree: URIRef # The associated telephone number is toll free. TouristAttraction: URIRef # A tourist attraction. In principle any Thing can be a [[TouristAttraction]], from a [[Mountain]] and [[LandmarksOrHistoricalBuildings]] to a [[LocalBusiness]]. This Type can be used on its own to describe a general [[TouristAttraction]], or be used as an [[additionalType]] to add tourist attraction properties to any other type. (See examples below) TouristDestination: URIRef # A tourist destination. In principle any [[Place]] can be a [[TouristDestination]] from a [[City]], Region or [[Country]] to an [[AmusementPark]] or [[Hotel]]. This Type can be used on its own to describe a general [[TouristDestination]], or be used as an [[additionalType]] to add tourist relevant properties to any other [[Place]]. 
A [[TouristDestination]] is defined as a [[Place]] that contains, or is colocated with, one or more [[TouristAttraction]]s, often linked by a similar theme or interest to a particular [[touristType]]. The [UNWTO](http://www2.unwto.org/) defines Destination (main destination of a tourism trip) as the place visited that is central to the decision to take the trip. (See examples below). TouristInformationCenter: URIRef # A tourist information center. TouristTrip: URIRef # A tourist trip. A created itinerary of visits to one or more places of interest ([[TouristAttraction]]/[[TouristDestination]]) often linked by a similar theme, geographic area, or interest to a particular [[touristType]]. The [UNWTO](http://www2.unwto.org/) defines tourism trip as the Trip taken by visitors. (See examples below). Toxicologic: URIRef # A specific branch of medical science that is concerned with poisons, their nature, effects and detection and involved in the treatment of poisoning. ToyStore: URIRef # A toy store. TrackAction: URIRef # An agent tracks an object for updates.\n\nRelated actions:\n\n* [[FollowAction]]: Unlike FollowAction, TrackAction refers to the interest on the location of innanimates objects.\n* [[SubscribeAction]]: Unlike SubscribeAction, TrackAction refers to the interest on the location of innanimate objects. TradeAction: URIRef # The act of participating in an exchange of goods and services for monetary compensation. An agent trades an object, product or service with a participant in exchange for a one time or periodic payment. TraditionalChinese: URIRef # A system of medicine based on common theoretical concepts that originated in China and evolved over thousands of years, that uses herbs, acupuncture, exercise, massage, dietary therapy, and other methods to treat a wide range of conditions. TrainReservation: URIRef # A reservation for train travel.\n\nNote: This type is for information about actual reservations, e.g. 
in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, use [[Offer]]. TrainStation: URIRef # A train station. TrainTrip: URIRef # A trip on a commercial train line. TransferAction: URIRef # The act of transferring/moving (abstract or concrete) animate or inanimate objects from one place to another. TransformedContent: URIRef # Content coded 'transformed content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'transformed content': or all of the video has been manipulated to transform the footage itself. This category includes using tools like the Adobe Suite to change the speed of the video, add or remove visual elements or dub audio. Deepfakes are also a subset of transformation. For an [[ImageObject]] to be transformed content': Adding or deleting visual elements to give the image a different meaning with the intention to mislead. For an [[ImageObject]] with embedded text to be 'transformed content': Adding or deleting visual elements to give the image a different meaning with the intention to mislead. For an [[AudioObject]] to be 'transformed content': Part or all of the audio has been manipulated to alter the words or sounds, or the audio has been synthetically generated, such as to create a sound-alike voice. TransitMap: URIRef # A transit map. TravelAction: URIRef # The act of traveling from an fromLocation to a destination by a specified mode of transport, optionally with participants. TravelAgency: URIRef # A travel agency. TreatmentIndication: URIRef # An indication for treating an underlying condition, symptom, etc. TreatmentsHealthAspect: URIRef # Treatments or related therapies for a Topic. Trip: URIRef # A trip or journey. An itinerary of visits to one or more places. 
TripleBlindedTrial: URIRef # A trial design in which neither the researcher, the person administering the therapy nor the patient knows the details of the treatment the patient was randomly assigned to. # True: URIRef # The boolean value true. Tuesday: URIRef # The day of the week between Monday and Wednesday. TypeAndQuantityNode: URIRef # A structured value indicating the quantity, unit of measurement, and business function of goods included in a bundle offer. TypesHealthAspect: URIRef # Categorization and other types related to a topic. UKNonprofitType: URIRef # UKNonprofitType: Non-profit organization type originating from the United Kingdom. UKTrust: URIRef # UKTrust: Non-profit type referring to a UK trust. URL: URIRef # Data type: URL. USNonprofitType: URIRef # USNonprofitType: Non-profit organization type originating from the United States. Ultrasound: URIRef # Ultrasound imaging. UnRegisterAction: URIRef # The act of un-registering from a service.\n\nRelated actions:\n\n* [[RegisterAction]]: antonym of UnRegisterAction.\n* [[LeaveAction]]: Unlike LeaveAction, UnRegisterAction implies that you are unregistering from a service you werer previously registered, rather than leaving a team/group of people. UnemploymentSupport: URIRef # UnemploymentSupport: this is a benefit for unemployment support. UnincorporatedAssociationCharity: URIRef # UnincorporatedAssociationCharity: Non-profit type referring to a charitable company that is not incorporated (UK). UnitPriceSpecification: URIRef # The price asked for a given offer by the respective organization or person. UnofficialLegalValue: URIRef # Indicates that a document has no particular or special standing (e.g. a republication of a law by a private publisher). UpdateAction: URIRef # The act of managing by changing/editing the state of the object. 
Urologic: URIRef # A specific branch of medical science that is concerned with the diagnosis and treatment of diseases pertaining to the urinary tract and the urogenital system. UsageOrScheduleHealthAspect: URIRef # Content about how, when, frequency and dosage of a topic. UseAction: URIRef # The act of applying an object to its intended purpose. UsedCondition: URIRef # Indicates that the item is used. UserBlocks: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserCheckins: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserComments: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserDownloads: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserInteraction: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserLikes: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserPageVisits: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. 
UserPlays: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserPlusOnes: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. UserReview: URIRef # A review created by an end-user (e.g. consumer, purchaser, attendee etc.), in contrast with [[CriticReview]]. UserTweets: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. VeganDiet: URIRef # A diet exclusive of all animal products. VegetarianDiet: URIRef # A diet exclusive of animal meat. Vehicle: URIRef # A vehicle is a device that is designed or used to transport people or cargo over land, water, air, or through space. Vein: URIRef # A type of blood vessel that specifically carries blood to the heart. VenueMap: URIRef # A venue map (e.g. for malls, auditoriums, museums, etc.). Vessel: URIRef # A component of the human body circulatory system comprised of an intricate network of hollow tubes that transport blood throughout the entire body. VeterinaryCare: URIRef # A vet's office. VideoGallery: URIRef # Web page type: Video gallery page. VideoGame: URIRef # A video game is an electronic game that involves human interaction with a user interface to generate visual feedback on a video device. VideoGameClip: URIRef # A short segment/part of a video game. VideoGameSeries: URIRef # A video game series. VideoObject: URIRef # A video file. VideoObjectSnapshot: URIRef # A specific and exact (byte-for-byte) version of a [[VideoObject]]. Two byte-for-byte identical files, for the purposes of this type, considered identical. If they have different embedded metadata the files will differ. 
Different external facts about the files, e.g. creator or dateCreated that aren't represented in their actual content, do not affect this notion of identity. ViewAction: URIRef # The act of consuming static visual content. VinylFormat: URIRef # VinylFormat. VirtualLocation: URIRef # An online or virtual location for attending events. For example, one may attend an online seminar or educational event. While a virtual location may be used as the location of an event, virtual locations should not be confused with physical locations in the real world. Virus: URIRef # Pathogenic virus that causes viral infection. VisualArtsEvent: URIRef # Event type: Visual arts event. VisualArtwork: URIRef # A work of art that is primarily visual in character. VitalSign: URIRef # Vital signs are measures of various physiological functions in order to assess the most basic body functions. Volcano: URIRef # A volcano, like Fuji san. VoteAction: URIRef # The act of expressing a preference from a fixed/finite/structured set of choices/options. WPAdBlock: URIRef # An advertising section of the page. WPFooter: URIRef # The footer section of the page. WPHeader: URIRef # The header section of the page. WPSideBar: URIRef # A sidebar section of the page. WantAction: URIRef # The act of expressing a desire about the object. An agent wants an object. WarrantyPromise: URIRef # A structured value representing the duration and scope of services that will be provided to a customer free of charge in case of a defect or malfunction of a product. WarrantyScope: URIRef # A range of of services that will be provided to a customer free of charge in case of a defect or malfunction of a product.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#Labor-BringIn\n* http://purl.org/goodrelations/v1#PartsAndLabor-BringIn\n* http://purl.org/goodrelations/v1#PartsAndLabor-PickUp WatchAction: URIRef # The act of consuming dynamic/moving visual content. Waterfall: URIRef # A waterfall, like Niagara. 
WearAction: URIRef # The act of dressing oneself in clothing. WearableMeasurementBack: URIRef # Measurement of the back section, for example of a jacket WearableMeasurementChestOrBust: URIRef # Measurement of the chest/bust section, for example of a suit WearableMeasurementCollar: URIRef # Measurement of the collar, for example of a shirt WearableMeasurementCup: URIRef # Measurement of the cup, for example of a bra WearableMeasurementHeight: URIRef # Measurement of the height, for example the heel height of a shoe WearableMeasurementHips: URIRef # Measurement of the hip section, for example of a skirt WearableMeasurementInseam: URIRef # Measurement of the inseam, for example of pants WearableMeasurementLength: URIRef # Represents the length, for example of a dress WearableMeasurementOutsideLeg: URIRef # Measurement of the outside leg, for example of pants WearableMeasurementSleeve: URIRef # Measurement of the sleeve length, for example of a shirt WearableMeasurementTypeEnumeration: URIRef # Enumerates common types of measurement for wearables products. WearableMeasurementWaist: URIRef # Measurement of the waist section, for example of pants WearableMeasurementWidth: URIRef # Measurement of the width, for example of shoes WearableSizeGroupBig: URIRef # Size group "Big" for wearables. WearableSizeGroupBoys: URIRef # Size group "Boys" for wearables. WearableSizeGroupEnumeration: URIRef # Enumerates common size groups (also known as "size types") for wearable products. WearableSizeGroupExtraShort: URIRef # Size group "Extra Short" for wearables. WearableSizeGroupExtraTall: URIRef # Size group "Extra Tall" for wearables. WearableSizeGroupGirls: URIRef # Size group "Girls" for wearables. WearableSizeGroupHusky: URIRef # Size group "Husky" (or "Stocky") for wearables. WearableSizeGroupInfants: URIRef # Size group "Infants" for wearables. WearableSizeGroupJuniors: URIRef # Size group "Juniors" for wearables. 
WearableSizeGroupMaternity: URIRef # Size group "Maternity" for wearables. WearableSizeGroupMens: URIRef # Size group "Mens" for wearables. WearableSizeGroupMisses: URIRef # Size group "Misses" (also known as "Missy") for wearables. WearableSizeGroupPetite: URIRef # Size group "Petite" for wearables. WearableSizeGroupPlus: URIRef # Size group "Plus" for wearables. WearableSizeGroupRegular: URIRef # Size group "Regular" for wearables. WearableSizeGroupShort: URIRef # Size group "Short" for wearables. WearableSizeGroupTall: URIRef # Size group "Tall" for wearables. WearableSizeGroupWomens: URIRef # Size group "Womens" for wearables. WearableSizeSystemAU: URIRef # Australian size system for wearables. WearableSizeSystemBR: URIRef # Brazilian size system for wearables. WearableSizeSystemCN: URIRef # Chinese size system for wearables. WearableSizeSystemContinental: URIRef # Continental size system for wearables. WearableSizeSystemDE: URIRef # German size system for wearables. WearableSizeSystemEN13402: URIRef # EN 13402 (joint European standard for size labelling of clothes). WearableSizeSystemEnumeration: URIRef # Enumerates common size systems specific for wearable products WearableSizeSystemEurope: URIRef # European size system for wearables. WearableSizeSystemFR: URIRef # French size system for wearables. WearableSizeSystemGS1: URIRef # GS1 (formerly NRF) size system for wearables. WearableSizeSystemIT: URIRef # Italian size system for wearables. WearableSizeSystemJP: URIRef # Japanese size system for wearables. WearableSizeSystemMX: URIRef # Mexican size system for wearables. WearableSizeSystemUK: URIRef # United Kingdom size system for wearables. WearableSizeSystemUS: URIRef # United States size system for wearables. WebAPI: URIRef # An application programming interface accessible over Web/Internet technologies. WebApplication: URIRef # Web applications. 
WebContent: URIRef # WebContent is a type representing all [[WebPage]], [[WebSite]] and [[WebPageElement]] content. It is sometimes the case that detailed distinctions between Web pages, sites and their parts is not always important or obvious. The [[WebContent]] type makes it easier to describe Web-addressable content without requiring such distinctions to always be stated. (The intent is that the existing types [[WebPage]], [[WebSite]] and [[WebPageElement]] will eventually be declared as subtypes of [[WebContent]]). WebPage: URIRef # A web page. Every web page is implicitly assumed to be declared to be of type WebPage, so the various properties about that webpage, such as breadcrumb may be used. We recommend explicit declaration if these properties are specified, but if they are found outside of an itemscope, they will be assumed to be about the page. WebPageElement: URIRef # A web page element, like a table or an image. WebSite: URIRef # A WebSite is a set of related web pages and other items typically served from a single web domain and accessible via URLs. Wednesday: URIRef # The day of the week between Tuesday and Thursday. WesternConventional: URIRef # The conventional Western system of medicine, that aims to apply the best available evidence gained from the scientific method to clinical decision making. Also known as conventional or Western medicine. Wholesale: URIRef # The drug's cost represents the wholesale acquisition cost of the drug. WholesaleStore: URIRef # A wholesale store. WinAction: URIRef # The act of achieving victory in a competitive activity. Winery: URIRef # A winery. Withdrawn: URIRef # Withdrawn. WorkBasedProgram: URIRef # A program with both an educational and employment component. Typically based at a workplace and structured around work-based learning, with the aim of instilling competencies related to an occupation. 
WorkBasedProgram is used to distinguish programs such as apprenticeships from school, college or other classroom based educational programs. WorkersUnion: URIRef # A Workers Union (also known as a Labor Union, Labour Union, or Trade Union) is an organization that promotes the interests of its worker members by collectively bargaining with management, organizing, and political lobbying. WriteAction: URIRef # The act of authoring written creative content. WritePermission: URIRef # Permission to write or edit the document. XPathType: URIRef # Text representing an XPath (typically but not necessarily version 1.0). XRay: URIRef # X-ray imaging. ZoneBoardingPolicy: URIRef # The airline boards by zones of the plane. Zoo: URIRef # A zoo. about: URIRef # The subject matter of the content. abridged: URIRef # Indicates whether the book is an abridged edition. abstract: URIRef # An abstract is a short description that summarizes a [[CreativeWork]]. accelerationTime: URIRef # The time needed to accelerate the vehicle from a given start velocity to a given target velocity.\n\nTypical unit code(s): SEC for seconds\n\n* Note: There are unfortunately no standard unit codes for seconds/0..100 km/h or seconds/0..60 mph. Simply use "SEC" for seconds and indicate the velocities in the [[name]] of the [[QuantitativeValue]], or use [[valueReference]] with a [[QuantitativeValue]] of 0..60 mph or 0..100 km/h to specify the reference speeds. acceptedAnswer: URIRef # The answer(s) that has been accepted as best, typically on a Question/Answer site. Sites vary in their selection mechanisms, e.g. drawing on community opinion and/or the view of the Question author. acceptedOffer: URIRef # The offer(s) -- e.g., product, quantity and price combinations -- included in the order. acceptedPaymentMethod: URIRef # The payment method(s) accepted by seller for this offer. acceptsReservations: URIRef # Indicates whether a FoodEstablishment accepts reservations. 
Values can be Boolean, an URL at which reservations can be made or (for backwards compatibility) the strings ```Yes``` or ```No```. accessCode: URIRef # Password, PIN, or access code needed for delivery (e.g. from a locker). accessMode: URIRef # The human sensory perceptual system or cognitive faculty through which a person may process or perceive information. Expected values include: auditory, tactile, textual, visual, colorDependent, chartOnVisual, chemOnVisual, diagramOnVisual, mathOnVisual, musicOnVisual, textOnVisual. accessModeSufficient: URIRef # A list of single or combined accessModes that are sufficient to understand all the intellectual content of a resource. Expected values include: auditory, tactile, textual, visual. accessibilityAPI: URIRef # Indicates that the resource is compatible with the referenced accessibility API ([WebSchemas wiki lists possible values](http://www.w3.org/wiki/WebSchemas/Accessibility)). accessibilityControl: URIRef # Identifies input methods that are sufficient to fully control the described resource ([WebSchemas wiki lists possible values](http://www.w3.org/wiki/WebSchemas/Accessibility)). accessibilityFeature: URIRef # Content features of the resource, such as accessible media, alternatives and supported enhancements for accessibility ([WebSchemas wiki lists possible values](http://www.w3.org/wiki/WebSchemas/Accessibility)). accessibilityHazard: URIRef # A characteristic of the described resource that is physiologically dangerous to some users. Related to WCAG 2.0 guideline 2.3 ([WebSchemas wiki lists possible values](http://www.w3.org/wiki/WebSchemas/Accessibility)). accessibilitySummary: URIRef # A human-readable summary of specific accessibility features or deficiencies, consistent with the other accessibility metadata but expressing subtleties such as "short descriptions are present but long descriptions will be needed for non-visual users" or "short descriptions are present and no long descriptions are needed." 
accommodationCategory: URIRef # Category of an [[Accommodation]], following real estate conventions e.g. RESO (see [PropertySubType](https://ddwiki.reso.org/display/DDW17/PropertySubType+Field), and [PropertyType](https://ddwiki.reso.org/display/DDW17/PropertyType+Field) fields for suggested values). accommodationFloorPlan: URIRef # A floorplan of some [[Accommodation]]. accountId: URIRef # The identifier for the account the payment will be applied to. accountMinimumInflow: URIRef # A minimum amount that has to be paid in every month. accountOverdraftLimit: URIRef # An overdraft is an extension of credit from a lending institution when an account reaches zero. An overdraft allows the individual to continue withdrawing money even if the account has no funds in it. Basically the bank allows people to borrow a set amount of money. accountablePerson: URIRef # Specifies the Person that is legally accountable for the CreativeWork. acquireLicensePage: URIRef # Indicates a page documenting how licenses can be purchased or otherwise acquired, for the current item. acquiredFrom: URIRef # The organization or person from which the product was acquired. acrissCode: URIRef # The ACRISS Car Classification Code is a code used by many car rental companies, for classifying vehicles. ACRISS stands for Association of Car Rental Industry Systems and Standards. actionAccessibilityRequirement: URIRef # A set of requirements that a must be fulfilled in order to perform an Action. If more than one value is specied, fulfilling one set of requirements will allow the Action to be performed. actionApplication: URIRef # An application that can complete the request. actionOption: URIRef # A sub property of object. The options subject to this action. actionPlatform: URIRef # The high level platform(s) where the Action can be performed for the given URL. To specify a specific application or operating system instance, use actionApplication. 
actionStatus: URIRef # Indicates the current disposition of the Action. actionableFeedbackPolicy: URIRef # For a [[NewsMediaOrganization]] or other news-related [[Organization]], a statement about public engagement activities (for news media, the newsroom’s), including involving the public - digitally or otherwise -- in coverage decisions, reporting and activities after publication. activeIngredient: URIRef # An active ingredient, typically chemical compounds and/or biologic substances. activityDuration: URIRef # Length of time to engage in the activity. activityFrequency: URIRef # How often one should engage in the activity. actor: URIRef # An actor, e.g. in tv, radio, movie, video games etc., or in an event. Actors can be associated with individual items or with a series, episode, clip. actors: URIRef # An actor, e.g. in tv, radio, movie, video games etc. Actors can be associated with individual items or with a series, episode, clip. addOn: URIRef # An additional offer that can only be obtained in combination with the first base offer (e.g. supplements and extensions that are available for a surcharge). additionalName: URIRef # An additional name for a Person, can be used for a middle name. additionalNumberOfGuests: URIRef # If responding yes, the number of guests who will attend in addition to the invitee. additionalProperty: URIRef # A property-value pair representing an additional characteristics of the entitity, e.g. a product feature or another characteristic for which there is no matching property in schema.org.\n\nNote: Publishers should be aware that applications designed to use specific schema.org properties (e.g. https://schema.org/width, https://schema.org/color, https://schema.org/gtin13, ...) will typically expect such data to be provided using those properties, rather than using the generic property/value mechanism. 
additionalType: URIRef # An additional type for the item, typically used for adding more specific types from external vocabularies in microdata syntax. This is a relationship between something and a class that the thing is in. In RDFa syntax, it is better to use the native RDFa syntax - the 'typeof' attribute - for multiple types. Schema.org tools may have only weaker understanding of extra types, in particular those defined externally. additionalVariable: URIRef # Any additional component of the exercise prescription that may need to be articulated to the patient. This may include the order of exercises, the number of repetitions of movement, quantitative distance, progressions over time, etc. address: URIRef # Physical address of the item. addressCountry: URIRef # The country. For example, USA. You can also provide the two-letter [ISO 3166-1 alpha-2 country code](http://en.wikipedia.org/wiki/ISO_3166-1). addressLocality: URIRef # The locality in which the street address is, and which is in the region. For example, Mountain View. addressRegion: URIRef # The region in which the locality is, and which is in the country. For example, California or another appropriate first-level [Administrative division](https://en.wikipedia.org/wiki/List_of_administrative_divisions_by_country) administrationRoute: URIRef # A route by which this drug may be administered, e.g. 'oral'. advanceBookingRequirement: URIRef # The amount of time that is required between accepting the offer and the actual usage of the resource or service. adverseOutcome: URIRef # A possible complication and/or side effect of this therapy. If it is known that an adverse outcome is serious (resulting in death, disability, or permanent damage; requiring hospitalization; or is otherwise life-threatening or requires immediate medical attention), tag it as a seriouseAdverseOutcome instead. affectedBy: URIRef # Drugs that affect the test's results. 
affiliation: URIRef # An organization that this person is affiliated with. For example, a school/university, a club, or a team. afterMedia: URIRef # A media object representing the circumstances after performing this direction. agent: URIRef # The direct performer or driver of the action (animate or inanimate). e.g. *John* wrote a book. aggregateRating: URIRef # The overall rating, based on a collection of reviews or ratings, of the item. aircraft: URIRef # The kind of aircraft (e.g., "Boeing 747"). album: URIRef # A music album. albumProductionType: URIRef # Classification of the album by it's type of content: soundtrack, live album, studio album, etc. albumRelease: URIRef # A release of this album. albumReleaseType: URIRef # The kind of release which this album is: single, EP or album. albums: URIRef # A collection of music albums. alcoholWarning: URIRef # Any precaution, guidance, contraindication, etc. related to consumption of alcohol while taking this drug. algorithm: URIRef # The algorithm or rules to follow to compute the score. alignmentType: URIRef # A category of alignment between the learning resource and the framework node. Recommended values include: 'requires', 'textComplexity', 'readingLevel', and 'educationalSubject'. alternateName: URIRef # An alias for the item. alternativeHeadline: URIRef # A secondary title of the CreativeWork. alternativeOf: URIRef # Another gene which is a variation of this one. alumni: URIRef # Alumni of an organization. alumniOf: URIRef # An organization that the person is an alumni of. amenityFeature: URIRef # An amenity feature (e.g. a characteristic or service) of the Accommodation. This generic property does not make a statement about whether the feature is included in an offer for the main accommodation or available at extra costs. amount: URIRef # The amount of money. amountOfThisGood: URIRef # The quantity of the goods included in the offer. 
announcementLocation: URIRef # Indicates a specific [[CivicStructure]] or [[LocalBusiness]] associated with the SpecialAnnouncement. For example, a specific testing facility or business with special opening hours. For a larger geographic region like a quarantine of an entire region, use [[spatialCoverage]]. annualPercentageRate: URIRef # The annual rate that is charged for borrowing (or made by investing), expressed as a single percentage number that represents the actual yearly cost of funds over the term of a loan. This includes any fees or additional costs associated with the transaction. answerCount: URIRef # The number of answers this question has received. answerExplanation: URIRef # A step-by-step or full explanation about Answer. Can outline how this Answer was achieved or contain more broad clarification or statement about it. antagonist: URIRef # The muscle whose action counteracts the specified muscle. appearance: URIRef # Indicates an occurence of a [[Claim]] in some [[CreativeWork]]. applicableLocation: URIRef # The location in which the status applies. applicantLocationRequirements: URIRef # The location(s) applicants can apply from. This is usually used for telecommuting jobs where the applicant does not need to be in a physical office. Note: This should not be used for citizenship or work visa requirements. application: URIRef # An application that can complete the request. applicationCategory: URIRef # Type of software application, e.g. 'Game, Multimedia'. applicationContact: URIRef # Contact details for further information relevant to this job posting. applicationDeadline: URIRef # The date at which the program stops collecting applications for the next enrollment cycle. applicationStartDate: URIRef # The date at which the program begins collecting applications for the next enrollment cycle. applicationSubCategory: URIRef # Subcategory of the application, e.g. 'Arcade Game'. 
applicationSuite: URIRef # The name of the application suite to which the application belongs (e.g. Excel belongs to Office). appliesToDeliveryMethod: URIRef # The delivery method(s) to which the delivery charge or payment charge specification applies. appliesToPaymentMethod: URIRef # The payment method(s) to which the payment charge specification applies. archiveHeld: URIRef # Collection, [fonds](https://en.wikipedia.org/wiki/Fonds), or item held, kept or maintained by an [[ArchiveOrganization]]. archivedAt: URIRef # Indicates a page or other link involved in archival of a [[CreativeWork]]. In the case of [[MediaReview]], the items in a [[MediaReviewItem]] may often become inaccessible, but be archived by archival, journalistic, activist, or law enforcement organizations. In such cases, the referenced page may not directly publish the content. area: URIRef # The area within which users can expect to reach the broadcast service. areaServed: URIRef # The geographic area where a service or offered item is provided. arrivalAirport: URIRef # The airport where the flight terminates. arrivalBoatTerminal: URIRef # The terminal or port from which the boat arrives. arrivalBusStop: URIRef # The stop or station from which the bus arrives. arrivalGate: URIRef # Identifier of the flight's arrival gate. arrivalPlatform: URIRef # The platform where the train arrives. arrivalStation: URIRef # The station where the train trip ends. arrivalTerminal: URIRef # Identifier of the flight's arrival terminal. arrivalTime: URIRef # The expected arrival time. artEdition: URIRef # The number of copies when multiple copies of a piece of artwork are produced - e.g. for a limited edition of 20 prints, 'artEdition' refers to the total number of copies (in this example "20"). artMedium: URIRef # The material used. (e.g. Oil, Watercolour, Acrylic, Linoprint, Marble, Cyanotype, Digital, Lithograph, DryPoint, Intaglio, Pastel, Woodcut, Pencil, Mixed Media, etc.) 
arterialBranch: URIRef # The branches that comprise the arterial structure. artform: URIRef # e.g. Painting, Drawing, Sculpture, Print, Photograph, Assemblage, Collage, etc. articleBody: URIRef # The actual body of the article. articleSection: URIRef # Articles may belong to one or more 'sections' in a magazine or newspaper, such as Sports, Lifestyle, etc. artist: URIRef # The primary artist for a work in a medium other than pencils or digital line art--for example, if the primary artwork is done in watercolors or digital paints. artworkSurface: URIRef # The supporting materials for the artwork, e.g. Canvas, Paper, Wood, Board, etc. aspect: URIRef # An aspect of medical practice that is considered on the page, such as 'diagnosis', 'treatment', 'causes', 'prognosis', 'etiology', 'epidemiology', etc. assembly: URIRef # Library file name e.g., mscorlib.dll, system.web.dll. assemblyVersion: URIRef # Associated product/technology version. e.g., .NET Framework 4.5. assesses: URIRef # The item being described is intended to assess the competency or learning outcome defined by the referenced term. associatedAnatomy: URIRef # The anatomy of the underlying organ system or structures associated with this entity. associatedArticle: URIRef # A NewsArticle associated with the Media Object. associatedClaimReview: URIRef # An associated [[ClaimReview]], related by specific common content, topic or claim. The expectation is that this property would be most typically used in cases where a single activity is conducting both claim reviews and media reviews, in which case [[relatedMediaReview]] would commonly be used on a [[ClaimReview]], while [[relatedClaimReview]] would be used on [[MediaReview]]. associatedDisease: URIRef # Disease associated to this BioChemEntity. Such disease can be a MedicalCondition or a URL. If you want to add an evidence supporting the association, please use PropertyValue. associatedMedia: URIRef # A media object that encodes this CreativeWork. 
This property is a synonym for encoding. associatedMediaReview: URIRef # An associated [[MediaReview]], related by specific common content, topic or claim. The expectation is that this property would be most typically used in cases where a single activity is conducting both claim reviews and media reviews, in which case [[relatedMediaReview]] would commonly be used on a [[ClaimReview]], while [[relatedClaimReview]] would be used on [[MediaReview]]. associatedPathophysiology: URIRef # If applicable, a description of the pathophysiology associated with the anatomical system, including potential abnormal changes in the mechanical, physical, and biochemical functions of the system. associatedReview: URIRef # An associated [[Review]]. athlete: URIRef # A person that acts as performing member of a sports team; a player as opposed to a coach. attendee: URIRef # A person or organization attending the event. attendees: URIRef # A person attending the event. audience: URIRef # An intended audience, i.e. a group for whom something was created. audienceType: URIRef # The target group associated with a given audience (e.g. veterans, car owners, musicians, etc.). audio: URIRef # An embedded audio object. authenticator: URIRef # The Organization responsible for authenticating the user's subscription. For example, many media apps require a cable/satellite provider to authenticate your subscription before playing media. author: URIRef # The author of this content or rating. Please note that author is special in that HTML 5 provides a special mechanism for indicating authorship via the rel tag. That is equivalent to this and may be used interchangeably. availability: URIRef # The availability of this item—for example In stock, Out of stock, Pre-order, etc. availabilityEnds: URIRef # The end of the availability of the product or service included in the offer. availabilityStarts: URIRef # The beginning of the availability of the product or service included in the offer. 
availableAtOrFrom: URIRef # The place(s) from which the offer can be obtained (e.g. store locations). availableChannel: URIRef # A means of accessing the service (e.g. a phone bank, a web site, a location, etc.). availableDeliveryMethod: URIRef # The delivery method(s) available for this offer. availableFrom: URIRef # When the item is available for pickup from the store, locker, etc. availableIn: URIRef # The location in which the strength is available. availableLanguage: URIRef # A language someone may use with or at the item, service or place. Please use one of the language codes from the [IETF BCP 47 standard](http://tools.ietf.org/html/bcp47). See also [[inLanguage]] availableOnDevice: URIRef # Device required to run the application. Used in cases where a specific make/model is required to run the application. availableService: URIRef # A medical service available from this provider. availableStrength: URIRef # An available dosage strength for the drug. availableTest: URIRef # A diagnostic test or procedure offered by this lab. availableThrough: URIRef # After this date, the item will no longer be available for pickup. award: URIRef # An award won by or for this item. awards: URIRef # Awards won by or for this item. awayTeam: URIRef # The away team in a sports event. backstory: URIRef # For an [[Article]], typically a [[NewsArticle]], the backstory property provides a textual summary giving a brief explanation of why and how an article was created. In a journalistic setting this could include information about reporting process, methods, interviews, data sources, etc. bankAccountType: URIRef # The type of a bank account. baseSalary: URIRef # The base salary of the job or of an employee in an EmployeeRole. bccRecipient: URIRef # A sub property of recipient. The recipient blind copied on a message. bed: URIRef # The type of bed or beds included in the accommodation. For the single case of just one bed of a certain type, you use bed directly with a text. 
If you want to indicate the quantity of a certain kind of bed, use an instance of BedDetails. For more detailed information, use the amenityFeature property. beforeMedia: URIRef # A media object representing the circumstances before performing this direction. beneficiaryBank: URIRef # A bank or bank’s branch, financial institution or international financial institution operating the beneficiary’s bank account or releasing funds for the beneficiary. benefits: URIRef # Description of benefits associated with the job. benefitsSummaryUrl: URIRef # The URL that goes directly to the summary of benefits and coverage for the specific standard plan or plan variation. bestRating: URIRef # The highest value allowed in this rating system. If bestRating is omitted, 5 is assumed. billingAddress: URIRef # The billing address for the order. billingDuration: URIRef # Specifies for how long this price (or price component) will be billed. Can be used, for example, to model the contractual duration of a subscription or payment plan. Type can be either a Duration or a Number (in which case the unit of measurement, for example month, is specified by the unitCode property). billingIncrement: URIRef # This property specifies the minimal quantity and rounding increment that will be the basis for the billing. The unit of measurement is specified by the unitCode property. billingPeriod: URIRef # The time interval used to compute the invoice. billingStart: URIRef # Specifies after how much time this price (or price component) becomes valid and billing starts. Can be used, for example, to model a price increase after the first year of a subscription. The unit of measurement is specified by the unitCode property. bioChemInteraction: URIRef # A BioChemEntity that is known to interact with this item. bioChemSimilarity: URIRef # A similar BioChemEntity, e.g., obtained by fingerprint similarity algorithms. biologicalRole: URIRef # A role played by the BioChemEntity within a biological context. 
biomechnicalClass: URIRef # The biomechanical properties of the bone. birthDate: URIRef # Date of birth. birthPlace: URIRef # The place where the person was born. bitrate: URIRef # The bitrate of the media object. blogPost: URIRef # A posting that is part of this blog. blogPosts: URIRef # Indicates a post that is part of a [[Blog]]. Note that historically, what we term a "Blog" was once known as a "weblog", and that what we term a "BlogPosting" is now often colloquially referred to as a "blog". bloodSupply: URIRef # The blood vessel that carries blood from the heart to the muscle. boardingGroup: URIRef # The airline-specific indicator of boarding order / preference. boardingPolicy: URIRef # The type of boarding policy used by the airline (e.g. zone-based or group-based). bodyLocation: URIRef # Location in the body of the anatomical structure. bodyType: URIRef # Indicates the design and body style of the vehicle (e.g. station wagon, hatchback, etc.). bookEdition: URIRef # The edition of the book. bookFormat: URIRef # The format of the book. bookingAgent: URIRef # 'bookingAgent' is an out-dated term indicating a 'broker' that serves as a booking agent. bookingTime: URIRef # The date and time the reservation was booked. borrower: URIRef # A sub property of participant. The person that borrows the object being lent. box: URIRef # A box is the area enclosed by the rectangle formed by two points. The first point is the lower corner, the second point is the upper corner. A box is expressed as two points separated by a space character. branch: URIRef # The branches that delineate from the nerve bundle. Not to be confused with [[branchOf]]. branchCode: URIRef # A short textual code (also called "store code") that uniquely identifies a place of business. 
The code is typically assigned by the parentOrganization and used in structured URLs.\n\nFor example, in the URL http://www.starbucks.co.uk/store-locator/etc/detail/3047 the code "3047" is a branchCode for a particular branch. branchOf: URIRef # The larger organization that this local business is a branch of, if any. Not to be confused with (anatomical)[[branch]]. brand: URIRef # The brand(s) associated with a product or service, or the brand(s) maintained by an organization or business person. breadcrumb: URIRef # A set of links that can help a user understand and navigate a website hierarchy. breastfeedingWarning: URIRef # Any precaution, guidance, contraindication, etc. related to this drug's use by breastfeeding mothers. broadcastAffiliateOf: URIRef # The media network(s) whose content is broadcast on this station. broadcastChannelId: URIRef # The unique address by which the BroadcastService can be identified in a provider lineup. In US, this is typically a number. broadcastDisplayName: URIRef # The name displayed in the channel guide. For many US affiliates, it is the network name. broadcastFrequency: URIRef # The frequency used for over-the-air broadcasts. Numeric values or simple ranges e.g. 87-99. In addition a shortcut idiom is supported for frequences of AM and FM radio channels, e.g. "87 FM". broadcastFrequencyValue: URIRef # The frequency in MHz for a particular broadcast. broadcastOfEvent: URIRef # The event being broadcast such as a sporting event or awards ceremony. broadcastServiceTier: URIRef # The type of service required to have access to the channel (e.g. Standard or Premium). broadcastSignalModulation: URIRef # The modulation (e.g. FM, AM, etc) used by a particular broadcast service. broadcastSubChannel: URIRef # The subchannel used for the broadcast. 
broadcastTimezone: URIRef # The timezone in [ISO 8601 format](http://en.wikipedia.org/wiki/ISO_8601) for which the service bases its broadcasts broadcaster: URIRef # The organization owning or operating the broadcast service. broker: URIRef # An entity that arranges for an exchange between a buyer and a seller. In most cases a broker never acquires or releases ownership of a product or service involved in an exchange. If it is not clear whether an entity is a broker, seller, or buyer, the latter two terms are preferred. browserRequirements: URIRef # Specifies browser requirements in human-readable text. For example, 'requires HTML5 support'. busName: URIRef # The name of the bus (e.g. Bolt Express). busNumber: URIRef # The unique identifier for the bus. businessDays: URIRef # Days of the week when the merchant typically operates, indicated via opening hours markup. businessFunction: URIRef # The business function (e.g. sell, lease, repair, dispose) of the offer or component of a bundle (TypeAndQuantityNode). The default is http://purl.org/goodrelations/v1#Sell. buyer: URIRef # A sub property of participant. The participant/person/organization that bought the object. byArtist: URIRef # The artist that performed this album or recording. byDay: URIRef # Defines the day(s) of the week on which a recurring [[Event]] takes place. May be specified using either [[DayOfWeek]], or alternatively [[Text]] conforming to iCal's syntax for byDay recurrence rules. byMonth: URIRef # Defines the month(s) of the year on which a recurring [[Event]] takes place. Specified as an [[Integer]] between 1-12. January is 1. byMonthDay: URIRef # Defines the day(s) of the month on which a recurring [[Event]] takes place. Specified as an [[Integer]] between 1-31. byMonthWeek: URIRef # Defines the week(s) of the month on which a recurring Event takes place. Specified as an Integer between 1-5. 
For clarity, byMonthWeek is best used in conjunction with byDay to indicate concepts like the first and third Mondays of a month. callSign: URIRef # A [callsign](https://en.wikipedia.org/wiki/Call_sign), as used in broadcasting and radio communications to identify people, radio and TV stations, or vehicles. calories: URIRef # The number of calories. candidate: URIRef # A sub property of object. The candidate subject of this action. caption: URIRef # The caption for this object. For downloadable machine formats (closed caption, subtitles etc.) use MediaObject and indicate the [[encodingFormat]]. carbohydrateContent: URIRef # The number of grams of carbohydrates. cargoVolume: URIRef # The available volume for cargo or luggage. For automobiles, this is usually the trunk volume.\n\nTypical unit code(s): LTR for liters, FTQ for cubic foot/feet\n\nNote: You can use [[minValue]] and [[maxValue]] to indicate ranges. carrier: URIRef # 'carrier' is an out-dated term indicating the 'provider' for parcel delivery and flights. carrierRequirements: URIRef # Specifies specific carrier(s) requirements for the application (e.g. an application may only work on a specific carrier network). cashBack: URIRef # A cardholder benefit that pays the cardholder a small percentage of their net expenditures. catalog: URIRef # A data catalog which contains this dataset. catalogNumber: URIRef # The catalog number for the release. category: URIRef # A category for the item. Greater signs or slashes can be used to informally indicate a category hierarchy. causeOf: URIRef # The condition, complication, symptom, sign, etc. caused. ccRecipient: URIRef # A sub property of recipient. The recipient copied on a message. character: URIRef # Fictional person connected with a creative work. characterAttribute: URIRef # A piece of data that represents a particular aspect of a fictional character (skill, power, character points, advantage, disadvantage). 
characterName: URIRef # The name of a character played in some acting or performing role, i.e. in a PerformanceRole. cheatCode: URIRef # Cheat codes to the game. checkinTime: URIRef # The earliest someone may check into a lodging establishment. checkoutTime: URIRef # The latest someone may check out of a lodging establishment. chemicalComposition: URIRef # The chemical composition describes the identity and relative ratio of the chemical elements that make up the substance. chemicalRole: URIRef # A role played by the BioChemEntity within a chemical context. childMaxAge: URIRef # Maximal age of the child. childMinAge: URIRef # Minimal age of the child. childTaxon: URIRef # Closest child taxa of the taxon in question. children: URIRef # A child of the person. cholesterolContent: URIRef # The number of milligrams of cholesterol. circle: URIRef # A circle is the circular region of a specified radius centered at a specified latitude and longitude. A circle is expressed as a pair followed by a radius in meters. citation: URIRef # A citation or reference to another creative work, such as another publication, web page, scholarly article, etc. claimInterpreter: URIRef # For a [[Claim]] interpreted from [[MediaObject]] content sed to indicate a claim contained, implied or refined from the content of a [[MediaObject]]. claimReviewed: URIRef # A short summary of the specific claims reviewed in a ClaimReview. clincalPharmacology: URIRef # Description of the absorption and elimination of drugs, including their concentration (pharmacokinetics, pK) and biological effects (pharmacodynamics, pD). clinicalPharmacology: URIRef # Description of the absorption and elimination of drugs, including their concentration (pharmacokinetics, pK) and biological effects (pharmacodynamics, pD). clipNumber: URIRef # Position of the clip within an ordered group of clips. closes: URIRef # The closing hour of the place or service on the given day(s) of the week. 
coach: URIRef # A person that acts in a coaching role for a sports team. code: URIRef # A medical code for the entity, taken from a controlled vocabulary or ontology such as ICD-9, DiseasesDB, MeSH, SNOMED-CT, RxNorm, etc. codeRepository: URIRef # Link to the repository where the un-compiled, human readable code and related code is located (SVN, github, CodePlex). codeSampleType: URIRef # What type of code sample: full (compile ready) solution, code snippet, inline code, scripts, template. codeValue: URIRef # A short textual code that uniquely identifies the value. codingSystem: URIRef # The coding system, e.g. 'ICD-10'. colleague: URIRef # A colleague of the person. colleagues: URIRef # A colleague of the person. collection: URIRef # A sub property of object. The collection target of the action. collectionSize: URIRef # The number of items in the [[Collection]]. color: URIRef # The color of the product. colorist: URIRef # The individual who adds color to inked drawings. comment: URIRef # Comments, typically from users. commentCount: URIRef # The number of comments this CreativeWork (e.g. Article, Question or Answer) has received. This is most applicable to works published in Web sites with commenting system; additional comments may exist elsewhere. commentText: URIRef # The text of the UserComment. commentTime: URIRef # The time at which the UserComment was made. competencyRequired: URIRef # Knowledge, skill, ability or personal attribute that must be demonstrated by a person or other entity in order to do something such as earn an Educational Occupational Credential or understand a LearningResource. competitor: URIRef # A competitor in a sports event. composer: URIRef # The person or organization who wrote a composition, or who is the composer of a work performed at some event. comprisedOf: URIRef # Specifying something physically contained by something else. 
Typically used here for the underlying anatomical structures, such as organs, that comprise the anatomical system. conditionsOfAccess: URIRef # Conditions that affect the availability of, or method(s) of access to, an item. Typically used for real world items such as an [[ArchiveComponent]] held by an [[ArchiveOrganization]]. This property is not suitable for use as a general Web access control mechanism. It is expressed only in natural language.\n\nFor example "Available by appointment from the Reading Room" or "Accessible only from logged-in accounts ". confirmationNumber: URIRef # A number that confirms the given order or payment has been received. connectedTo: URIRef # Other anatomical structures to which this structure is connected. constrainingProperty: URIRef # Indicates a property used as a constraint to define a [[StatisticalPopulation]] with respect to the set of entities corresponding to an indicated type (via [[populationType]]). contactOption: URIRef # An option available on this contact point (e.g. a toll-free number or support for hearing-impaired callers). contactPoint: URIRef # A contact point for a person or organization. contactPoints: URIRef # A contact point for a person or organization. contactType: URIRef # A person or organization can have different contact points, for different purposes. For example, a sales contact point, a PR contact point and so on. This property is used to specify the kind of contact point. contactlessPayment: URIRef # A secure method for consumers to purchase products or services via debit, credit or smartcards by using RFID or NFC technology. containedIn: URIRef # The basic containment relation between a place and one that contains it. containedInPlace: URIRef # The basic containment relation between a place and one that contains it. containsPlace: URIRef # The basic containment relation between a place and another that it contains. containsSeason: URIRef # A season that is part of the media series. 
contentLocation: URIRef # The location depicted or described in the content. For example, the location in a photograph or painting. contentRating: URIRef # Official rating of a piece of content—for example,'MPAA PG-13'. contentReferenceTime: URIRef # The specific time described by a creative work, for works (e.g. articles, video objects etc.) that emphasise a particular moment within an Event. contentSize: URIRef # File size in (mega/kilo) bytes. contentType: URIRef # The supported content type(s) for an EntryPoint response. contentUrl: URIRef # Actual bytes of the media object, for example the image file or video file. contraindication: URIRef # A contraindication for this therapy. contributor: URIRef # A secondary contributor to the CreativeWork or Event. cookTime: URIRef # The time it takes to actually cook the dish, in [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601). cookingMethod: URIRef # The method of cooking, such as Frying, Steaming, ... copyrightHolder: URIRef # The party holding the legal copyright to the CreativeWork. copyrightNotice: URIRef # Text of a notice appropriate for describing the copyright aspects of this Creative Work, ideally indicating the owner of the copyright for the Work. copyrightYear: URIRef # The year during which the claimed copyright for the CreativeWork was first asserted. correction: URIRef # Indicates a correction to a [[CreativeWork]], either via a [[CorrectionComment]], textually or in another document. correctionsPolicy: URIRef # For an [[Organization]] (e.g. [[NewsMediaOrganization]]), a statement describing (in news media, the newsroom’s) disclosure and correction policy for errors. costCategory: URIRef # The category of cost, such as wholesale, retail, reimbursement cap, etc. costCurrency: URIRef # The currency (in 3-letter of the drug cost. See: http://en.wikipedia.org/wiki/ISO_4217. costOrigin: URIRef # Additional details to capture the origin of the cost data. For example, 'Medicare Part B'. 
costPerUnit: URIRef # The cost per unit of the drug. countriesNotSupported: URIRef # Countries for which the application is not supported. You can also provide the two-letter ISO 3166-1 alpha-2 country code. countriesSupported: URIRef # Countries for which the application is supported. You can also provide the two-letter ISO 3166-1 alpha-2 country code. countryOfAssembly: URIRef # The place where the product was assembled. countryOfLastProcessing: URIRef # The place where the item (typically [[Product]]) was last processed and tested before importation. countryOfOrigin: URIRef # The country of origin of something, including products as well as creative works such as movie and TV content. In the case of TV and movie, this would be the country of the principle offices of the production company or individual responsible for the movie. For other kinds of [[CreativeWork]] it is difficult to provide fully general guidance, and properties such as [[contentLocation]] and [[locationCreated]] may be more applicable. In the case of products, the country of origin of the product. The exact interpretation of this may vary by context and product type, and cannot be fully enumerated here. course: URIRef # A sub property of location. The course where this action was taken. courseCode: URIRef # The identifier for the [[Course]] used by the course [[provider]] (e.g. CS101 or 6.001). courseMode: URIRef # The medium or means of delivery of the course instance or the mode of study, either as a text label (e.g. "online", "onsite" or "blended"; "synchronous" or "asynchronous"; "full-time" or "part-time") or as a URL reference to a term from a controlled vocabulary (e.g. https://ceds.ed.gov/element/001311#Asynchronous ). coursePrerequisites: URIRef # Requirements for taking the Course. May be completion of another [[Course]] or a textual description like "permission of instructor". Requirements may be a pre-requisite competency, referenced using [[AlignmentObject]]. 
courseWorkload: URIRef # The amount of work expected of students taking the course, often provided as a figure per week or per month, and may be broken down by type. For example, "2 hours of lectures, 1 hour of lab work and 3 hours of independent study per week". coverageEndTime: URIRef # The time when the live blog will stop covering the Event. Note that coverage may continue after the Event concludes. coverageStartTime: URIRef # The time when the live blog will begin covering the Event. Note that coverage may begin before the Event's start time. The LiveBlogPosting may also be created before coverage begins. creativeWorkStatus: URIRef # The status of a creative work in terms of its stage in a lifecycle. Example terms include Incomplete, Draft, Published, Obsolete. Some organizations define a set of terms for the stages of their publication lifecycle. creator: URIRef # The creator/author of this CreativeWork. This is the same as the Author property for CreativeWork. credentialCategory: URIRef # The category or type of credential being described, for example "degree”, “certificate”, “badge”, or more specific term. creditText: URIRef # Text that can be used to credit person(s) and/or organization(s) associated with a published Creative Work. creditedTo: URIRef # The group the release is credited to if different than the byArtist. For example, Red and Blue is credited to "Stefani Germanotta Band", but by Lady Gaga. cssSelector: URIRef # A CSS selector, e.g. of a [[SpeakableSpecification]] or [[WebPageElement]]. In the latter case, multiple matches within a page can constitute a single conceptual "Web page element". currenciesAccepted: URIRef # The currency accepted.\n\nUse standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. 
"BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR". currency: URIRef # The currency in which the monetary amount is expressed.\n\nUse standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. "BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR". currentExchangeRate: URIRef # The current price of a currency. customer: URIRef # Party placing the order or paying the invoice. customerRemorseReturnFees: URIRef # The type of return fees if the product is returned due to customer remorse. customerRemorseReturnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a product returned due to customer remorse. customerRemorseReturnShippingFeesAmount: URIRef # The amount of shipping costs if a product is returned due to customer remorse. Applicable when property [[customerRemorseReturnFees]] equals [[ReturnShippingFees]]. cutoffTime: URIRef # Order cutoff time allows merchants to describe the time after which they will no longer process orders received on that day. For orders processed after cutoff time, one day gets added to the delivery time estimate. This property is expected to be most typically used via the [[ShippingRateSettings]] publication pattern. The time is indicated using the ISO-8601 Time format, e.g. "23:30:00-05:00" would represent 6:30 pm Eastern Standard Time (EST) which is 5 hours behind Coordinated Universal Time (UTC). cvdCollectionDate: URIRef # collectiondate - Date for which patient counts are reported. cvdFacilityCounty: URIRef # Name of the County of the NHSN facility that this data record applies to. 
Use [[cvdFacilityId]] to identify the facility. To provide other details, [[healthcareReportingData]] can be used on a [[Hospital]] entry. cvdFacilityId: URIRef # Identifier of the NHSN facility that this data record applies to. Use [[cvdFacilityCounty]] to indicate the county. To provide other details, [[healthcareReportingData]] can be used on a [[Hospital]] entry. cvdNumBeds: URIRef # numbeds - HOSPITAL INPATIENT BEDS: Inpatient beds, including all staffed, licensed, and overflow (surge) beds used for inpatients. cvdNumBedsOcc: URIRef # numbedsocc - HOSPITAL INPATIENT BED OCCUPANCY: Total number of staffed inpatient beds that are occupied. cvdNumC19Died: URIRef # numc19died - DEATHS: Patients with suspected or confirmed COVID-19 who died in the hospital, ED, or any overflow location. cvdNumC19HOPats: URIRef # numc19hopats - HOSPITAL ONSET: Patients hospitalized in an NHSN inpatient care location with onset of suspected or confirmed COVID-19 14 or more days after hospitalization. cvdNumC19HospPats: URIRef # numc19hosppats - HOSPITALIZED: Patients currently hospitalized in an inpatient care location who have suspected or confirmed COVID-19. cvdNumC19MechVentPats: URIRef # numc19mechventpats - HOSPITALIZED and VENTILATED: Patients hospitalized in an NHSN inpatient care location who have suspected or confirmed COVID-19 and are on a mechanical ventilator. cvdNumC19OFMechVentPats: URIRef # numc19ofmechventpats - ED/OVERFLOW and VENTILATED: Patients with suspected or confirmed COVID-19 who are in the ED or any overflow location awaiting an inpatient bed and on a mechanical ventilator. cvdNumC19OverflowPats: URIRef # numc19overflowpats - ED/OVERFLOW: Patients with suspected or confirmed COVID-19 who are in the ED or any overflow location awaiting an inpatient bed. cvdNumICUBeds: URIRef # numicubeds - ICU BEDS: Total number of staffed inpatient intensive care unit (ICU) beds. 
cvdNumICUBedsOcc: URIRef # numicubedsocc - ICU BED OCCUPANCY: Total number of staffed inpatient ICU beds that are occupied. cvdNumTotBeds: URIRef # numtotbeds - ALL HOSPITAL BEDS: Total number of all Inpatient and outpatient beds, including all staffed,ICU, licensed, and overflow (surge) beds used for inpatients or outpatients. cvdNumVent: URIRef # numvent - MECHANICAL VENTILATORS: Total number of ventilators available. cvdNumVentUse: URIRef # numventuse - MECHANICAL VENTILATORS IN USE: Total number of ventilators in use. dataFeedElement: URIRef # An item within in a data feed. Data feeds may have many elements. dataset: URIRef # A dataset contained in this catalog. datasetTimeInterval: URIRef # The range of temporal applicability of a dataset, e.g. for a 2011 census dataset, the year 2011 (in ISO 8601 time interval format). dateCreated: URIRef # The date on which the CreativeWork was created or the item was added to a DataFeed. dateDeleted: URIRef # The datetime the item was removed from the DataFeed. dateIssued: URIRef # The date the ticket was issued. dateModified: URIRef # The date on which the CreativeWork was most recently modified or when the item's entry was modified within a DataFeed. datePosted: URIRef # Publication date of an online listing. datePublished: URIRef # Date of first broadcast/publication. dateRead: URIRef # The date/time at which the message has been read by the recipient if a single recipient exists. dateReceived: URIRef # The date/time the message was received if a single recipient exists. dateSent: URIRef # The date/time at which the message was sent. dateVehicleFirstRegistered: URIRef # The date of the first registration of the vehicle with the respective public authorities. dateline: URIRef # A [dateline](https://en.wikipedia.org/wiki/Dateline) is a brief piece of text included in news articles that describes where and when the story was written or filed though the date is often omitted. Sometimes only a placename is provided. 
Structured representations of dateline-related information can also be expressed more explicitly using [[locationCreated]] (which represents where a work was created e.g. where a news report was written). For location depicted or described in the content, use [[contentLocation]]. Dateline summaries are oriented more towards human readers than towards automated processing, and can vary substantially. Some examples: "BEIRUT, Lebanon, June 2.", "Paris, France", "December 19, 2017 11:43AM Reporting from Washington", "Beijing/Moscow", "QUEZON CITY, Philippines". dayOfWeek: URIRef # The day of the week for which these opening hours are valid. deathDate: URIRef # Date of death. deathPlace: URIRef # The place where the person died. defaultValue: URIRef # The default value of the input. For properties that expect a literal, the default is a literal value, for properties that expect an object, it's an ID reference to one of the current values. deliveryAddress: URIRef # Destination address. deliveryLeadTime: URIRef # The typical delay between the receipt of the order and the goods either leaving the warehouse or being prepared for pickup, in case the delivery method is on site pickup. deliveryMethod: URIRef # A sub property of instrument. The method of delivery. deliveryStatus: URIRef # New entry added as the package passes through each leg of its journey (from shipment to final delivery). deliveryTime: URIRef # The total delay between the receipt of the order and the goods reaching the final customer. department: URIRef # A relationship between an organization and a department of that organization, also described as an organization (allowing different urls, logos, opening hours). For example: a store with a pharmacy, or a bakery with a cafe. departureAirport: URIRef # The airport where the flight originates. departureBoatTerminal: URIRef # The terminal or port from which the boat departs. departureBusStop: URIRef # The stop or station from which the bus departs. 
departureGate: URIRef # Identifier of the flight's departure gate. departurePlatform: URIRef # The platform from which the train departs. departureStation: URIRef # The station from which the train departs. departureTerminal: URIRef # Identifier of the flight's departure terminal. departureTime: URIRef # The expected departure time. dependencies: URIRef # Prerequisites needed to fulfill steps in article. depth: URIRef # The depth of the item. description: URIRef # A description of the item. device: URIRef # Device required to run the application. Used in cases where a specific make/model is required to run the application. diagnosis: URIRef # One or more alternative conditions considered in the differential diagnosis process as output of a diagnosis process. diagram: URIRef # An image containing a diagram that illustrates the structure and/or its component substructures and/or connections with other structures. diet: URIRef # A sub property of instrument. The diet used in this action. dietFeatures: URIRef # Nutritional information specific to the dietary plan. May include dietary recommendations on what foods to avoid, what foods to consume, and specific alterations/deviations from the USDA or other regulatory body's approved dietary guidelines. differentialDiagnosis: URIRef # One of a set of differential diagnoses for the condition. Specifically, a closely-related or competing diagnosis typically considered later in the cognitive process whereby this medical condition is distinguished from others most likely responsible for a similar collection of signs and symptoms to reach the most parsimonious diagnosis or diagnoses in a patient. directApply: URIRef # Indicates whether an [[url]] that is associated with a [[JobPosting]] enables direct application for the job, via the posting website. A job posting is considered to have directApply of [[True]] if an application process for the specified job can be directly initiated via the url(s) given (noting that e.g. 
multiple internet domains might nevertheless be involved at an implementation level). A value of [[False]] is appropriate if there is no clear path to applying directly online for the specified job, navigating directly from the JobPosting url(s) supplied. director: URIRef # A director of e.g. tv, radio, movie, video gaming etc. content, or of an event. Directors can be associated with individual items or with a series, episode, clip. directors: URIRef # A director of e.g. tv, radio, movie, video games etc. content. Directors can be associated with individual items or with a series, episode, clip. disambiguatingDescription: URIRef # A sub property of description. A short description of the item used to disambiguate from other, similar items. Information from other properties (in particular, name) may be necessary for the description to be useful for disambiguation. discount: URIRef # Any discount applied (to an Order). discountCode: URIRef # Code used to redeem a discount. discountCurrency: URIRef # The currency of the discount.\n\nUse standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. "BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR". discusses: URIRef # Specifies the CreativeWork associated with the UserComment. discussionUrl: URIRef # A link to the page containing the comments of the CreativeWork. diseasePreventionInfo: URIRef # Information about disease prevention. diseaseSpreadStatistics: URIRef # Statistical information about the spread of a disease, either as [[WebContent]], or described directly as a [[Dataset]], or the specific [[Observation]]s in the dataset. When a [[WebContent]] URL is provided, the page indicated might also contain more such markup. 
dissolutionDate: URIRef # The date that this organization was dissolved. distance: URIRef # The distance travelled, e.g. exercising or travelling. distinguishingSign: URIRef # One of a set of signs and symptoms that can be used to distinguish this diagnosis from others in the differential diagnosis. distribution: URIRef # A downloadable form of this dataset, at a specific location, in a specific format. diversityPolicy: URIRef # Statement on diversity policy by an [[Organization]] e.g. a [[NewsMediaOrganization]]. For a [[NewsMediaOrganization]], a statement describing the newsroom’s diversity policy on both staffing and sources, typically providing staffing data. diversityStaffingReport: URIRef # For an [[Organization]] (often but not necessarily a [[NewsMediaOrganization]]), a report on staffing diversity issues. In a news context this might be for example ASNE or RTDNA (US) reports, or self-reported. documentation: URIRef # Further documentation describing the Web API in more detail. doesNotShip: URIRef # Indicates when shipping to a particular [[shippingDestination]] is not available. domainIncludes: URIRef # Relates a property to a class that is (one of) the type(s) the property is expected to be used on. domiciledMortgage: URIRef # Whether borrower is a resident of the jurisdiction where the property is located. doorTime: URIRef # The time admission will commence. dosageForm: URIRef # A dosage form in which this drug/supplement is available, e.g. 'tablet', 'suspension', 'injection'. doseSchedule: URIRef # A dosing schedule for the drug for a given population, either observed, recommended, or maximum dose based on the type used. doseUnit: URIRef # The unit of the dose, e.g. 'mg'. doseValue: URIRef # The value of the dose, e.g. 500. downPayment: URIRef # a type of payment made in cash during the onset of the purchase of an expensive good/service. The payment typically represents only a percentage of the full purchase price. 
downloadUrl: URIRef # If the file can be downloaded, URL to download the binary. downvoteCount: URIRef # The number of downvotes this question, answer or comment has received from the community. drainsTo: URIRef # The vasculature that the vein drains into. driveWheelConfiguration: URIRef # The drive wheel configuration, i.e. which roadwheels will receive torque from the vehicle's engine via the drivetrain. dropoffLocation: URIRef # Where a rental car can be dropped off. dropoffTime: URIRef # When a rental car can be dropped off. drug: URIRef # Specifying a drug or medicine used in a medication procedure. drugClass: URIRef # The class of drug this belongs to (e.g., statins). drugUnit: URIRef # The unit in which the drug is measured, e.g. '5 mg tablet'. duns: URIRef # The Dun & Bradstreet DUNS number for identifying an organization or business person. duplicateTherapy: URIRef # A therapy that duplicates or overlaps this one. duration: URIRef # The duration of the item (movie, audio recording, event, etc.) in [ISO 8601 date format](http://en.wikipedia.org/wiki/ISO_8601). durationOfWarranty: URIRef # The duration of the warranty promise. Common unitCode values are ANN for year, MON for months, or DAY for days. duringMedia: URIRef # A media object representing the circumstances while performing this direction. earlyPrepaymentPenalty: URIRef # The amount to be paid as a penalty in the event of early payment of the loan. editEIDR: URIRef # An [EIDR](https://eidr.org/) (Entertainment Identifier Registry) [[identifier]] representing a specific edit / edition for a work of film or television. For example, the motion picture known as "Ghostbusters" whose [[titleEIDR]] is "10.5240/7EC7-228A-510A-053E-CBB8-J", has several edits e.g. "10.5240/1F2A-E1C5-680A-14C6-E76B-I" and "10.5240/8A35-3BEE-6497-5D12-9E4F-3". 
Since schema.org types like [[Movie]] and [[TVEpisode]] can be used for both works and their multiple expressions, it is possible to use [[titleEIDR]] alone (for a general description), or alongside [[editEIDR]] for a more edit-specific description. editor: URIRef # Specifies the Person who edited the CreativeWork. eduQuestionType: URIRef # For questions that are part of learning resources (e.g. Quiz), eduQuestionType indicates the format of question being given. Example: "Multiple choice", "Open ended", "Flashcard". educationRequirements: URIRef # Educational background needed for the position or Occupation. educationalAlignment: URIRef # An alignment to an established educational framework. This property should not be used where the nature of the alignment can be described using a simple property, for example to express that a resource [[teaches]] or [[assesses]] a competency. educationalCredentialAwarded: URIRef # A description of the qualification, award, certificate, diploma or other educational credential awarded as a consequence of successful completion of this course or program. educationalFramework: URIRef # The framework to which the resource being described is aligned. educationalLevel: URIRef # The level in terms of progression through an educational or training context. Examples of educational levels include 'beginner', 'intermediate' or 'advanced', and formal sets of level indicators. educationalProgramMode: URIRef # Similar to courseMode, The medium or means of delivery of the program as a whole. The value may either be a text label (e.g. "online", "onsite" or "blended"; "synchronous" or "asynchronous"; "full-time" or "part-time") or a URL reference to a term from a controlled vocabulary (e.g. https://ceds.ed.gov/element/001311#Asynchronous ). educationalRole: URIRef # An educationalRole of an EducationalAudience. educationalUse: URIRef # The purpose of a work in the context of education; for example, 'assignment', 'group work'. 
elevation: URIRef # The elevation of a location ([WGS 84](https://en.wikipedia.org/wiki/World_Geodetic_System)). Values may be of the form 'NUMBER UNIT_OF_MEASUREMENT' (e.g., '1,000 m', '3,200 ft') while numbers alone should be assumed to be a value in meters. eligibilityToWorkRequirement: URIRef # The legal requirements such as citizenship, visa and other documentation required for an applicant to this job. eligibleCustomerType: URIRef # The type(s) of customers for which the given offer is valid. eligibleDuration: URIRef # The duration for which the given offer is valid. eligibleQuantity: URIRef # The interval and unit of measurement of ordering quantities for which the offer or price specification is valid. This allows e.g. specifying that a certain freight charge is valid only for a certain quantity. eligibleRegion: URIRef # The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, the place, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is valid.\n\nSee also [[ineligibleRegion]]. eligibleTransactionVolume: URIRef # The transaction volume, in a monetary unit, for which the offer or price specification is valid, e.g. for indicating a minimal purchasing volume, to express free shipping above a certain order volume, or to limit the acceptance of credit cards to purchases to a certain minimal amount. email: URIRef # Email address. embedUrl: URIRef # A URL pointing to a player for a specific video. In general, this is the information in the ```src``` element of an ```embed``` tag and should not be the same as the content of the ```loc``` tag. embeddedTextCaption: URIRef # Represents textual captioning from a [[MediaObject]], e.g. text of a 'meme'. emissionsCO2: URIRef # The CO2 emissions in g/km. When used in combination with a QuantitativeValue, put "g/km" into the unitText property of that value, since there is no UN/CEFACT Common Code for "g/km". employee: URIRef # Someone working for this organization. 
employees: URIRef # People working for this organization. employerOverview: URIRef # A description of the employer, career opportunities and work environment for this position. employmentType: URIRef # Type of employment (e.g. full-time, part-time, contract, temporary, seasonal, internship). employmentUnit: URIRef # Indicates the department, unit and/or facility where the employee reports and/or in which the job is to be performed. encodesBioChemEntity: URIRef # Another BioChemEntity encoded by this one. encodesCreativeWork: URIRef # The CreativeWork encoded by this media object. encoding: URIRef # A media object that encodes this CreativeWork. This property is a synonym for associatedMedia. encodingFormat: URIRef # Media type typically expressed using a MIME format (see [IANA site](http://www.iana.org/assignments/media-types/media-types.xhtml) and [MDN reference](https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types)) e.g. application/zip for a SoftwareApplication binary, audio/mpeg for .mp3 etc.). In cases where a [[CreativeWork]] has several media type representations, [[encoding]] can be used to indicate each [[MediaObject]] alongside particular [[encodingFormat]] information. Unregistered or niche encoding and file formats can be indicated instead via the most appropriate URL, e.g. defining Web page or a Wikipedia/Wikidata entry. encodingType: URIRef # The supported encoding type(s) for an EntryPoint request. encodings: URIRef # A media object that encodes this CreativeWork. endDate: URIRef # The end date and time of the item (in [ISO 8601 date format](http://en.wikipedia.org/wiki/ISO_8601)). endOffset: URIRef # The end time of the clip expressed as the number of seconds from the beginning of the work. endTime: URIRef # The endTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to end. For actions that span a period of time, when the action was performed. e.g. 
John wrote a book from January to *December*. For media, including audio and video, it's the time offset of the end of a clip within a larger file.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions. endorsee: URIRef # A sub property of participant. The person/organization being supported. endorsers: URIRef # People or organizations that endorse the plan. energyEfficiencyScaleMax: URIRef # Specifies the most energy efficient class on the regulated EU energy consumption scale for the product category a product belongs to. For example, energy consumption for televisions placed on the market after January 1, 2020 is scaled from D to A+++. energyEfficiencyScaleMin: URIRef # Specifies the least energy efficient class on the regulated EU energy consumption scale for the product category a product belongs to. For example, energy consumption for televisions placed on the market after January 1, 2020 is scaled from D to A+++. engineDisplacement: URIRef # The volume swept by all of the pistons inside the cylinders of an internal combustion engine in a single movement. \n\nTypical unit code(s): CMQ for cubic centimeter, LTR for liters, INQ for cubic inches\n* Note 1: You can link to information about how the given value has been determined using the [[valueReference]] property.\n* Note 2: You can use [[minValue]] and [[maxValue]] to indicate ranges. enginePower: URIRef # The power of the vehicle's engine. Typical unit code(s): KWT for kilowatt, BHP for brake horsepower, N12 for metric horsepower (PS, with 1 PS = 735,49875 W)\n\n* Note 1: There are many different ways of measuring an engine's power. 
For an overview, see [http://en.wikipedia.org/wiki/Horsepower#Engine_power_test_codes](http://en.wikipedia.org/wiki/Horsepower#Engine_power_test_codes).\n* Note 2: You can link to information about how the given value has been determined using the [[valueReference]] property.\n* Note 3: You can use [[minValue]] and [[maxValue]] to indicate ranges. engineType: URIRef # The type of engine or engines powering the vehicle. entertainmentBusiness: URIRef # A sub property of location. The entertainment business where the action occurred. epidemiology: URIRef # The characteristics of associated patients, such as age, gender, race etc. episode: URIRef # An episode of a tv, radio or game media within a series or season. episodeNumber: URIRef # Position of the episode within an ordered group of episodes. episodes: URIRef # An episode of a TV/radio series or season. equal: URIRef # This ordering relation for qualitative values indicates that the subject is equal to the object. error: URIRef # For failed actions, more information on the cause of the failure. estimatedCost: URIRef # The estimated cost of the supply or supplies consumed when performing instructions. estimatedFlightDuration: URIRef # The estimated time the flight will take. estimatedSalary: URIRef # An estimated salary for a job posting or occupation, based on a variety of variables including, but not limited to industry, job title, and location. Estimated salaries are often computed by outside organizations rather than the hiring organization, who may not have committed to the estimated value. estimatesRiskOf: URIRef # The condition, complication, or symptom whose risk is being estimated. ethicsPolicy: URIRef # Statement about ethics policy, e.g. of a [[NewsMediaOrganization]] regarding journalistic and publishing practices, or of a [[Restaurant]], a page describing food source policies. 
In the case of a [[NewsMediaOrganization]], an ethicsPolicy is typically a statement describing the personal, organizational, and corporate standards of behavior expected by the organization. event: URIRef # Upcoming or past event associated with this place, organization, or action. eventAttendanceMode: URIRef # The eventAttendanceMode of an event indicates whether it occurs online, offline, or a mix. eventSchedule: URIRef # Associates an [[Event]] with a [[Schedule]]. There are circumstances where it is preferable to share a schedule for a series of repeating events rather than data on the individual events themselves. For example, a website or application might prefer to publish a schedule for a weekly gym class rather than provide data on every event. A schedule could be processed by applications to add forthcoming events to a calendar. An [[Event]] that is associated with a [[Schedule]] using this property should not have [[startDate]] or [[endDate]] properties. These are instead defined within the associated [[Schedule]], this avoids any ambiguity for clients using the data. The property might have repeated values to specify different schedules, e.g. for different months or seasons. eventStatus: URIRef # An eventStatus of an event represents its status; particularly useful when an event is cancelled or rescheduled. events: URIRef # Upcoming or past events associated with this place or organization. evidenceLevel: URIRef # Strength of evidence of the data used to formulate the guideline (enumerated). evidenceOrigin: URIRef # Source of the data used to formulate the guidance, e.g. RCT, consensus opinion, etc. exampleOfWork: URIRef # A creative work that this work is an example/instance/realization/derivation of. exceptDate: URIRef # Defines a [[Date]] or [[DateTime]] during which a scheduled [[Event]] will not take place. The property allows exceptions to a [[Schedule]] to be specified. 
If an exception is specified as a [[DateTime]] then only the event that would have started at that specific date and time should be excluded from the schedule. If an exception is specified as a [[Date]] then any event that is scheduled for that 24 hour period should be excluded from the schedule. This allows a whole day to be excluded from the schedule without having to itemise every scheduled event. exchangeRateSpread: URIRef # The difference between the price at which a broker or other intermediary buys and sells foreign currency. executableLibraryName: URIRef # Library file name e.g., mscorlib.dll, system.web.dll. exerciseCourse: URIRef # A sub property of location. The course where this action was taken. exercisePlan: URIRef # A sub property of instrument. The exercise plan used on this action. exerciseRelatedDiet: URIRef # A sub property of instrument. The diet used in this action. exerciseType: URIRef # Type(s) of exercise or activity, such as strength training, flexibility training, aerobics, cardiac rehabilitation, etc. exifData: URIRef # exif data for this object. expectedArrivalFrom: URIRef # The earliest date the package may arrive. expectedArrivalUntil: URIRef # The latest date the package may arrive. expectedPrognosis: URIRef # The likely outcome in either the short term or long term of the medical condition. expectsAcceptanceOf: URIRef # An Offer which must be accepted before the user can perform the Action. For example, the user may need to buy a movie before being able to watch it. experienceInPlaceOfEducation: URIRef # Indicates whether a [[JobPosting]] will accept experience (as indicated by [[OccupationalExperienceRequirements]]) in place of its formal educational qualifications (as indicated by [[educationRequirements]]). If true, indicates that satisfying one of these requirements is sufficient. experienceRequirements: URIRef # Description of skills and experience needed for the position or Occupation. 
expertConsiderations: URIRef # Medical expert advice related to the plan. expires: URIRef # Date the content expires and is no longer useful or available. For example a [[VideoObject]] or [[NewsArticle]] whose availability or relevance is time-limited, or a [[ClaimReview]] fact check whose publisher wants to indicate that it may no longer be relevant (or helpful to highlight) after some date. expressedIn: URIRef # Tissue, organ, biological sample, etc in which activity of this gene has been observed experimentally. For example brain, digestive system. familyName: URIRef # Family name. In the U.S., the last name of a Person. fatContent: URIRef # The number of grams of fat. faxNumber: URIRef # The fax number. featureList: URIRef # Features or modules provided by this application (and possibly required by other applications). feesAndCommissionsSpecification: URIRef # Description of fees, commissions, and other terms applied either to a class of financial product, or by a financial service organization. fiberContent: URIRef # The number of grams of fiber. fileFormat: URIRef # Media type, typically MIME format (see [IANA site](http://www.iana.org/assignments/media-types/media-types.xhtml)) of the content e.g. application/zip of a SoftwareApplication binary. In cases where a CreativeWork has several media type representations, 'encoding' can be used to indicate each MediaObject alongside particular fileFormat information. Unregistered or niche file formats can be indicated instead via the most appropriate URL, e.g. defining Web page or a Wikipedia entry. fileSize: URIRef # Size of the application / package (e.g. 18MB). In the absence of a unit (MB, KB etc.), KB will be assumed. financialAidEligible: URIRef # A financial aid type or program which students may use to pay for tuition or fees associated with the program. firstAppearance: URIRef # Indicates the first known occurrence of a [[Claim]] in some [[CreativeWork]].
firstPerformance: URIRef # The date and place the work was first performed. flightDistance: URIRef # The distance of the flight. flightNumber: URIRef # The unique identifier for a flight including the airline IATA code. For example, if describing United flight 110, where the IATA code for United is 'UA', the flightNumber is 'UA110'. floorLevel: URIRef # The floor level for an [[Accommodation]] in a multi-storey building. Since counting systems [vary internationally](https://en.wikipedia.org/wiki/Storey#Consecutive_number_floor_designations), the local system should be used where possible. floorLimit: URIRef # A floor limit is the amount of money above which credit card transactions must be authorized. floorSize: URIRef # The size of the accommodation, e.g. in square meter or squarefoot. Typical unit code(s): MTK for square meter, FTK for square foot, or YDK for square yard followee: URIRef # A sub property of object. The person or organization being followed. follows: URIRef # The most generic uni-directional social relation. followup: URIRef # Typical or recommended followup care after the procedure is performed. foodEstablishment: URIRef # A sub property of location. The specific food establishment where the action occurred. foodEvent: URIRef # A sub property of location. The specific food event where the action occurred. foodWarning: URIRef # Any precaution, guidance, contraindication, etc. related to consumption of specific foods while taking this drug. founder: URIRef # A person who founded this organization. founders: URIRef # A person who founded this organization. foundingDate: URIRef # The date that this organization was founded. foundingLocation: URIRef # The place where the Organization was founded. free: URIRef # A flag to signal that the item, event, or place is accessible for free. freeShippingThreshold: URIRef # A monetary value above which (or equal to) the shipping rate becomes free. 
Intended to be used via an [[OfferShippingDetails]] with [[shippingSettingsLink]] matching this [[ShippingRateSettings]]. frequency: URIRef # How often the dose is taken, e.g. 'daily'. fromLocation: URIRef # A sub property of location. The original location of the object or the agent before the action. fuelCapacity: URIRef # The capacity of the fuel tank or in the case of electric cars, the battery. If there are multiple components for storage, this should indicate the total of all storage of the same type.\n\nTypical unit code(s): LTR for liters, GLL of US gallons, GLI for UK / imperial gallons, AMH for ampere-hours (for electrical vehicles). fuelConsumption: URIRef # The amount of fuel consumed for traveling a particular distance or temporal duration with the given vehicle (e.g. liters per 100 km).\n\n* Note 1: There are unfortunately no standard unit codes for liters per 100 km. Use [[unitText]] to indicate the unit of measurement, e.g. L/100 km.\n* Note 2: There are two ways of indicating the fuel consumption, [[fuelConsumption]] (e.g. 8 liters per 100 km) and [[fuelEfficiency]] (e.g. 30 miles per gallon). They are reciprocal.\n* Note 3: Often, the absolute value is useful only when related to driving speed ("at 80 km/h") or usage pattern ("city traffic"). You can use [[valueReference]] to link the value for the fuel consumption to another value. fuelEfficiency: URIRef # The distance traveled per unit of fuel used; most commonly miles per gallon (mpg) or kilometers per liter (km/L).\n\n* Note 1: There are unfortunately no standard unit codes for miles per gallon or kilometers per liter. Use [[unitText]] to indicate the unit of measurement, e.g. mpg or km/L.\n* Note 2: There are two ways of indicating the fuel consumption, [[fuelConsumption]] (e.g. 8 liters per 100 km) and [[fuelEfficiency]] (e.g. 30 miles per gallon). 
They are reciprocal.\n* Note 3: Often, the absolute value is useful only when related to driving speed ("at 80 km/h") or usage pattern ("city traffic"). You can use [[valueReference]] to link the value for the fuel economy to another value. fuelType: URIRef # The type of fuel suitable for the engine or engines of the vehicle. If the vehicle has only one engine, this property can be attached directly to the vehicle. functionalClass: URIRef # The degree of mobility the joint allows. fundedItem: URIRef # Indicates an item funded or sponsored through a [[Grant]]. funder: URIRef # A person or organization that supports (sponsors) something through some kind of financial contribution. game: URIRef # Video game which is played on this server. gameItem: URIRef # An item is an object within the game world that can be collected by a player or, occasionally, a non-player character. gameLocation: URIRef # Real or fictional location of the game (or part of game). gamePlatform: URIRef # The electronic systems used to play video games. gameServer: URIRef # The server on which it is possible to play the game. gameTip: URIRef # Links to tips, tactics, etc. gender: URIRef # Gender of something, typically a [[Person]], but possibly also fictional characters, animals, etc. While https://schema.org/Male and https://schema.org/Female may be used, text strings are also acceptable for people who do not identify as a binary gender. The [[gender]] property can also be used in an extended sense to cover e.g. the gender of sports teams. As with the gender of individuals, we do not try to enumerate all possibilities. A mixed-gender [[SportsTeam]] can be indicated with a text value of "Mixed". genre: URIRef # Genre of the creative work, broadcast channel or group. geo: URIRef # The geo coordinates of the place. geoContains: URIRef # Represents a relationship between two geometries (or the places they represent), relating a containing geometry to a contained geometry. 
"a contains b iff no points of b lie in the exterior of a, and at least one point of the interior of b lies in the interior of a". As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geoCoveredBy: URIRef # Represents a relationship between two geometries (or the places they represent), relating a geometry to another that covers it. As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geoCovers: URIRef # Represents a relationship between two geometries (or the places they represent), relating a covering geometry to a covered geometry. "Every point of b is a point of (the interior or boundary of) a". As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geoCrosses: URIRef # Represents a relationship between two geometries (or the places they represent), relating a geometry to another that crosses it: "a crosses b: they have some but not all interior points in common, and the dimension of the intersection is less than that of at least one of them". As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geoDisjoint: URIRef # Represents spatial relations in which two geometries (or the places they represent) are topologically disjoint: they have no point in common. They form a set of disconnected geometries." (a symmetric relationship, as defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM)) geoEquals: URIRef # Represents spatial relations in which two geometries (or the places they represent) are topologically equal, as defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). "Two geometries are topologically equal if their interiors intersect and no part of the interior or boundary of one geometry intersects the exterior of the other" (a symmetric relationship) geoIntersects: URIRef # Represents spatial relations in which two geometries (or the places they represent) have at least one point in common. As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). 
geoMidpoint: URIRef # Indicates the GeoCoordinates at the centre of a GeoShape e.g. GeoCircle. geoOverlaps: URIRef # Represents a relationship between two geometries (or the places they represent), relating a geometry to another that geospatially overlaps it, i.e. they have some but not all points in common. As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geoRadius: URIRef # Indicates the approximate radius of a GeoCircle (metres unless indicated otherwise via Distance notation). geoTouches: URIRef # Represents spatial relations in which two geometries (or the places they represent) touch: they have at least one boundary point in common, but no interior points." (a symmetric relationship, as defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM) ) geoWithin: URIRef # Represents a relationship between two geometries (or the places they represent), relating a geometry to one that contains it, i.e. it is inside (i.e. within) its interior. As defined in [DE-9IM](https://en.wikipedia.org/wiki/DE-9IM). geographicArea: URIRef # The geographic area associated with the audience. gettingTestedInfo: URIRef # Information about getting tested (for a [[MedicalCondition]]), e.g. in the context of a pandemic. givenName: URIRef # Given name. In the U.S., the first name of a Person. globalLocationNumber: URIRef # The [Global Location Number](http://www.gs1.org/gln) (GLN, sometimes also referred to as International Location Number or ILN) of the respective organization, person, or place. The GLN is a 13-digit number used to identify parties and physical locations. governmentBenefitsInfo: URIRef # governmentBenefitsInfo provides information about government benefits associated with a SpecialAnnouncement. gracePeriod: URIRef # The period of time after any due date that the borrower has to fulfil its obligations before a default (failure to pay) is deemed to have occurred. 
grantee: URIRef # The person, organization, contact point, or audience that has been granted this permission. greater: URIRef # This ordering relation for qualitative values indicates that the subject is greater than the object. greaterOrEqual: URIRef # This ordering relation for qualitative values indicates that the subject is greater than or equal to the object. gtin: URIRef # A Global Trade Item Number ([GTIN](https://www.gs1.org/standards/id-keys/gtin)). GTINs identify trade items, including products and services, using numeric identification codes. The [[gtin]] property generalizes the earlier [[gtin8]], [[gtin12]], [[gtin13]], and [[gtin14]] properties. The GS1 [digital link specifications](https://www.gs1.org/standards/Digital-Link/) express GTINs as URLs. A correct [[gtin]] value should be a valid GTIN, which means that it should be an all-numeric string of either 8, 12, 13 or 14 digits, or a "GS1 Digital Link" URL based on such a string. The numeric component should also have a [valid GS1 check digit](https://www.gs1.org/services/check-digit-calculator) and meet the other rules for valid GTINs. See also [GS1's GTIN Summary](http://www.gs1.org/barcodes/technical/idkeys/gtin) and [Wikipedia](https://en.wikipedia.org/wiki/Global_Trade_Item_Number) for more details. Left-padding of the gtin values is not required or encouraged. gtin12: URIRef # The GTIN-12 code of the product, or the product to which the offer refers. The GTIN-12 is the 12-digit GS1 Identification Key composed of a U.P.C. Company Prefix, Item Reference, and Check Digit used to identify trade items. See [GS1 GTIN Summary](http://www.gs1.org/barcodes/technical/idkeys/gtin) for more details. gtin13: URIRef # The GTIN-13 code of the product, or the product to which the offer refers. This is equivalent to 13-digit ISBN codes and EAN UCC-13. Former 12-digit UPC codes can be converted into a GTIN-13 code by simply adding a preceding zero. 
See [GS1 GTIN Summary](http://www.gs1.org/barcodes/technical/idkeys/gtin) for more details. gtin14: URIRef # The GTIN-14 code of the product, or the product to which the offer refers. See [GS1 GTIN Summary](http://www.gs1.org/barcodes/technical/idkeys/gtin) for more details. gtin8: URIRef # The GTIN-8 code of the product, or the product to which the offer refers. This code is also known as EAN/UCC-8 or 8-digit EAN. See [GS1 GTIN Summary](http://www.gs1.org/barcodes/technical/idkeys/gtin) for more details. guideline: URIRef # A medical guideline related to this entity. guidelineDate: URIRef # Date on which this guideline's recommendation was made. guidelineSubject: URIRef # The medical conditions, treatments, etc. that are the subject of the guideline. handlingTime: URIRef # The typical delay between the receipt of the order and the goods either leaving the warehouse or being prepared for pickup, in case the delivery method is on site pickup. Typical properties: minValue, maxValue, unitCode (d for DAY). This is by common convention assumed to mean business days (if a unitCode is used, coded as "d"), i.e. only counting days when the business normally operates. hasBioChemEntityPart: URIRef # Indicates a BioChemEntity that (in some sense) has this BioChemEntity as a part. hasBioPolymerSequence: URIRef # A symbolic representation of a BioChemEnity. For example, a nucleotide sequence of a Gene or an amino acid sequence of a Protein. hasBroadcastChannel: URIRef # A broadcast channel of a broadcast service. hasCategoryCode: URIRef # A Category code contained in this code set. hasCourse: URIRef # A course or class that is one of the learning opportunities that constitute an educational / occupational program. No information is implied about whether the course is mandatory or optional; no guarantee is implied about whether the course will be available to everyone on the program. 
hasCourseInstance: URIRef # An offering of the course at a specific time and place or through specific media or mode of study or to a specific section of students. hasCredential: URIRef # A credential awarded to the Person or Organization. hasDefinedTerm: URIRef # A Defined Term contained in this term set. hasDeliveryMethod: URIRef # Method used for delivery or shipping. hasDigitalDocumentPermission: URIRef # A permission related to the access to this document (e.g. permission to read or write an electronic document). For a public document, specify a grantee with an Audience with audienceType equal to "public". hasDriveThroughService: URIRef # Indicates whether some facility (e.g. [[FoodEstablishment]], [[CovidTestingFacility]]) offers a service that can be used by driving through in a car. In the case of [[CovidTestingFacility]] such facilities could potentially help with social distancing from other potentially-infected users. hasEnergyConsumptionDetails: URIRef # Defines the energy efficiency Category (also known as "class" or "rating") for a product according to an international energy efficiency standard. hasEnergyEfficiencyCategory: URIRef # Defines the energy efficiency Category (which could be either a rating out of range of values or a yes/no certification) for a product according to an international energy efficiency standard. hasHealthAspect: URIRef # Indicates the aspect or aspects specifically addressed in some [[HealthTopicContent]]. For example, that the content is an overview, or that it talks about treatment, self-care, treatments or their side-effects. hasMap: URIRef # A URL to a map of the place. hasMeasurement: URIRef # A product measurement, for example the inseam of pants, the wheel size of a bicycle, or the gauge of a screw. Usually an exact measurement, but can also be a range of measurements for adjustable products, for example belts and ski bindings. 
hasMenu: URIRef # Either the actual menu as a structured representation, as text, or a URL of the menu. hasMenuItem: URIRef # A food or drink item contained in a menu or menu section. hasMenuSection: URIRef # A subgrouping of the menu (by dishes, course, serving time period, etc.). hasMerchantReturnPolicy: URIRef # Specifies a MerchantReturnPolicy that may be applicable. hasMolecularFunction: URIRef # Molecular function performed by this BioChemEntity; please use PropertyValue if you want to include any evidence. hasOccupation: URIRef # The Person's occupation. For past professions, use Role for expressing dates. hasOfferCatalog: URIRef # Indicates an OfferCatalog listing for this Organization, Person, or Service. hasPOS: URIRef # Points-of-Sales operated by the organization or person. hasPart: URIRef # Indicates an item or CreativeWork that is part of this item, or CreativeWork (in some sense). hasRepresentation: URIRef # A common representation such as a protein sequence or chemical structure for this entity. For images use schema.org/image. hasVariant: URIRef # Indicates a [[Product]] that is a member of this [[ProductGroup]] (or [[ProductModel]]). headline: URIRef # Headline of the article. healthCondition: URIRef # Specifying the health condition(s) of a patient, medical study, or other target audience. healthPlanCoinsuranceOption: URIRef # Whether the coinsurance applies before or after deductible, etc. TODO: Is this a closed set? healthPlanCoinsuranceRate: URIRef # The rate of coinsurance expressed as a number between 0.0 and 1.0. healthPlanCopay: URIRef # The copay amount. healthPlanCopayOption: URIRef # Whether the copay is before or after deductible, etc. TODO: Is this a closed set? healthPlanCostSharing: URIRef # The costs to the patient for services under this network or formulary. healthPlanDrugOption: URIRef # TODO. healthPlanDrugTier: URIRef # The tier(s) of drugs offered by this formulary or insurance plan.
healthPlanId: URIRef # The 14-character, HIOS-generated Plan ID number. (Plan IDs must be unique, even across different markets.) healthPlanMarketingUrl: URIRef # The URL that goes directly to the plan brochure for the specific standard plan or plan variation. healthPlanNetworkId: URIRef # Name or unique ID of network. (Networks are often reused across different insurance plans). healthPlanNetworkTier: URIRef # The tier(s) for this network. healthPlanPharmacyCategory: URIRef # The category or type of pharmacy associated with this cost sharing. healthcareReportingData: URIRef # Indicates data describing a hospital, e.g. a CDC [[CDCPMDRecord]] or as some kind of [[Dataset]]. height: URIRef # The height of the item. highPrice: URIRef # The highest price of all offers available.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. hiringOrganization: URIRef # Organization offering the job position. holdingArchive: URIRef # [[ArchiveOrganization]] that holds, keeps or maintains the [[ArchiveComponent]]. homeLocation: URIRef # A contact location for a person's residence. homeTeam: URIRef # The home team in a sports event. honorificPrefix: URIRef # An honorific prefix preceding a Person's name such as Dr/Mrs/Mr. honorificSuffix: URIRef # An honorific suffix following a Person's name such as M.D. /PhD/MSCSW. hospitalAffiliation: URIRef # A hospital with which the physician or office is affiliated. hostingOrganization: URIRef # The organization (airline, travelers' club, etc.) the membership is made with. hoursAvailable: URIRef # The hours during which this service or contact is available. howPerformed: URIRef # How the procedure is performed.
httpMethod: URIRef # An HTTP method that specifies the appropriate HTTP method for a request to an HTTP EntryPoint. Values are capitalized strings as used in HTTP. iataCode: URIRef # IATA identifier for an airline or airport. icaoCode: URIRef # ICAO identifier for an airport. identifier: URIRef # The identifier property represents any kind of identifier for any kind of [[Thing]], such as ISBNs, GTIN codes, UUIDs etc. Schema.org provides dedicated properties for representing many of these, either as textual strings or as URL (URI) links. See [background notes](/docs/datamodel.html#identifierBg) for more details. identifyingExam: URIRef # A physical examination that can identify this sign. identifyingTest: URIRef # A diagnostic test that can identify this sign. illustrator: URIRef # The illustrator of the book. image: URIRef # An image of the item. This can be a [[URL]] or a fully described [[ImageObject]]. imagingTechnique: URIRef # Imaging technique used. inAlbum: URIRef # The album to which this recording belongs. inBroadcastLineup: URIRef # The CableOrSatelliteService offering the channel. inChI: URIRef # Non-proprietary identifier for molecular entity that can be used in printed and electronic data sources thus enabling easier linking of diverse data compilations. inChIKey: URIRef # InChIKey is a hashed version of the full InChI (using the SHA-256 algorithm). inCodeSet: URIRef # A [[CategoryCodeSet]] that contains this category code. inDefinedTermSet: URIRef # A [[DefinedTermSet]] that contains this term. inLanguage: URIRef # The language of the content or performance or used in an action. Please use one of the language codes from the [IETF BCP 47 standard](http://tools.ietf.org/html/bcp47). See also [[availableLanguage]]. inPlaylist: URIRef # The playlist to which this recording belongs. inProductGroupWithID: URIRef # Indicates the [[productGroupID]] for a [[ProductGroup]] that this product [[isVariantOf]]. 
inStoreReturnsOffered: URIRef # Are in-store returns offered? (for more advanced return methods use the [[returnMethod]] property) inSupportOf: URIRef # Qualification, candidature, degree, application that Thesis supports. incentiveCompensation: URIRef # Description of bonus and commission compensation aspects of the job. incentives: URIRef # Description of bonus and commission compensation aspects of the job. includedComposition: URIRef # Smaller compositions included in this work (e.g. a movement in a symphony). includedDataCatalog: URIRef # A data catalog which contains this dataset (this property was previously 'catalog', preferred name is now 'includedInDataCatalog'). includedInDataCatalog: URIRef # A data catalog which contains this dataset. includedInHealthInsurancePlan: URIRef # The insurance plans that cover this drug. includedRiskFactor: URIRef # A modifiable or non-modifiable risk factor included in the calculation, e.g. age, coexisting condition. includesAttraction: URIRef # Attraction located at destination. includesHealthPlanFormulary: URIRef # Formularies covered by this plan. includesHealthPlanNetwork: URIRef # Networks covered by this plan. includesObject: URIRef # This links to a node or nodes indicating the exact quantity of the products included in an [[Offer]] or [[ProductCollection]]. increasesRiskOf: URIRef # The condition, complication, etc. influenced by this factor. industry: URIRef # The industry associated with the job position. ineligibleRegion: URIRef # The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, the place, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is not valid, e.g. a region where the transaction is not allowed.\n\nSee also [[eligibleRegion]]. infectiousAgent: URIRef # The actual infectious agent, such as a specific bacterium. infectiousAgentClass: URIRef # The class of infectious agent (bacteria, prion, etc.) that causes the disease. 
ingredients: URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. inker: URIRef # The individual who traces over the pencil drawings in ink after pencils are complete. insertion: URIRef # The place of attachment of a muscle, or what the muscle moves. installUrl: URIRef # URL at which the app may be installed, if different from the URL of the item. instructor: URIRef # A person assigned to instruct or provide instructional assistance for the [[CourseInstance]]. instrument: URIRef # The object that helped the agent perform the action. e.g. John wrote a book with *a pen*. intensity: URIRef # Quantitative measure gauging the degree of force involved in the exercise, for example, heartbeats per minute. May include the velocity of the movement. interactingDrug: URIRef # Another drug that is known to interact with this drug in a way that impacts the effect of this drug or causes a risk to the patient. Note: disease interactions are typically captured as contraindications. interactionCount: URIRef # This property is deprecated, alongside the UserInteraction types on which it depended. interactionService: URIRef # The WebSite or SoftwareApplication where the interactions took place. interactionStatistic: URIRef # The number of interactions for the CreativeWork using the WebSite or SoftwareApplication. The most specific child type of InteractionCounter should be used. interactionType: URIRef # The Action representing the type of interaction. For up votes, +1s, etc. use [[LikeAction]]. For down votes use [[DislikeAction]]. Otherwise, use the most specific Action. interactivityType: URIRef # The predominant mode of learning supported by the learning resource. Acceptable values are 'active', 'expositive', or 'mixed'. interestRate: URIRef # The interest rate, charged or paid, applicable to the financial product. Note: This is different from the calculated annualPercentageRate. 
interpretedAsClaim: URIRef # Used to indicate a specific claim contained, implied, translated or refined from the content of a [[MediaObject]] or other [[CreativeWork]]. The interpreting party can be indicated using [[claimInterpreter]]. inventoryLevel: URIRef # The current approximate inventory level for the item or items. inverseOf: URIRef # Relates a property to a property that is its inverse. Inverse properties relate the same pairs of items to each other, but in reversed direction. For example, the 'alumni' and 'alumniOf' properties are inverseOf each other. Some properties don't have explicit inverses; in these situations RDFa and JSON-LD syntax for reverse properties can be used. isAcceptingNewPatients: URIRef # Whether the provider is accepting new patients. isAccessibleForFree: URIRef # A flag to signal that the item, event, or place is accessible for free. isAccessoryOrSparePartFor: URIRef # A pointer to another product (or multiple products) for which this product is an accessory or spare part. isAvailableGenerically: URIRef # True if the drug is available in a generic form (regardless of name). isBasedOn: URIRef # A resource from which this work is derived or from which it is a modification or adaption. isBasedOnUrl: URIRef # A resource that was used in the creation of this resource. This term can be repeated for multiple sources. For example, http://example.com/great-multiplication-intro.html. isConsumableFor: URIRef # A pointer to another product (or multiple products) for which this product is a consumable. isEncodedByBioChemEntity: URIRef # Another BioChemEntity encoding by this one. isFamilyFriendly: URIRef # Indicates whether this content is family friendly. isGift: URIRef # Was the offer accepted as a gift for someone other than the buyer. isInvolvedInBiologicalProcess: URIRef # Biological process this BioChemEntity is involved in; please use PropertyValue if you want to include any evidence. 
isLiveBroadcast: URIRef # True if the broadcast is of a live event. isLocatedInSubcellularLocation: URIRef # Subcellular location where this BioChemEntity is located; please use PropertyValue if you want to include any evidence. isPartOf: URIRef # Indicates an item or CreativeWork that this item, or CreativeWork (in some sense), is part of. isPartOfBioChemEntity: URIRef # Indicates a BioChemEntity that is (in some sense) a part of this BioChemEntity. isPlanForApartment: URIRef # Indicates some accommodation that this floor plan describes. isProprietary: URIRef # True if this item's name is a proprietary/brand name (vs. generic name). isRelatedTo: URIRef # A pointer to another, somehow related product (or multiple products). isResizable: URIRef # Whether the 3DModel allows resizing. For example, room layout applications often do not allow 3DModel elements to be resized to reflect reality. isSimilarTo: URIRef # A pointer to another, functionally similar product (or multiple products). isUnlabelledFallback: URIRef # This can be marked 'true' to indicate that some published [[DeliveryTimeSettings]] or [[ShippingRateSettings]] are intended to apply to all [[OfferShippingDetails]] published by the same merchant, when referenced by a [[shippingSettingsLink]] in those settings. It is not meaningful to use a 'true' value for this property alongside a transitTimeLabel (for [[DeliveryTimeSettings]]) or shippingLabel (for [[ShippingRateSettings]]), since this property is for use with unlabelled settings. isVariantOf: URIRef # Indicates the kind of product that this is a variant of. In the case of [[ProductModel]], this is a pointer (from a ProductModel) to a base product from which this product is a variant. It is safe to infer that the variant inherits all product features from the base model, unless defined locally. This is not transitive. 
In the case of a [[ProductGroup]], the group description also serves as a template, representing a set of Products that vary on explicitly defined, specific dimensions only (so it defines both a set of variants, as well as which values distinguish amongst those variants). When used with [[ProductGroup]], this property can apply to any [[Product]] included in the group. isbn: URIRef # The ISBN of the book. isicV4: URIRef # The International Standard of Industrial Classification of All Economic Activities (ISIC), Revision 4 code for a particular organization, business person, or place. isrcCode: URIRef # The International Standard Recording Code for the recording. issn: URIRef # The International Standard Serial Number (ISSN) that identifies this serial publication. You can repeat this property to identify different formats of, or the linking ISSN (ISSN-L) for, this serial publication. issueNumber: URIRef # Identifies the issue of publication; for example, "iii" or "2". issuedBy: URIRef # The organization issuing the ticket or permit. issuedThrough: URIRef # The service through with the permit was granted. iswcCode: URIRef # The International Standard Musical Work Code for the composition. item: URIRef # An entity represented by an entry in a list or data feed (e.g. an 'artist' in a list of 'artists')’. itemCondition: URIRef # A predefined value from OfferItemCondition specifying the condition of the product or service, or the products or services included in the offer. Also used for product return policies to specify the condition of products accepted for returns. itemDefectReturnFees: URIRef # The type of return fees for returns of defect products. itemDefectReturnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a defect product. itemDefectReturnShippingFeesAmount: URIRef # Amount of shipping costs for defect product returns. 
Applicable when property [[itemDefectReturnFees]] equals [[ReturnShippingFees]]. itemListElement: URIRef # For itemListElement values, you can use simple strings (e.g. "Peter", "Paul", "Mary"), existing entities, or use ListItem.\n\nText values are best if the elements in the list are plain strings. Existing entities are best for a simple, unordered list of existing things in your data. ListItem is used with ordered lists when you want to provide additional context about the element in that list or when the same item might be in different places in different lists.\n\nNote: The order of elements in your mark-up is not sufficient for indicating the order or elements. Use ListItem with a 'position' property in such cases. itemListOrder: URIRef # Type of ordering (e.g. Ascending, Descending, Unordered). itemLocation: URIRef # Current location of the item. itemOffered: URIRef # An item being offered (or demanded). The transactional nature of the offer or demand is documented using [[businessFunction]], e.g. sell, lease etc. While several common expected types are listed explicitly in this definition, others can be used. Using a second type, such as Product or a subtype of Product, can clarify the nature of the offer. itemReviewed: URIRef # The item that is being reviewed/rated. itemShipped: URIRef # Item(s) being shipped. itinerary: URIRef # Destination(s) ( [[Place]] ) that make up a trip. For a trip where destination order is important use [[ItemList]] to specify that order (see examples). iupacName: URIRef # Systematic method of naming chemical compounds as recommended by the International Union of Pure and Applied Chemistry (IUPAC). jobBenefits: URIRef # Description of benefits associated with the job. jobImmediateStart: URIRef # An indicator as to whether a position is available for an immediate start. jobLocation: URIRef # A (typically single) geographic location associated with the job position. 
jobLocationType: URIRef # A description of the job location (e.g TELECOMMUTE for telecommute jobs). jobStartDate: URIRef # The date on which a successful applicant for this job would be expected to start work. Choose a specific date in the future or use the jobImmediateStart property to indicate the position is to be filled as soon as possible. jobTitle: URIRef # The job title of the person (for example, Financial Manager). jurisdiction: URIRef # Indicates a legal jurisdiction, e.g. of some legislation, or where some government service is based. keywords: URIRef # Keywords or tags used to describe this content. Multiple entries in a keywords list are typically delimited by commas. knownVehicleDamages: URIRef # A textual description of known damages, both repaired and unrepaired. knows: URIRef # The most generic bi-directional social/work relation. knowsAbout: URIRef # Of a [[Person]], and less typically of an [[Organization]], to indicate a topic that is known about - suggesting possible expertise but not implying it. We do not distinguish skill levels here, or relate this to educational content, events, objectives or [[JobPosting]] descriptions. knowsLanguage: URIRef # Of a [[Person]], and less typically of an [[Organization]], to indicate a known language. We do not distinguish skill levels or reading/writing/speaking/signing here. Use language codes from the [IETF BCP 47 standard](http://tools.ietf.org/html/bcp47). labelDetails: URIRef # Link to the drug's label details. landlord: URIRef # A sub property of participant. The owner of the real estate property. language: URIRef # A sub property of instrument. The language used on this action. lastReviewed: URIRef # Date on which the content on this web page was last reviewed for accuracy and/or completeness. latitude: URIRef # The latitude of a location. For example ```37.42242``` ([WGS 84](https://en.wikipedia.org/wiki/World_Geodetic_System)). layoutImage: URIRef # A schematic image showing the floorplan layout. 
learningResourceType: URIRef # The predominant type or kind characterizing the learning resource. For example, 'presentation', 'handout'. leaseLength: URIRef # Length of the lease for some [[Accommodation]], either particular to some [[Offer]] or in some cases intrinsic to the property. legalName: URIRef # The official name of the organization, e.g. the registered company name. legalStatus: URIRef # The drug or supplement's legal status, including any controlled substance schedules that apply. legislationApplies: URIRef # Indicates that this legislation (or part of a legislation) somehow transfers another legislation in a different legislative context. This is an informative link, and it has no legal value. For legally-binding links of transposition, use the legislationTransposes property. For example an informative consolidated law of a European Union's member state "applies" the consolidated version of the European Directive implemented in it. legislationChanges: URIRef # Another legislation that this legislation changes. This encompasses the notions of amendment, replacement, correction, repeal, or other types of change. This may be a direct change (textual or non-textual amendment) or a consequential or indirect change. The property is to be used to express the existence of a change relationship between two acts rather than the existence of a consolidated version of the text that shows the result of the change. For consolidation relationships, use the legislationConsolidates property. legislationConsolidates: URIRef # Indicates another legislation taken into account in this consolidated legislation (which is usually the product of an editorial process that revises the legislation). This property should be used multiple times to refer to both the original version or the previous consolidated version, and to the legislations making the change. legislationDate: URIRef # The date of adoption or signature of the legislation. 
This is the date at which the text is officially aknowledged to be a legislation, even though it might not even be published or in force. legislationDateVersion: URIRef # The point-in-time at which the provided description of the legislation is valid (e.g. : when looking at the law on the 2016-04-07 (= dateVersion), I get the consolidation of 2015-04-12 of the "National Insurance Contributions Act 2015") legislationIdentifier: URIRef # An identifier for the legislation. This can be either a string-based identifier, like the CELEX at EU level or the NOR in France, or a web-based, URL/URI identifier, like an ELI (European Legislation Identifier) or an URN-Lex. legislationJurisdiction: URIRef # The jurisdiction from which the legislation originates. legislationLegalForce: URIRef # Whether the legislation is currently in force, not in force, or partially in force. legislationLegalValue: URIRef # The legal value of this legislation file. The same legislation can be written in multiple files with different legal values. Typically a digitally signed PDF have a "stronger" legal value than the HTML file of the same act. legislationPassedBy: URIRef # The person or organization that originally passed or made the law : typically parliament (for primary legislation) or government (for secondary legislation). This indicates the "legal author" of the law, as opposed to its physical author. legislationResponsible: URIRef # An individual or organization that has some kind of responsibility for the legislation. Typically the ministry who is/was in charge of elaborating the legislation, or the adressee for potential questions about the legislation once it is published. legislationTransposes: URIRef # Indicates that this legislation (or part of legislation) fulfills the objectives set by another legislation, by passing appropriate implementation measures. Typically, some legislations of European Union's member states or regions transpose European Directives. 
This indicates a legally binding link between the 2 legislations. legislationType: URIRef # The type of the legislation. Examples of values are "law", "act", "directive", "decree", "regulation", "statutory instrument", "loi organique", "règlement grand-ducal", etc., depending on the country. leiCode: URIRef # An organization identifier that uniquely identifies a legal entity as defined in ISO 17442. lender: URIRef # A sub property of participant. The person that lends the object being borrowed. lesser: URIRef # This ordering relation for qualitative values indicates that the subject is lesser than the object. lesserOrEqual: URIRef # This ordering relation for qualitative values indicates that the subject is lesser than or equal to the object. letterer: URIRef # The individual who adds lettering, including speech balloons and sound effects, to artwork. license: URIRef # A license document that applies to this content, typically indicated by URL. line: URIRef # A line is a point-to-point path consisting of two or more points. A line is expressed as a series of two or more point objects separated by space. linkRelationship: URIRef # Indicates the relationship type of a Web link. liveBlogUpdate: URIRef # An update to the LiveBlog. loanMortgageMandateAmount: URIRef # Amount of mortgage mandate that can be converted into a proper mortgage at a later stage. loanPaymentAmount: URIRef # The amount of money to pay in a single payment. loanPaymentFrequency: URIRef # Frequency of payments due, i.e. number of months between payments. This is defined as a frequency, i.e. the reciprocal of a period of time. loanRepaymentForm: URIRef # A form of paying back money previously borrowed from a lender. Repayment usually takes the form of periodic payments that normally include part principal plus interest in each payment. loanTerm: URIRef # The duration of the loan or credit agreement. loanType: URIRef # The type of a loan or credit. 
location: URIRef # The location of, for example, where an event is happening, where an organization is located, or where an action takes place. locationCreated: URIRef # The location where the CreativeWork was created, which may not be the same as the location depicted in the CreativeWork. lodgingUnitDescription: URIRef # A full description of the lodging unit. lodgingUnitType: URIRef # Textual description of the unit type (including suite vs. room, size of bed, etc.). logo: URIRef # An associated logo. longitude: URIRef # The longitude of a location. For example ```-122.08585``` ([WGS 84](https://en.wikipedia.org/wiki/World_Geodetic_System)). loser: URIRef # A sub property of participant. The loser of the action. lowPrice: URIRef # The lowest price of all offers available.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. lyricist: URIRef # The person who wrote the words. lyrics: URIRef # The words in the song. mainContentOfPage: URIRef # Indicates if this web page element is the main subject of the page. mainEntity: URIRef # Indicates the primary entity described in some page or other CreativeWork. mainEntityOfPage: URIRef # Indicates a page (or other CreativeWork) for which this thing is the main entity being described. See [background notes](/docs/datamodel.html#mainEntityBackground) for details. maintainer: URIRef # A maintainer of a [[Dataset]], software package ([[SoftwareApplication]]), or other [[Project]]. A maintainer is a [[Person]] or [[Organization]] that manages contributions to, and/or publication of, some (typically complex) artifact. It is common for distributions of software and data to be based on "upstream" sources. When [[maintainer]] is applied to a specific version of something e.g. 
a particular version or packaging of a [[Dataset]], it is always possible that the upstream source has a different maintainer. The [[isBasedOn]] property can be used to indicate such relationships between datasets to make the different maintenance roles clear. Similarly in the case of software, a package may have dedicated maintainers working on integration into software distributions such as Ubuntu, as well as upstream maintainers of the underlying work. makesOffer: URIRef # A pointer to products or services offered by the organization or person. manufacturer: URIRef # The manufacturer of the product. map: URIRef # A URL to a map of the place. mapType: URIRef # Indicates the kind of Map, from the MapCategoryType Enumeration. maps: URIRef # A URL to a map of the place. marginOfError: URIRef # A marginOfError for an [[Observation]]. masthead: URIRef # For a [[NewsMediaOrganization]], a link to the masthead page or a page listing top editorial management. material: URIRef # A material that something is made from, e.g. leather, wool, cotton, paper. materialExtent: URIRef # The quantity of the materials being described or an expression of the physical space they occupy. mathExpression: URIRef # A mathematical expression (e.g. 'x^2-3x=0') that may be solved for a specific variable, simplified, or transformed. This can take many formats, e.g. LaTeX, Ascii-Math, or math as you would write with a keyboard. maxPrice: URIRef # The highest price if the price is a range. maxValue: URIRef # The upper value of some characteristic or property. maximumAttendeeCapacity: URIRef # The total number of individuals that may attend an event or venue. maximumEnrollment: URIRef # The maximum number of students who may be enrolled in the program. maximumIntake: URIRef # Recommended intake of this supplement for a given population as defined by a specific recommending authority. 
maximumPhysicalAttendeeCapacity: URIRef # The maximum physical attendee capacity of an [[Event]] whose [[eventAttendanceMode]] is [[OfflineEventAttendanceMode]] (or the offline aspects, in the case of a [[MixedEventAttendanceMode]]). maximumVirtualAttendeeCapacity: URIRef # The maximum physical attendee capacity of an [[Event]] whose [[eventAttendanceMode]] is [[OnlineEventAttendanceMode]] (or the online aspects, in the case of a [[MixedEventAttendanceMode]]). mealService: URIRef # Description of the meals that will be provided or available for purchase. measuredProperty: URIRef # The measuredProperty of an [[Observation]], either a schema.org property, a property from other RDF-compatible systems e.g. W3C RDF Data Cube, or schema.org extensions such as [GS1's](https://www.gs1.org/voc/?show=properties). measuredValue: URIRef # The measuredValue of an [[Observation]]. measurementTechnique: URIRef # A technique or technology used in a [[Dataset]] (or [[DataDownload]], [[DataCatalog]]), corresponding to the method used for measuring the corresponding variable(s) (described using [[variableMeasured]]). This is oriented towards scientific and scholarly dataset publication but may have broader applicability; it is not intended as a full representation of measurement, but rather as a high level summary for dataset discovery. For example, if [[variableMeasured]] is: molecule concentration, [[measurementTechnique]] could be: "mass spectrometry" or "nmr spectroscopy" or "colorimetry" or "immunofluorescence". If the [[variableMeasured]] is "depression rating", the [[measurementTechnique]] could be "Zung Scale" or "HAM-D" or "Beck Depression Inventory". If there are several [[variableMeasured]] properties recorded for some given data object, use a [[PropertyValue]] for each [[variableMeasured]] and attach the corresponding [[measurementTechnique]]. 
mechanismOfAction: URIRef # The specific biochemical interaction through which this drug or supplement produces its pharmacological effect. mediaAuthenticityCategory: URIRef # Indicates a MediaManipulationRatingEnumeration classification of a media object (in the context of how it was published or shared). mediaItemAppearance: URIRef # In the context of a [[MediaReview]], indicates specific media item(s) that are grouped using a [[MediaReviewItem]]. median: URIRef # The median value. medicalAudience: URIRef # Medical audience for page. medicalSpecialty: URIRef # A medical specialty of the provider. medicineSystem: URIRef # The system of medicine that includes this MedicalEntity, for example 'evidence-based', 'homeopathic', 'chiropractic', etc. meetsEmissionStandard: URIRef # Indicates that the vehicle meets the respective emission standard. member: URIRef # A member of an Organization or a ProgramMembership. Organizations can be members of organizations; ProgramMembership is typically for individuals. memberOf: URIRef # An Organization (or ProgramMembership) to which this Person or Organization belongs. members: URIRef # A member of this organization. membershipNumber: URIRef # A unique identifier for the membership. membershipPointsEarned: URIRef # The number of membership points earned by the member. If necessary, the unitText can be used to express the units the points are issued in. (e.g. stars, miles, etc.) memoryRequirements: URIRef # Minimum memory requirements. mentions: URIRef # Indicates that the CreativeWork contains a reference to, but is not necessarily about a concept. menu: URIRef # Either the actual menu as a structured representation, as text, or a URL of the menu. menuAddOn: URIRef # Additional menu item(s) such as a side dish of salad or side order of fries that can be added to this menu item. Additionally it can be a menu section containing allowed add-on menu items for this menu item. 
merchant: URIRef # 'merchant' is an out-dated term for 'seller'. merchantReturnDays: URIRef # Specifies either a fixed return date or the number of days (from the delivery date) that a product can be returned. Used when the [[returnPolicyCategory]] property is specified as [[MerchantReturnFiniteReturnWindow]]. merchantReturnLink: URIRef # Specifies a Web page or service by URL, for product returns. messageAttachment: URIRef # A CreativeWork attached to the message. mileageFromOdometer: URIRef # The total distance travelled by the particular vehicle since its initial production, as read from its odometer.\n\nTypical unit code(s): KMT for kilometers, SMI for statute miles minPrice: URIRef # The lowest price if the price is a range. minValue: URIRef # The lower value of some characteristic or property. minimumPaymentDue: URIRef # The minimum payment required at this time. missionCoveragePrioritiesPolicy: URIRef # For a [[NewsMediaOrganization]], a statement on coverage priorities, including any public agenda or stance on issues. model: URIRef # The model of the product. Use with the URL of a ProductModel or a textual representation of the model identifier. The URL of the ProductModel can be from an external source. It is recommended to additionally provide strong product identifiers via the gtin8/gtin13/gtin14 and mpn properties. modelDate: URIRef # The release date of a vehicle model (often used to differentiate versions of the same make and model). modifiedTime: URIRef # The date and time the reservation was modified. molecularFormula: URIRef # The empirical formula is the simplest whole number ratio of all the atoms in a molecule. molecularWeight: URIRef # This is the molecular weight of the entity being described, not of the parent. Units should be included in the form '<Number> <unit>', for example '12 amu' or as '<QuantitativeValue>. 
monoisotopicMolecularWeight: URIRef # The monoisotopic mass is the sum of the masses of the atoms in a molecule using the unbound, ground-state, rest mass of the principal (most abundant) isotope for each element instead of the isotopic average mass. Please include the units the form '<Number> <unit>', for example '770.230488 g/mol' or as '<QuantitativeValue>. monthlyMinimumRepaymentAmount: URIRef # The minimum payment is the lowest amount of money that one is required to pay on a credit card statement each month. monthsOfExperience: URIRef # Indicates the minimal number of months of experience required for a position. mpn: URIRef # The Manufacturer Part Number (MPN) of the product, or the product to which the offer refers. multipleValues: URIRef # Whether multiple values are allowed for the property. Default is false. muscleAction: URIRef # The movement the muscle generates. musicArrangement: URIRef # An arrangement derived from the composition. musicBy: URIRef # The composer of the soundtrack. musicCompositionForm: URIRef # The type of composition (e.g. overture, sonata, symphony, etc.). musicGroupMember: URIRef # A member of a music group—for example, John, Paul, George, or Ringo. musicReleaseFormat: URIRef # Format of this release (the type of recording media used, ie. compact disc, digital media, LP, etc.). musicalKey: URIRef # The key, mode, or scale this composition uses. naics: URIRef # The North American Industry Classification System (NAICS) code for a particular organization or business person. name: URIRef # The name of the item. namedPosition: URIRef # A position played, performed or filled by a person or organization, as part of an organization. For example, an athlete in a SportsTeam might play in the position named 'Quarterback'. nationality: URIRef # Nationality of the person. naturalProgression: URIRef # The expected progression of the condition if it is not treated and allowed to progress naturally. 
negativeNotes: URIRef # Indicates, in the context of a [[Review]] (e.g. framed as 'pro' vs 'con' considerations), negative considerations - either as unstructured text, or a list. nerve: URIRef # The underlying innervation associated with the muscle. nerveMotor: URIRef # The neurological pathway extension that involves muscle control. netWorth: URIRef # The total financial value of the person as calculated by subtracting assets from liabilities. newsUpdatesAndGuidelines: URIRef # Indicates a page with news updates and guidelines. This could often be (but is not required to be) the main page containing [[SpecialAnnouncement]] markup on a site. nextItem: URIRef # A link to the ListItem that follows the current one. noBylinesPolicy: URIRef # For a [[NewsMediaOrganization]] or other news-related [[Organization]], a statement explaining when authors of articles are not named in bylines. nonEqual: URIRef # This ordering relation for qualitative values indicates that the subject is not equal to the object. nonProprietaryName: URIRef # The generic name of this drug or supplement. nonprofitStatus: URIRef # nonprofit Status indicates the legal status of a non-profit organization in its primary place of business. normalRange: URIRef # Range of acceptable values for a typical patient, when applicable. nsn: URIRef # Indicates the [NATO stock number](https://en.wikipedia.org/wiki/NATO_Stock_Number) (nsn) of a [[Product]]. numAdults: URIRef # The number of adults staying in the unit. numChildren: URIRef # The number of children staying in the unit. numConstraints: URIRef # Indicates the number of constraints (not counting [[populationType]]) defined for a particular [[StatisticalPopulation]]. This helps applications understand if they have access to a sufficiently complete description of a [[StatisticalPopulation]]. numTracks: URIRef # The number of tracks in this album or playlist. 
numberOfAccommodationUnits: URIRef # Indicates the total (available plus unavailable) number of accommodation units in an [[ApartmentComplex]], or the number of accommodation units for a specific [[FloorPlan]] (within its specific [[ApartmentComplex]]). See also [[numberOfAvailableAccommodationUnits]]. numberOfAirbags: URIRef # The number or type of airbags in the vehicle. numberOfAvailableAccommodationUnits: URIRef # Indicates the number of available accommodation units in an [[ApartmentComplex]], or the number of accommodation units for a specific [[FloorPlan]] (within its specific [[ApartmentComplex]]). See also [[numberOfAccommodationUnits]]. numberOfAxles: URIRef # The number of axles.\n\nTypical unit code(s): C62 numberOfBathroomsTotal: URIRef # The total integer number of bathrooms in a some [[Accommodation]], following real estate conventions as [documented in RESO](https://ddwiki.reso.org/display/DDW17/BathroomsTotalInteger+Field): "The simple sum of the number of bathrooms. For example for a property with two Full Bathrooms and one Half Bathroom, the Bathrooms Total Integer will be 3.". See also [[numberOfRooms]]. numberOfBedrooms: URIRef # The total integer number of bedrooms in a some [[Accommodation]], [[ApartmentComplex]] or [[FloorPlan]]. numberOfBeds: URIRef # The quantity of the given bed type available in the HotelRoom, Suite, House, or Apartment. numberOfCredits: URIRef # The number of credits or units awarded by a Course or required to complete an EducationalOccupationalProgram. numberOfDoors: URIRef # The number of doors.\n\nTypical unit code(s): C62 numberOfEmployees: URIRef # The number of employees in an organization e.g. business. numberOfEpisodes: URIRef # The number of episodes in this season or series. 
numberOfForwardGears: URIRef # The total number of forward gears available for the transmission system of the vehicle.\n\nTypical unit code(s): C62 numberOfFullBathrooms: URIRef # Number of full bathrooms - The total number of full and ¾ bathrooms in an [[Accommodation]]. This corresponds to the [BathroomsFull field in RESO](https://ddwiki.reso.org/display/DDW17/BathroomsFull+Field). numberOfItems: URIRef # The number of items in an ItemList. Note that some descriptions might not fully describe all items in a list (e.g., multi-page pagination); in such cases, the numberOfItems would be for the entire list. numberOfLoanPayments: URIRef # The number of payments contractually required at origination to repay the loan. For monthly paying loans this is the number of months from the contractual first payment date to the maturity date. numberOfPages: URIRef # The number of pages in the book. numberOfPartialBathrooms: URIRef # Number of partial bathrooms - The total number of half and ¼ bathrooms in an [[Accommodation]]. This corresponds to the [BathroomsPartial field in RESO](https://ddwiki.reso.org/display/DDW17/BathroomsPartial+Field). numberOfPlayers: URIRef # Indicate how many people can play this game (minimum, maximum, or range). numberOfPreviousOwners: URIRef # The number of owners of the vehicle, including the current one.\n\nTypical unit code(s): C62 numberOfRooms: URIRef # The number of rooms (excluding bathrooms and closets) of the accommodation or lodging business. Typical unit code(s): ROM for room or C62 for no unit. The type of room can be put in the unitText property of the QuantitativeValue. numberOfSeasons: URIRef # The number of seasons in this series. numberedPosition: URIRef # A number associated with a role in an organization, for example, the number on an athlete's jersey. nutrition: URIRef # Nutrition information about the recipe or menu item. object: URIRef # The object upon which the action is carried out, whose state is kept intact or changed. 
Also known as the semantic roles patient, affected or undergoer (which change their state) or theme (which doesn't). e.g. John read *a book*. observationDate: URIRef # The observationDate of an [[Observation]]. observedNode: URIRef # The observedNode of an [[Observation]], often a [[StatisticalPopulation]]. occupancy: URIRef # The allowed total occupancy for the accommodation in persons (including infants etc). For individual accommodations, this is not necessarily the legal maximum but defines the permitted usage as per the contractual agreement (e.g. a double room used by a single person). Typical unit code(s): C62 for person occupationLocation: URIRef # The region/country for which this occupational description is appropriate. Note that educational requirements and qualifications can vary between jurisdictions. occupationalCategory: URIRef # A category describing the job, preferably using a term from a taxonomy such as [BLS O*NET-SOC](http://www.onetcenter.org/taxonomy.html), [ISCO-08](https://www.ilo.org/public/english/bureau/stat/isco/isco08/) or similar, with the property repeated for each applicable value. Ideally the taxonomy should be identified, and both the textual label and formal code for the category should be provided.\n Note: for historical reasons, any textual label and formal code provided as a literal may be assumed to be from O*NET-SOC. occupationalCredentialAwarded: URIRef # A description of the qualification, award, certificate, diploma or other occupational credential awarded as a consequence of successful completion of this course or program. offerCount: URIRef # The number of offers for the product. offeredBy: URIRef # A pointer to the organization or person making the offer. offers: URIRef # An offer to provide this item—for example, an offer to sell a product, rent the DVD of a movie, perform a service, or give away tickets to an event. Use [[businessFunction]] to indicate the kind of transaction offered, i.e. sell, lease, etc. 
This property can also be used to describe a [[Demand]]. While this property is listed as expected on a number of common types, it can be used in others. In that case, using a second type, such as Product or a subtype of Product, can clarify the nature of the offer. offersPrescriptionByMail: URIRef # Whether prescriptions can be delivered by mail. openingHours: URIRef # The general opening hours for a business. Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.\n\n* Days are specified using the following two-letter combinations: ```Mo```, ```Tu```, ```We```, ```Th```, ```Fr```, ```Sa```, ```Su```.\n* Times are specified using 24:00 format. For example, 3pm is specified as ```15:00```, 10am as ```10:00```. \n* Here is an example: <time itemprop="openingHours" datetime="Tu,Th 16:00-20:00">Tuesdays and Thursdays 4-8pm</time>.\n* If a business is open 7 days a week, then it can be specified as <time itemprop="openingHours" datetime="Mo-Su">Monday through Sunday, all day</time>. openingHoursSpecification: URIRef # The opening hours of a certain place. opens: URIRef # The opening hour of the place or service on the given day(s) of the week. operatingSystem: URIRef # Operating systems supported (Windows 7, OSX 10.6, Android 1.6). opponent: URIRef # A sub property of participant. The opponent on this action. option: URIRef # A sub property of object. The options subject to this action. orderDate: URIRef # Date order was placed. orderDelivery: URIRef # The delivery of the parcel related to this order or order item. orderItemNumber: URIRef # The identifier of the order item. orderItemStatus: URIRef # The current status of the order item. orderNumber: URIRef # The identifier of the transaction. orderQuantity: URIRef # The number of the item ordered. If the property is not set, assume the quantity is one. 
orderStatus: URIRef # The current status of the order. orderedItem: URIRef # The item ordered. organizer: URIRef # An organizer of an Event. originAddress: URIRef # Shipper's address. originalMediaContextDescription: URIRef # Describes, in a [[MediaReview]] when dealing with [[DecontextualizedContent]], background information that can contribute to better interpretation of the [[MediaObject]]. originalMediaLink: URIRef # Link to the page containing an original version of the content, or directly to an online copy of the original [[MediaObject]] content, e.g. video file. originatesFrom: URIRef # The vasculature the lymphatic structure originates, or afferents, from. overdosage: URIRef # Any information related to overdose on a drug, including signs or symptoms, treatments, contact information for emergency response. ownedFrom: URIRef # The date and time of obtaining the product. ownedThrough: URIRef # The date and time of giving up ownership on the product. ownershipFundingInfo: URIRef # For an [[Organization]] (often but not necessarily a [[NewsMediaOrganization]]), a description of organizational ownership structure; funding and grants. In a news/media setting, this is with particular reference to editorial independence. Note that the [[funder]] is also available and can be used to make basic funder information machine-readable. owns: URIRef # Products owned by the organization or person. pageEnd: URIRef # The page on which the work ends; for example "138" or "xvi". pageStart: URIRef # The page on which the work starts; for example "135" or "xiii". pagination: URIRef # Any description of pages that is not separated into pageStart and pageEnd; for example, "1-6, 9, 55" or "10-12, 46-49". parent: URIRef # A parent of this person. parentItem: URIRef # The parent of a question, answer or item in general. parentOrganization: URIRef # The larger organization that this organization is a [[subOrganization]] of, if any. 
parentService: URIRef # A broadcast service to which the broadcast service may belong to such as regional variations of a national channel. parentTaxon: URIRef # Closest parent taxon of the taxon in question. parents: URIRef # A parents of the person. partOfEpisode: URIRef # The episode to which this clip belongs. partOfInvoice: URIRef # The order is being paid as part of the referenced Invoice. partOfOrder: URIRef # The overall order the items in this delivery were included in. partOfSeason: URIRef # The season to which this episode belongs. partOfSeries: URIRef # The series to which this episode or season belongs. partOfSystem: URIRef # The anatomical or organ system that this structure is part of. partOfTVSeries: URIRef # The TV series to which this episode or season belongs. partOfTrip: URIRef # Identifies that this [[Trip]] is a subTrip of another Trip. For example Day 1, Day 2, etc. of a multi-day trip. participant: URIRef # Other co-agents that participated in the action indirectly. e.g. John wrote a book with *Steve*. partySize: URIRef # Number of people the reservation should accommodate. passengerPriorityStatus: URIRef # The priority status assigned to a passenger for security or boarding (e.g. FastTrack or Priority). passengerSequenceNumber: URIRef # The passenger's sequence number as assigned by the airline. pathophysiology: URIRef # Changes in the normal mechanical, physical, and biochemical functions that are associated with this activity or condition. pattern: URIRef # A pattern that something has, for example 'polka dot', 'striped', 'Canadian flag'. Values are typically expressed as text, although links to controlled value schemes are also supported. 
payload: URIRef # The permitted weight of passengers and cargo, EXCLUDING the weight of the empty vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: Many databases specify the permitted TOTAL weight instead, which is the sum of [[weight]] and [[payload]]\n* Note 2: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 3: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 4: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. paymentAccepted: URIRef # Cash, Credit Card, Cryptocurrency, Local Exchange Tradings System, etc. paymentDue: URIRef # The date that payment is due. paymentDueDate: URIRef # The date that payment is due. paymentMethod: URIRef # The name of the credit card or other method of payment for the order. paymentMethodId: URIRef # An identifier for the method of payment used (e.g. the last 4 digits of the credit card). paymentStatus: URIRef # The status of payment; whether the invoice has been paid or not. paymentUrl: URIRef # The URL for sending a payment. penciler: URIRef # The individual who draws the primary narrative artwork. percentile10: URIRef # The 10th percentile value. percentile25: URIRef # The 25th percentile value. percentile75: URIRef # The 75th percentile value. percentile90: URIRef # The 90th percentile value. performTime: URIRef # The length of time it takes to perform instructions or a direction (not including time to prepare the supplies), in [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601). performer: URIRef # A performer at the event—for example, a presenter, musician, musical group or actor. performerIn: URIRef # Event that this person is a performer or participant in. performers: URIRef # The main performer or performers of the event—for example, a presenter, musician, or actor. 
permissionType: URIRef # The type of permission granted the person, organization, or audience. permissions: URIRef # Permission(s) required to run the app (for example, a mobile app may require full internet access or may run only on wifi). permitAudience: URIRef # The target audience for this permit. permittedUsage: URIRef # Indications regarding the permitted usage of the accommodation. petsAllowed: URIRef # Indicates whether pets are allowed to enter the accommodation or lodging business. More detailed information can be put in a text value. phoneticText: URIRef # Representation of a text [[textValue]] using the specified [[speechToTextMarkup]]. For example the city name of Houston in IPA: /ˈhjuːstən/. photo: URIRef # A photograph of this place. photos: URIRef # Photographs of this place. physicalRequirement: URIRef # A description of the types of physical activity associated with the job. Defined terms such as those in O*net may be used, but note that there is no way to specify the level of ability as well as its nature when using a defined term. physiologicalBenefits: URIRef # Specific physiologic benefits associated to the plan. pickupLocation: URIRef # Where a taxi will pick up a passenger or a rental car can be picked up. pickupTime: URIRef # When a taxi will pickup a passenger or a rental car can be picked up. playMode: URIRef # Indicates whether this game is multi-player, co-op or single-player. The game can be marked as multi-player, co-op and single-player at the same time. playerType: URIRef # Player type required—for example, Flash or Silverlight. playersOnline: URIRef # Number of players on the server. polygon: URIRef # A polygon is the area enclosed by a point-to-point path for which the starting and ending points are the same. A polygon is expressed as a series of four or more space delimited points where the first and final points are identical. 
populationType: URIRef # Indicates the populationType common to all members of a [[StatisticalPopulation]]. position: URIRef # The position of an item in a series or sequence of items. positiveNotes: URIRef # Indicates, in the context of a [[Review]] (e.g. framed as 'pro' vs 'con' considerations), positive considerations - either as unstructured text, or a list. possibleComplication: URIRef # A possible unexpected and unfavorable evolution of a medical condition. Complications may include worsening of the signs or symptoms of the disease, extension of the condition to other organ systems, etc. possibleTreatment: URIRef # A possible treatment to address this condition, sign or symptom. postOfficeBoxNumber: URIRef # The post office box number for PO box addresses. postOp: URIRef # A description of the postoperative procedures, care, and/or followups for this device. postalCode: URIRef # The postal code. For example, 94043. postalCodeBegin: URIRef # First postal code in a range (included). postalCodeEnd: URIRef # Last postal code in the range (included). Needs to be after [[postalCodeBegin]]. postalCodePrefix: URIRef # A defined range of postal codes indicated by a common textual prefix. Used for non-numeric systems such as UK. postalCodeRange: URIRef # A defined range of postal codes. potentialAction: URIRef # Indicates a potential Action, which describes an idealized action in which this thing would play an 'object' role. potentialUse: URIRef # Intended use of the BioChemEntity by humans. preOp: URIRef # A description of the workup, testing, and other preparations required before implanting this device. predecessorOf: URIRef # A pointer from a previous, often discontinued variant of the product to its newer variant. pregnancyCategory: URIRef # Pregnancy category of this drug. pregnancyWarning: URIRef # Any precaution, guidance, contraindication, etc. related to this drug's use during pregnancy. 
prepTime: URIRef # The length of time it takes to prepare the items to be used in instructions or a direction, in [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601). preparation: URIRef # Typical preparation that a patient must undergo before having the procedure performed. prescribingInfo: URIRef # Link to prescribing information for the drug. prescriptionStatus: URIRef # Indicates the status of drug prescription eg. local catalogs classifications or whether the drug is available by prescription or over-the-counter, etc. previousItem: URIRef # A link to the ListItem that preceeds the current one. previousStartDate: URIRef # Used in conjunction with eventStatus for rescheduled or cancelled events. This property contains the previously scheduled start date. For rescheduled events, the startDate property should be used for the newly scheduled start date. In the (rare) case of an event that has been postponed and rescheduled multiple times, this field may be repeated. price: URIRef # The offer price of a product, or of a price component when attached to PriceSpecification and its subtypes.\n\nUsage guidelines:\n\n* Use the [[priceCurrency]] property (with standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. "BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR") instead of including [ambiguous symbols](http://en.wikipedia.org/wiki/Dollar_sign#Currencies_that_use_the_dollar_or_peso_sign) such as '$' in the value.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. 
Avoid using these symbols as a readability separator.\n* Note that both [RDFa](http://www.w3.org/TR/xhtml-rdfa-primer/#using-the-content-attribute) and Microdata syntax allow the use of a "content=" attribute for publishing simple machine-readable values alongside more human-friendly formatting.\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols. priceComponent: URIRef # This property links to all [[UnitPriceSpecification]] nodes that apply in parallel for the [[CompoundPriceSpecification]] node. priceComponentType: URIRef # Identifies a price component (for example, a line item on an invoice), part of the total price for an offer. priceCurrency: URIRef # The currency of the price, or a price component when attached to [[PriceSpecification]] and its subtypes.\n\nUse standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. "BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR". priceRange: URIRef # The price range of the business, for example ```$$$```. priceSpecification: URIRef # One or more detailed price specifications, indicating the unit price and delivery or payment charges. priceType: URIRef # Defines the type of a price specified for an offered product, for example a list price, a (temporary) sale price or a manufacturer suggested retail price. If multiple prices are specified for an offer the [[priceType]] property can be used to identify the type of each such specified price. The value of priceType can be specified as a value from enumeration PriceTypeEnumeration or as a free form text string for price types that are not already predefined in PriceTypeEnumeration. 
priceValidUntil: URIRef # The date after which the price is no longer available. primaryImageOfPage: URIRef # Indicates the main image on the page. primaryPrevention: URIRef # A preventative therapy used to prevent an initial occurrence of the medical condition, such as vaccination. printColumn: URIRef # The number of the column in which the NewsArticle appears in the print edition. printEdition: URIRef # The edition of the print product in which the NewsArticle appears. printPage: URIRef # If this NewsArticle appears in print, this field indicates the name of the page on which the article is found. Please note that this field is intended for the exact page name (e.g. A5, B18). printSection: URIRef # If this NewsArticle appears in print, this field indicates the print section in which the article appeared. procedure: URIRef # A description of the procedure involved in setting up, using, and/or installing the device. procedureType: URIRef # The type of procedure, for example Surgical, Noninvasive, or Percutaneous. processingTime: URIRef # Estimated processing time for the service using this channel. processorRequirements: URIRef # Processor architecture required to run the application (e.g. IA64). producer: URIRef # The person or organization who produced the work (e.g. music album, movie, tv/radio series etc.). produces: URIRef # The tangible thing generated by the service, e.g. a passport, permit, etc. productGroupID: URIRef # Indicates a textual identifier for a ProductGroup. productID: URIRef # The product identifier, such as ISBN. For example: ``` meta itemprop="productID" content="isbn:123-456-789" ```. productSupported: URIRef # The product or service this support contact point is related to (such as product support for a particular product line). This can be a specific product or product line (e.g. "iPhone") or a general category of products or services (e.g. "smartphones"). 
productionCompany: URIRef # The production company or studio responsible for the item e.g. series, video game, episode etc. productionDate: URIRef # The date of production of the item, e.g. vehicle. proficiencyLevel: URIRef # Proficiency needed for this content; expected values: 'Beginner', 'Expert'. programMembershipUsed: URIRef # Any membership in a frequent flyer, hotel loyalty program, etc. being applied to the reservation. programName: URIRef # The program providing the membership. programPrerequisites: URIRef # Prerequisites for enrolling in the program. programType: URIRef # The type of educational or occupational program. For example, classroom, internship, alternance, etc.. programmingLanguage: URIRef # The computer programming language. programmingModel: URIRef # Indicates whether API is managed or unmanaged. propertyID: URIRef # A commonly used identifier for the characteristic represented by the property, e.g. a manufacturer or a standard code for a property. propertyID can be (1) a prefixed string, mainly meant to be used with standards for product properties; (2) a site-specific, non-prefixed string (e.g. the primary key of the property or the vendor-specific id of the property), or (3) a URL indicating the type of the property, either pointing to an external vocabulary, or a Web resource that describes the property (e.g. a glossary entry). Standards bodies should promote a standard prefix for the identifiers of properties from their standards. proprietaryName: URIRef # Proprietary name given to the diet plan, typically by its originator or creator. proteinContent: URIRef # The number of grams of protein. provider: URIRef # The service provider, service operator, or service performer; the goods producer. Another party (a seller) may offer those services or goods on behalf of the provider. A provider may also serve as the seller. providerMobility: URIRef # Indicates the mobility of a provided service (e.g. 'static', 'dynamic'). 
providesBroadcastService: URIRef # The BroadcastService offered on this channel. providesService: URIRef # The service provided by this channel. publicAccess: URIRef # A flag to signal that the [[Place]] is open to public visitors. If this property is omitted there is no assumed default boolean value publicTransportClosuresInfo: URIRef # Information about public transport closures. publication: URIRef # A publication event associated with the item. publicationType: URIRef # The type of the medical article, taken from the US NLM MeSH publication type catalog. See also [MeSH documentation](http://www.nlm.nih.gov/mesh/pubtypes.html). publishedBy: URIRef # An agent associated with the publication event. publishedOn: URIRef # A broadcast service associated with the publication event. publisher: URIRef # The publisher of the creative work. publisherImprint: URIRef # The publishing division which published the comic. publishingPrinciples: URIRef # The publishingPrinciples property indicates (typically via [[URL]]) a document describing the editorial principles of an [[Organization]] (or individual e.g. a [[Person]] writing a blog) that relate to their activities as a publisher, e.g. ethics or diversity policies. When applied to a [[CreativeWork]] (e.g. [[NewsArticle]]) the principles are those of the party primarily responsible for the creation of the [[CreativeWork]]. While such policies are most typically expressed in natural language, sometimes related information (e.g. indicating a [[funder]]) can be expressed using schema.org terminology. purchaseDate: URIRef # The date the item e.g. vehicle was purchased by the current owner. qualifications: URIRef # Specific qualifications required for this role or Occupation. quarantineGuidelines: URIRef # Guidelines about quarantine rules, e.g. in the context of a pandemic. query: URIRef # A sub property of instrument. The query used on this action. 
quest: URIRef # The task that a player-controlled character, or group of characters may complete in order to gain a reward. question: URIRef # A sub property of object. A question. rangeIncludes: URIRef # Relates a property to a class that constitutes (one of) the expected type(s) for values of the property. ratingCount: URIRef # The count of total number of ratings. ratingExplanation: URIRef # A short explanation (e.g. one to two sentences) providing background context and other information that led to the conclusion expressed in the rating. This is particularly applicable to ratings associated with "fact check" markup using [[ClaimReview]]. ratingValue: URIRef # The rating for the content.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. readBy: URIRef # A person who reads (performs) the audiobook. readonlyValue: URIRef # Whether or not a property is mutable. Default is false. Specifying this for a property that also has a value makes it act similar to a "hidden" input in an HTML form. realEstateAgent: URIRef # A sub property of participant. The real estate agent involved in the action. recipe: URIRef # A sub property of instrument. The recipe/instructions used to perform the action. recipeCategory: URIRef # The category of the recipe—for example, appetizer, entree, etc. recipeCuisine: URIRef # The cuisine of the recipe (for example, French or Ethiopian). recipeIngredient: URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. recipeInstructions: URIRef # A step in making the recipe, in the form of a single item (document, video, etc.) or an ordered list with HowToStep and/or HowToSection items. 
recipeYield: URIRef # The quantity produced by the recipe (for example, number of people served, number of servings, etc). recipient: URIRef # A sub property of participant. The participant who is at the receiving end of the action. recognizedBy: URIRef # An organization that acknowledges the validity, value or utility of a credential. Note: recognition may include a process of quality assurance or accreditation. recognizingAuthority: URIRef # If applicable, the organization that officially recognizes this entity as part of its endorsed system of medicine. recommendationStrength: URIRef # Strength of the guideline's recommendation (e.g. 'class I'). recommendedIntake: URIRef # Recommended intake of this supplement for a given population as defined by a specific recommending authority. recordLabel: URIRef # The label that issued the release. recordedAs: URIRef # An audio recording of the work. recordedAt: URIRef # The Event where the CreativeWork was recorded. The CreativeWork may capture all or part of the event. recordedIn: URIRef # The CreativeWork that captured all or part of this Event. recordingOf: URIRef # The composition this track is a recording of. recourseLoan: URIRef # The only way you get the money back in the event of default is the security. Recourse is where you still have the opportunity to go back to the borrower for the rest of the money. referenceQuantity: URIRef # The reference quantity for which a certain price applies, e.g. 1 EUR per 4 kWh of electricity. This property is a replacement for unitOfMeasurement for the advanced cases where the price does not relate to a standard unit. referencesOrder: URIRef # The Order(s) related to this Invoice. One or more Orders may be combined into a single Invoice. refundType: URIRef # A refund type, from an enumerated list. regionDrained: URIRef # The anatomical or organ system drained by this vessel; generally refers to a specific part of an organ. 
regionsAllowed: URIRef # The regions where the media is allowed. If not specified, then it's assumed to be allowed everywhere. Specify the countries in [ISO 3166 format](http://en.wikipedia.org/wiki/ISO_3166). relatedAnatomy: URIRef # Anatomical systems or structures that relate to the superficial anatomy. relatedCondition: URIRef # A medical condition associated with this anatomy. relatedDrug: URIRef # Any other drug related to this one, for example commonly-prescribed alternatives. relatedLink: URIRef # A link related to this web page, for example to other related web pages. relatedStructure: URIRef # Related anatomical structure(s) that are not part of the system but relate or connect to it, such as vascular bundles associated with an organ system. relatedTherapy: URIRef # A medical therapy related to this anatomy. relatedTo: URIRef # The most generic familial relation. releaseDate: URIRef # The release date of a product or product model. This can be used to distinguish the exact variant of a product. releaseNotes: URIRef # Description of what changed in this version. releaseOf: URIRef # The album this is a release of. releasedEvent: URIRef # The place and time the release was issued, expressed as a PublicationEvent. relevantOccupation: URIRef # The Occupation for the JobPosting. relevantSpecialty: URIRef # If applicable, a medical specialty in which this entity is relevant. remainingAttendeeCapacity: URIRef # The number of attendee places for an event that remain unallocated. renegotiableLoan: URIRef # Whether the terms for payment of interest can be renegotiated during the life of the loan. repeatCount: URIRef # Defines the number of times a recurring [[Event]] will take place repeatFrequency: URIRef # Defines the frequency at which [[Event]]s will occur according to a schedule [[Schedule]]. The intervals between events should be defined as a [[Duration]] of time. repetitions: URIRef # Number of times one should repeat the activity. 
replacee: URIRef # A sub property of object. The object that is being replaced. replacer: URIRef # A sub property of object. The object that replaces. replyToUrl: URIRef # The URL at which a reply may be posted to the specified UserComment. reportNumber: URIRef # The number or other unique designator assigned to a Report by the publishing organization. representativeOfPage: URIRef # Indicates whether this image is representative of the content of the page. requiredCollateral: URIRef # Assets required to secure loan or credit repayments. It may take form of third party pledge, goods, financial instruments (cash, securities, etc.) requiredGender: URIRef # Audiences defined by a person's gender. requiredMaxAge: URIRef # Audiences defined by a person's maximum age. requiredMinAge: URIRef # Audiences defined by a person's minimum age. requiredQuantity: URIRef # The required quantity of the item(s). requirements: URIRef # Component dependency requirements for application. This includes runtime environments and shared libraries that are not included in the application distribution package, but required to run the application (Examples: DirectX, Java or .NET runtime). requiresSubscription: URIRef # Indicates if use of the media require a subscription (either paid or free). Allowed values are ```true``` or ```false``` (note that an earlier version had 'yes', 'no'). reservationFor: URIRef # The thing -- flight, event, restaurant,etc. being reserved. reservationId: URIRef # A unique identifier for the reservation. reservationStatus: URIRef # The current status of the reservation. reservedTicket: URIRef # A ticket associated with the reservation. responsibilities: URIRef # Responsibilities associated with this role or Occupation. restPeriods: URIRef # How often one should break from the activity. 
restockingFee: URIRef # Use [[MonetaryAmount]] to specify a fixed restocking fee for product returns, or use [[Number]] to specify a percentage of the product price paid by the customer. result: URIRef # The result produced in the action. e.g. John wrote *a book*. resultComment: URIRef # A sub property of result. The Comment created or sent as a result of this action. resultReview: URIRef # A sub property of result. The review that resulted in the performing of the action. returnFees: URIRef # The type of return fees for purchased products (for any return reason) returnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a product returned for any reason. returnMethod: URIRef # The type of return method offered, specified from an enumeration. returnPolicyCategory: URIRef # Specifies an applicable return policy (from an enumeration). returnPolicyCountry: URIRef # The country where the product has to be sent to for returns, for example "Ireland" using the [[name]] property of [[Country]]. You can also provide the two-letter [ISO 3166-1 alpha-2 country code](http://en.wikipedia.org/wiki/ISO_3166-1). Note that this can be different from the country where the product was originally shipped from or sent too. returnPolicySeasonalOverride: URIRef # Seasonal override of a return policy. returnShippingFeesAmount: URIRef # Amount of shipping costs for product returns (for any reason). Applicable when property [[returnFees]] equals [[ReturnShippingFees]]. review: URIRef # A review of the item. reviewAspect: URIRef # This Review or Rating is relevant to this part or facet of the itemReviewed. reviewBody: URIRef # The actual body of the review. reviewCount: URIRef # The count of total number of reviews. reviewRating: URIRef # The rating given in this review. Note that reviews can themselves be rated. The ```reviewRating``` applies to rating given by the review. 
The [[aggregateRating]] property applies to the review itself, as a creative work. reviewedBy: URIRef # People or organizations that have reviewed the content on this web page for accuracy and/or completeness. reviews: URIRef # Review of the item. riskFactor: URIRef # A modifiable or non-modifiable factor that increases the risk of a patient contracting this condition, e.g. age, coexisting condition. risks: URIRef # Specific physiologic risks associated to the diet plan. roleName: URIRef # A role played, performed or filled by a person or organization. For example, the team of creators for a comic book might fill the roles named 'inker', 'penciller', and 'letterer'; or an athlete in a SportsTeam might play in the position named 'Quarterback'. roofLoad: URIRef # The permitted total weight of cargo and installations (e.g. a roof rack) on top of the vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 2: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]]\n* Note 3: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. rsvpResponse: URIRef # The response (yes, no, maybe) to the RSVP. runsTo: URIRef # The vasculature the lymphatic structure runs, or efferents, to. runtime: URIRef # Runtime platform or script interpreter dependencies (Example - Java v1, Python2.3, .Net Framework 3.0). runtimePlatform: URIRef # Runtime platform or script interpreter dependencies (Example - Java v1, Python2.3, .Net Framework 3.0). rxcui: URIRef # The RxCUI drug identifier from RXNORM. safetyConsideration: URIRef # Any potential safety concern associated with the supplement. May include interactions with other drugs and foods, pregnancy, breastfeeding, known adverse reactions, and documented efficacy of the supplement. 
salaryCurrency: URIRef # The currency (coded using [ISO 4217](http://en.wikipedia.org/wiki/ISO_4217) ) used for the main salary information in this job posting or for this employee. salaryUponCompletion: URIRef # The expected salary upon completing the training. sameAs: URIRef # URL of a reference Web page that unambiguously indicates the item's identity. E.g. the URL of the item's Wikipedia page, Wikidata entry, or official website. sampleType: URIRef # What type of code sample: full (compile ready) solution, code snippet, inline code, scripts, template. saturatedFatContent: URIRef # The number of grams of saturated fat. scheduleTimezone: URIRef # Indicates the timezone for which the time(s) indicated in the [[Schedule]] are given. The value provided should be among those listed in the IANA Time Zone Database. scheduledPaymentDate: URIRef # The date the invoice is scheduled to be paid. scheduledTime: URIRef # The time the object is scheduled to. schemaVersion: URIRef # Indicates (by URL or string) a particular version of a schema used in some CreativeWork. This property was created primarily to indicate the use of a specific schema.org release, e.g. ```10.0``` as a simple string, or more explicitly via URL, ```https://schema.org/docs/releases.html#v10.0```. There may be situations in which other schemas might usefully be referenced this way, e.g. ```http://dublincore.org/specifications/dublin-core/dces/1999-07-02/``` but this has not been carefully explored in the community. schoolClosuresInfo: URIRef # Information about school closures. screenCount: URIRef # The number of screens in the movie theater. screenshot: URIRef # A link to a screenshot image of the app. sdDatePublished: URIRef # Indicates the date on which the current structured data was generated / published. Typically used alongside [[sdPublisher]] sdLicense: URIRef # A license document that applies to this structured data, typically indicated by URL. 
sdPublisher: URIRef # Indicates the party responsible for generating and publishing the current structured data markup, typically in cases where the structured data is derived automatically from existing published content but published on a different site. For example, student projects and open data initiatives often re-publish existing content with more explicitly structured metadata. The [[sdPublisher]] property helps make such practices more explicit. season: URIRef # A season in a media series. seasonNumber: URIRef # Position of the season within an ordered group of seasons. seasons: URIRef # A season in a media series. seatNumber: URIRef # The location of the reserved seat (e.g., 27). seatRow: URIRef # The row location of the reserved seat (e.g., B). seatSection: URIRef # The section location of the reserved seat (e.g. Orchestra). seatingCapacity: URIRef # The number of persons that can be seated (e.g. in a vehicle), both in terms of the physical space available, and in terms of limitations set by law.\n\nTypical unit code(s): C62 for persons seatingType: URIRef # The type/class of the seat. secondaryPrevention: URIRef # A preventative therapy used to prevent reoccurrence of the medical condition after an initial episode of the condition. securityClearanceRequirement: URIRef # A description of any security clearance requirements of the job. securityScreening: URIRef # The type of security screening the passenger is subject to. seeks: URIRef # A pointer to products or services sought by the organization or person (demand). seller: URIRef # An entity which offers (sells / leases / lends / loans) the services / goods. A seller may also be a provider. sender: URIRef # A sub property of participant. The participant who is at the sending end of the action. sensoryRequirement: URIRef # A description of any sensory requirements and levels necessary to function on the job, including hearing and vision. 
Defined terms such as those in O*net may be used, but note that there is no way to specify the level of ability as well as its nature when using a defined term. sensoryUnit: URIRef # The neurological pathway extension that inputs and sends information to the brain or spinal cord. serialNumber: URIRef # The serial number or any alphanumeric identifier of a particular product. When attached to an offer, it is a shortcut for the serial number of the product included in the offer. seriousAdverseOutcome: URIRef # A possible serious complication and/or serious side effect of this therapy. Serious adverse outcomes include those that are life-threatening; result in death, disability, or permanent damage; require hospitalization or prolong existing hospitalization; cause congenital anomalies or birth defects; or jeopardize the patient and may require medical or surgical intervention to prevent one of the outcomes in this definition. serverStatus: URIRef # Status of a game server. servesCuisine: URIRef # The cuisine of the restaurant. serviceArea: URIRef # The geographic area where the service is provided. serviceAudience: URIRef # The audience eligible for this service. serviceLocation: URIRef # The location (e.g. civic structure, local business, etc.) where a person can go to access the service. serviceOperator: URIRef # The operating organization, if different from the provider. This enables the representation of services that are provided by an organization, but operated by another organization like a subcontractor. serviceOutput: URIRef # The tangible thing generated by the service, e.g. a passport, permit, etc. servicePhone: URIRef # The phone number to use to access the service. servicePostalAddress: URIRef # The address for accessing the service by mail. serviceSmsNumber: URIRef # The number to access the service by text message. serviceType: URIRef # The type of service being offered, e.g. veterans' benefits, emergency relief, etc. 
serviceUrl: URIRef # The website to access the service. servingSize: URIRef # The serving size, in terms of the number of volume or mass. sha256: URIRef # The [SHA-2](https://en.wikipedia.org/wiki/SHA-2) SHA256 hash of the content of the item. For example, a zero-length input has value 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' sharedContent: URIRef # A CreativeWork such as an image, video, or audio clip shared as part of this posting. shippingDestination: URIRef # indicates (possibly multiple) shipping destinations. These can be defined in several ways e.g. postalCode ranges. shippingDetails: URIRef # Indicates information about the shipping policies and options associated with an [[Offer]]. shippingLabel: URIRef # Label to match an [[OfferShippingDetails]] with a [[ShippingRateSettings]] (within the context of a [[shippingSettingsLink]] cross-reference). shippingRate: URIRef # The shipping rate is the cost of shipping to the specified destination. Typically, the maxValue and currency values (of the [[MonetaryAmount]]) are most appropriate. shippingSettingsLink: URIRef # Link to a page containing [[ShippingRateSettings]] and [[DeliveryTimeSettings]] details. sibling: URIRef # A sibling of the person. siblings: URIRef # A sibling of the person. signDetected: URIRef # A sign detected by the test. signOrSymptom: URIRef # A sign or symptom of this condition. Signs are objective or physically observable manifestations of the medical condition while symptoms are the subjective experience of the medical condition. significance: URIRef # The significance associated with the superficial anatomy; as an example, how characteristics of the superficial anatomy can suggest underlying medical conditions or courses of treatment. significantLink: URIRef # One of the more significant URLs on the page. Typically, these are the non-navigation links that are clicked on the most. significantLinks: URIRef # The most significant URLs on the page. 
Typically, these are the non-navigation links that are clicked on the most. size: URIRef # A standardized size of a product or creative work, specified either through a simple textual string (for example 'XL', '32Wx34L'), a QuantitativeValue with a unitCode, or a comprehensive and structured [[SizeSpecification]]; in other cases, the [[width]], [[height]], [[depth]] and [[weight]] properties may be more applicable. sizeGroup: URIRef # The size group (also known as "size type") for a product's size. Size groups are common in the fashion industry to define size segments and suggested audiences for wearable products. Multiple values can be combined, for example "men's big and tall", "petite maternity" or "regular" sizeSystem: URIRef # The size system used to identify a product's size. Typically either a standard (for example, "GS1" or "ISO-EN13402"), country code (for example "US" or "JP"), or a measuring system (for example "Metric" or "Imperial"). skills: URIRef # A statement of knowledge, skill, ability, task or any other assertion expressing a competency that is desired or required to fulfill this role or to work in this occupation. sku: URIRef # The Stock Keeping Unit (SKU), i.e. a merchant-specific identifier for a product or service, or the product to which the offer refers. slogan: URIRef # A slogan or motto associated with the item. smiles: URIRef # A specification in form of a line notation for describing the structure of chemical species using short ASCII strings. Double bond stereochemistry \ indicators may need to be escaped in the string in formats where the backslash is an escape character. smokingAllowed: URIRef # Indicates whether it is allowed to smoke in the place, e.g. in the restaurant, hotel or hotel room. sodiumContent: URIRef # The number of milligrams of sodium. softwareAddOn: URIRef # Additional content for a software application. softwareHelp: URIRef # Software application help. 
softwareRequirements: URIRef # Component dependency requirements for application. This includes runtime environments and shared libraries that are not included in the application distribution package, but required to run the application (Examples: DirectX, Java or .NET runtime). softwareVersion: URIRef # Version of the software instance. sourceOrganization: URIRef # The Organization on whose behalf the creator was working. sourcedFrom: URIRef # The neurological pathway that originates the neurons. spatial: URIRef # The "spatial" property can be used in cases when more specific properties (e.g. [[locationCreated]], [[spatialCoverage]], [[contentLocation]]) are not known to be appropriate. spatialCoverage: URIRef # The spatialCoverage of a CreativeWork indicates the place(s) which are the focus of the content. It is a subproperty of contentLocation intended primarily for more technical and detailed materials. For example with a Dataset, it indicates areas that the dataset describes: a dataset of New York weather would have spatialCoverage which was the place: the state of New York. speakable: URIRef # Indicates sections of a Web page that are particularly 'speakable' in the sense of being highlighted as being especially appropriate for text-to-speech conversion. Other sections of a page may also be usefully spoken in particular circumstances; the 'speakable' property serves to indicate the parts most likely to be generally useful for speech. The *speakable* property can be repeated an arbitrary number of times, with three kinds of possible 'content-locator' values: 1.) *id-value* URL references - uses *id-value* of an element in the page being annotated. The simplest use of *speakable* has (potentially relative) URL values, referencing identified sections of the document concerned. 2.) CSS Selectors - addresses content in the annotated page, eg. via class attribute. Use the [[cssSelector]] property. 3.) 
XPaths - addresses content via XPaths (assuming an XML view of the content). Use the [[xpath]] property. For more sophisticated markup of speakable sections beyond simple ID references, either CSS selectors or XPath expressions to pick out document section(s) as speakable. For this we define a supporting type, [[SpeakableSpecification]] which is defined to be a possible value of the *speakable* property. specialCommitments: URIRef # Any special commitments associated with this job posting. Valid entries include VeteranCommit, MilitarySpouseCommit, etc. specialOpeningHoursSpecification: URIRef # The special opening hours of a certain place.\n\nUse this to explicitly override general opening hours brought in scope by [[openingHoursSpecification]] or [[openingHours]]. specialty: URIRef # One of the domain specialities to which this web page's content applies. speechToTextMarkup: URIRef # Form of markup used. eg. [SSML](https://www.w3.org/TR/speech-synthesis11) or [IPA](https://www.wikidata.org/wiki/Property:P898). speed: URIRef # The speed range of the vehicle. If the vehicle is powered by an engine, the upper limit of the speed range (indicated by [[maxValue]] should be the maximum speed achievable under regular conditions.\n\nTypical unit code(s): KMH for km/h, HM for mile per hour (0.447 04 m/s), KNT for knot\n\n*Note 1: Use [[minValue]] and [[maxValue]] to indicate the range. Typically, the minimal value is zero.\n* Note 2: There are many different ways of measuring the speed range. You can link to information about how the given value has been determined using the [[valueReference]] property. spokenByCharacter: URIRef # The (e.g. fictional) character, Person or Organization to whom the quotation is attributed within the containing CreativeWork. sponsor: URIRef # A person or organization that supports a thing through a pledge, promise, or financial contribution. e.g. a sponsor of a Medical Study or a corporate sponsor of an event. 
sport: URIRef # A type of sport (e.g. Baseball). sportsActivityLocation: URIRef # A sub property of location. The sports activity location where this action occurred. sportsEvent: URIRef # A sub property of location. The sports event where this action occurred. sportsTeam: URIRef # A sub property of participant. The sports team that participated on this action. spouse: URIRef # The person's spouse. stage: URIRef # The stage of the condition, if applicable. stageAsNumber: URIRef # The stage represented as a number, e.g. 3. starRating: URIRef # An official rating for a lodging business or food establishment, e.g. from national associations or standards bodies. Use the author property to indicate the rating organization, e.g. as an Organization with name such as (e.g. HOTREC, DEHOGA, WHR, or Hotelstars). startDate: URIRef # The start date and time of the item (in [ISO 8601 date format](http://en.wikipedia.org/wiki/ISO_8601)). startOffset: URIRef # The start time of the clip expressed as the number of seconds from the beginning of the work. startTime: URIRef # The startTime of something. For a reserved event or service (e.g. FoodEstablishmentReservation), the time that it is expected to start. For actions that span a period of time, when the action was performed. e.g. John wrote a book from *January* to December. For media, including audio and video, it's the time offset of the start of a clip within a larger file.\n\nNote that Event uses startDate/endDate instead of startTime/endTime, even when describing dates with times. This situation may be clarified in future revisions. status: URIRef # The status of the study (enumerated). steeringPosition: URIRef # The position of the steering wheel or similar device (mostly for cars). step: URIRef # A single step item (as HowToStep, text, document, video, etc.) or a HowToSection. stepValue: URIRef # The stepValue attribute indicates the granularity that is expected (and required) of the value in a PropertyValueSpecification. 
steps: URIRef # A single step item (as HowToStep, text, document, video, etc.) or a HowToSection (originally misnamed 'steps'; 'step' is preferred). storageRequirements: URIRef # Storage requirements (free space required). streetAddress: URIRef # The street address. For example, 1600 Amphitheatre Pkwy. strengthUnit: URIRef # The units of an active ingredient's strength, e.g. mg. strengthValue: URIRef # The value of an active ingredient's strength, e.g. 325. structuralClass: URIRef # The name given to how bone physically connects to each other. study: URIRef # A medical study or trial related to this entity. studyDesign: URIRef # Specifics about the observational study design (enumerated). studyLocation: URIRef # The location in which the study is taking/took place. studySubject: URIRef # A subject of the study, i.e. one of the medical conditions, therapies, devices, drugs, etc. investigated by the study. subEvent: URIRef # An Event that is part of this event. For example, a conference event includes many presentations, each of which is a subEvent of the conference. subEvents: URIRef # Events that are a part of this event. For example, a conference event includes many presentations, each subEvents of the conference. subOrganization: URIRef # A relationship between two organizations where the first includes the second, e.g., as a subsidiary. See also: the more specific 'department' property. subReservation: URIRef # The individual reservations included in the package. Typically a repeated property. subStageSuffix: URIRef # The substage, e.g. 'a' for Stage IIIa. subStructure: URIRef # Component (sub-)structure(s) that comprise this anatomical structure. subTest: URIRef # A component test of the panel. subTrip: URIRef # Identifies a [[Trip]] that is a subTrip of this Trip. For example Day 1, Day 2, etc. of a multi-day trip. subjectOf: URIRef # A CreativeWork or Event about this Thing. 
subtitleLanguage: URIRef # Languages in which subtitles/captions are available, in [IETF BCP 47 standard format](http://tools.ietf.org/html/bcp47). successorOf: URIRef # A pointer from a newer variant of a product to its previous, often discontinued predecessor. sugarContent: URIRef # The number of grams of sugar. suggestedAge: URIRef # The age or age range for the intended audience or person, for example 3-12 months for infants, 1-5 years for toddlers. suggestedAnswer: URIRef # An answer (possibly one of several, possibly incorrect) to a Question, e.g. on a Question/Answer site. suggestedGender: URIRef # The suggested gender of the intended person or audience, for example "male", "female", or "unisex". suggestedMaxAge: URIRef # Maximum recommended age in years for the audience or user. suggestedMeasurement: URIRef # A suggested range of body measurements for the intended audience or person, for example inseam between 32 and 34 inches or height between 170 and 190 cm. Typically found on a size chart for wearable products. suggestedMinAge: URIRef # Minimum recommended age in years for the audience or user. suitableForDiet: URIRef # Indicates a dietary restriction or guideline for which this recipe or menu item is suitable, e.g. diabetic, halal etc. superEvent: URIRef # An event that this event is a part of. For example, a collection of individual music performances might each have a music festival as their superEvent. supersededBy: URIRef # Relates a term (i.e. a property, class or enumeration) to one that supersedes it. supply: URIRef # A sub-property of instrument. A supply consumed when performing instructions or a direction. supplyTo: URIRef # The area to which the artery supplies blood. supportingData: URIRef # Supporting data for a SoftwareApplication. surface: URIRef # A material used as a surface in some artwork, e.g. Canvas, Paper, Wood, Board, etc. target: URIRef # Indicates a target EntryPoint for an Action. 
targetCollection: URIRef # A sub property of object. The collection target of the action. targetDescription: URIRef # The description of a node in an established educational framework. targetName: URIRef # The name of a node in an established educational framework. targetPlatform: URIRef # Type of app development: phone, Metro style, desktop, XBox, etc. targetPopulation: URIRef # Characteristics of the population for which this is intended, or which typically uses it, e.g. 'adults'. targetProduct: URIRef # Target Operating System / Product to which the code applies. If applies to several versions, just the product name can be used. targetUrl: URIRef # The URL of a node in an established educational framework. taxID: URIRef # The Tax / Fiscal ID of the organization or person, e.g. the TIN in the US or the CIF/NIF in Spain. taxonRank: URIRef # The taxonomic rank of this taxon given preferably as a URI from a controlled vocabulary – (typically the ranks from TDWG TaxonRank ontology or equivalent Wikidata URIs). taxonomicRange: URIRef # The taxonomic grouping of the organism that expresses, encodes, or in someway related to the BioChemEntity. teaches: URIRef # The item being described is intended to help a person learn the competency or learning outcome defined by the referenced term. telephone: URIRef # The telephone number. temporal: URIRef # The "temporal" property can be used in cases where more specific properties (e.g. [[temporalCoverage]], [[dateCreated]], [[dateModified]], [[datePublished]]) are not known to be appropriate. temporalCoverage: URIRef # The temporalCoverage of a CreativeWork indicates the period that the content applies to, i.e. that it describes, either as a DateTime or as a textual string indicating a time period in [ISO 8601 time interval format](https://en.wikipedia.org/wiki/ISO_8601#Time_intervals). In the case of a Dataset it will typically indicate the relevant time period in a precise notation (e.g. 
for a 2011 census dataset, the year 2011 would be written "2011/2012"). Other forms of content e.g. ScholarlyArticle, Book, TVSeries or TVEpisode may indicate their temporalCoverage in broader terms - textually or via well-known URL. Written works such as books may sometimes have precise temporal coverage too, e.g. a work set in 1939 - 1945 can be indicated in ISO 8601 interval format format via "1939/1945". Open-ended date ranges can be written with ".." in place of the end date. For example, "2015-11/.." indicates a range beginning in November 2015 and with no specified final date. This is tentative and might be updated in future when ISO 8601 is officially updated. termCode: URIRef # A code that identifies this [[DefinedTerm]] within a [[DefinedTermSet]] termDuration: URIRef # The amount of time in a term as defined by the institution. A term is a length of time where students take one or more classes. Semesters and quarters are common units for term. termsOfService: URIRef # Human-readable terms of service documentation. termsPerYear: URIRef # The number of times terms of study are offered per year. Semesters and quarters are common units for term. For example, if the student can only take 2 semesters for the program in one year, then termsPerYear should be 2. text: URIRef # The textual content of this CreativeWork. textValue: URIRef # Text value being annotated. thumbnail: URIRef # Thumbnail image for an image or video. thumbnailUrl: URIRef # A thumbnail image relevant to the Thing. tickerSymbol: URIRef # The exchange traded instrument associated with a Corporation object. The tickerSymbol is expressed as an exchange and an instrument name separated by a space character. For the exchange component of the tickerSymbol attribute, we recommend using the controlled vocabulary of Market Identifier Codes (MIC) specified in ISO15022. ticketNumber: URIRef # The unique identifier for the ticket. 
ticketToken: URIRef # Reference to an asset (e.g., Barcode, QR code image or PDF) usable for entrance. ticketedSeat: URIRef # The seat associated with the ticket. timeOfDay: URIRef # The time of day the program normally runs. For example, "evenings". timeRequired: URIRef # Approximate or typical time it takes to work with or through this learning resource for the typical intended target audience, e.g. 'PT30M', 'PT1H25M'. timeToComplete: URIRef # The expected length of time to complete the program if attending full-time. tissueSample: URIRef # The type of tissue sample required for the test. title: URIRef # The title of the job. titleEIDR: URIRef # An [EIDR](https://eidr.org/) (Entertainment Identifier Registry) [[identifier]] representing at the most general/abstract level, a work of film or television. For example, the motion picture known as "Ghostbusters" has a titleEIDR of "10.5240/7EC7-228A-510A-053E-CBB8-J". This title (or work) may have several variants, which EIDR calls "edits". See [[editEIDR]]. Since schema.org types like [[Movie]] and [[TVEpisode]] can be used for both works and their multiple expressions, it is possible to use [[titleEIDR]] alone (for a general description), or alongside [[editEIDR]] for a more edit-specific description. toLocation: URIRef # A sub property of location. The final location of the object or the agent after the action. toRecipient: URIRef # A sub property of recipient. The recipient who was directly sent the message. tocContinuation: URIRef # A [[HyperTocEntry]] can have a [[tocContinuation]] indicated, which is another [[HyperTocEntry]] that would be the default next item to play or render. tocEntry: URIRef # Indicates a [[HyperTocEntry]] in a [[HyperToc]]. tongueWeight: URIRef # The permitted vertical load (TWR) of a trailer attached to the vehicle. 
Also referred to as Tongue Load Rating (TLR) or Vertical Load Rating (VLR)\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 2: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 3: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. tool: URIRef # A sub property of instrument. An object used (but not consumed) when performing instructions or a direction. torque: URIRef # The torque (turning force) of the vehicle's engine.\n\nTypical unit code(s): NU for newton metre (N m), F17 for pound-force per foot, or F48 for pound-force per inch\n\n* Note 1: You can link to information about how the given value has been determined (e.g. reference RPM) using the [[valueReference]] property.\n* Note 2: You can use [[minValue]] and [[maxValue]] to indicate ranges. totalJobOpenings: URIRef # The number of positions open for this job posting. Use a positive integer. Do not use if the number of positions is unclear or not known. totalPaymentDue: URIRef # The total amount due. totalPrice: URIRef # The total price for the reservation or ticket, including applicable taxes, shipping, etc.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. totalTime: URIRef # The total time required to perform instructions or a direction (including time to prepare the supplies), in [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601). 
tourBookingPage: URIRef # A page providing information on how to book a tour of some [[Place]], such as an [[Accommodation]] or [[ApartmentComplex]] in a real estate setting, as well as other kinds of tours as appropriate. touristType: URIRef # Attraction suitable for type(s) of tourist. eg. Children, visitors from a particular country, etc. track: URIRef # A music recording (track)—usually a single song. If an ItemList is given, the list should contain items of type MusicRecording. trackingNumber: URIRef # Shipper tracking number. trackingUrl: URIRef # Tracking url for the parcel delivery. tracks: URIRef # A music recording (track)—usually a single song. trailer: URIRef # The trailer of a movie or tv/radio series, season, episode, etc. trailerWeight: URIRef # The permitted weight of a trailer attached to the vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n* Note 1: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 2: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 3: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. trainName: URIRef # The name of the train (e.g. The Orient Express). trainNumber: URIRef # The unique identifier for the train. trainingSalary: URIRef # The estimated salary earned while in the program. transFatContent: URIRef # The number of grams of trans fat. transcript: URIRef # If this MediaObject is an AudioObject or VideoObject, the transcript of that object. transitTime: URIRef # The typical delay the order has been sent for delivery and the goods reach the final customer. Typical properties: minValue, maxValue, unitCode (d for DAY). transitTimeLabel: URIRef # Label to match an [[OfferShippingDetails]] with a [[DeliveryTimeSettings]] (within the context of a [[shippingSettingsLink]] cross-reference). translationOfWork: URIRef # The work that this work has been translated from. e.g. 
物种起源 is a translationOf “On the Origin of Species” translator: URIRef # Organization or person who adapts a creative work to different languages, regional differences and technical requirements of a target market, or that translates during some event. transmissionMethod: URIRef # How the disease spreads, either as a route or vector, for example 'direct contact', 'Aedes aegypti', etc. travelBans: URIRef # Information about travel bans, e.g. in the context of a pandemic. trialDesign: URIRef # Specifics about the trial design (enumerated). tributary: URIRef # The anatomical or organ system that the vein flows into; a larger structure that the vein connects to. typeOfBed: URIRef # The type of bed to which the BedDetail refers, i.e. the type of bed available in the quantity indicated by quantity. typeOfGood: URIRef # The product that this structured value is referring to. typicalAgeRange: URIRef # The typical expected age range, e.g. '7-9', '11-'. typicalCreditsPerTerm: URIRef # The number of credits or units a full-time student would be expected to take in 1 term however 'term' is defined by the institution. typicalTest: URIRef # A medical test typically performed given this condition. underName: URIRef # The person or organization the reservation or ticket is for. unitCode: URIRef # The unit of measurement given using the UN/CEFACT Common Code (3 characters) or a URL. Other codes than the UN/CEFACT Common Code may be used with a prefix followed by a colon. unitText: URIRef # A string or text indicating the unit of measurement. Useful if you cannot provide a standard unit code for unitCode. unnamedSourcesPolicy: URIRef # For an [[Organization]] (typically a [[NewsMediaOrganization]]), a statement about policy on use of unnamed sources and the decision process required. unsaturatedFatContent: URIRef # The number of grams of unsaturated fat. uploadDate: URIRef # Date when this media object was uploaded to this site. 
upvoteCount: URIRef # The number of upvotes this question, answer or comment has received from the community. url: URIRef # URL of the item. urlTemplate: URIRef # An url template (RFC6570) that will be used to construct the target of the execution of the action. usageInfo: URIRef # The schema.org [[usageInfo]] property indicates further information about a [[CreativeWork]]. This property is applicable both to works that are freely available and to those that require payment or other transactions. It can reference additional information e.g. community expectations on preferred linking and citation conventions, as well as purchasing details. For something that can be commercially licensed, usageInfo can provide detailed, resource-specific information about licensing options. This property can be used alongside the license property which indicates license(s) applicable to some piece of content. The usageInfo property can provide information about other licensing options, e.g. acquiring commercial usage rights for an image that is also available under non-commercial creative commons licenses. usedToDiagnose: URIRef # A condition the test is used to diagnose. userInteractionCount: URIRef # The number of interactions for the CreativeWork using the WebSite or SoftwareApplication. usesDevice: URIRef # Device used to perform the test. usesHealthPlanIdStandard: URIRef # The standard for interpreting thePlan ID. The preferred is "HIOS". See the Centers for Medicare & Medicaid Services for more details. utterances: URIRef # Text of an utterances (spoken words, lyrics etc.) that occurs at a certain section of a media object, represented as a [[HyperTocEntry]]. validFor: URIRef # The duration of validity of a permit or similar thing. validFrom: URIRef # The date when the item becomes valid. validIn: URIRef # The geographic area where a permit or similar thing is valid. validThrough: URIRef # The date after when the item is not valid. 
For example the end of an offer, salary period, or a period of opening hours. validUntil: URIRef # The date when the item is no longer valid. value: URIRef # The value of the quantitative value or property value node.\n\n* For [[QuantitativeValue]] and [[MonetaryAmount]], the recommended type for values is 'Number'.\n* For [[PropertyValue]], it can be 'Text;', 'Number', 'Boolean', or 'StructuredValue'.\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. valueAddedTaxIncluded: URIRef # Specifies whether the applicable value-added tax (VAT) is included in the price specification or not. valueMaxLength: URIRef # Specifies the allowed range for number of characters in a literal value. valueMinLength: URIRef # Specifies the minimum allowed range for number of characters in a literal value. valueName: URIRef # Indicates the name of the PropertyValueSpecification to be used in URL templates and form encoding in a manner analogous to HTML's input@name. valuePattern: URIRef # Specifies a regular expression for testing literal values according to the HTML spec. valueReference: URIRef # A secondary value that provides additional information on the original value, e.g. a reference temperature or a type of measurement. valueRequired: URIRef # Whether the property must be filled in to complete the action. Default is false. variableMeasured: URIRef # The variableMeasured property can indicate (repeated as necessary) the variables that are measured in some dataset, either described as text or as pairs of identifier and description using PropertyValue. variantCover: URIRef # A description of the variant cover for the issue, if the issue is a variant printing. For example, "Bryan Hitch Variant Cover" or "2nd Printing Variant". 
variesBy: URIRef # Indicates the property or properties by which the variants in a [[ProductGroup]] vary, e.g. their size, color etc. Schema.org properties can be referenced by their short name e.g. "color"; terms defined elsewhere can be referenced with their URIs. vatID: URIRef # The Value-added Tax ID of the organization or person. vehicleConfiguration: URIRef # A short text indicating the configuration of the vehicle, e.g. '5dr hatchback ST 2.5 MT 225 hp' or 'limited edition'. vehicleEngine: URIRef # Information about the engine or engines of the vehicle. vehicleIdentificationNumber: URIRef # The Vehicle Identification Number (VIN) is a unique serial number used by the automotive industry to identify individual motor vehicles. vehicleInteriorColor: URIRef # The color or color combination of the interior of the vehicle. vehicleInteriorType: URIRef # The type or material of the interior of the vehicle (e.g. synthetic fabric, leather, wood, etc.). While most interior types are characterized by the material used, an interior type can also be based on vehicle usage or target audience. vehicleModelDate: URIRef # The release date of a vehicle model (often used to differentiate versions of the same make and model). vehicleSeatingCapacity: URIRef # The number of passengers that can be seated in the vehicle, both in terms of the physical space available, and in terms of limitations set by law.\n\nTypical unit code(s): C62 for persons. vehicleSpecialUsage: URIRef # Indicates whether the vehicle has been used for special purposes, like commercial rental, driving school, or as a taxi. The legislation in many countries requires this information to be revealed when offering a car for sale. vehicleTransmission: URIRef # The type of component used for transmitting the power from a rotating power source to the wheels or other relevant component(s) ("gearbox" for cars). vendor: URIRef # 'vendor' is an earlier term for 'seller'. 
verificationFactCheckingPolicy: URIRef # Disclosure about verification and fact-checking processes for a [[NewsMediaOrganization]] or other fact-checking [[Organization]]. version: URIRef # The version of the CreativeWork embodied by a specified resource. video: URIRef # An embedded video object. videoFormat: URIRef # The type of screening or video broadcast used (e.g. IMAX, 3D, SD, HD, etc.). videoFrameSize: URIRef # The frame size of the video. videoQuality: URIRef # The quality of the video. volumeNumber: URIRef # Identifies the volume of publication or multi-part work; for example, "iii" or "2". warning: URIRef # Any FDA or other warnings about the drug (text or URL). warranty: URIRef # The warranty promise(s) included in the offer. warrantyPromise: URIRef # The warranty promise(s) included in the offer. warrantyScope: URIRef # The scope of the warranty promise. webCheckinTime: URIRef # The time when a passenger can check into the flight online. webFeed: URIRef # The URL for a feed, e.g. associated with a podcast series, blog, or series of date-stamped updates. This is usually RSS or Atom. weight: URIRef # The weight of the product or person. weightTotal: URIRef # The permitted total weight of the loaded vehicle, including passengers and cargo and the weight of the empty vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 2: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 3: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. wheelbase: URIRef # The distance between the centers of the front and rear wheels.\n\nTypical unit code(s): CMT for centimeters, MTR for meters, INH for inches, FOT for foot/feet width: URIRef # The width of the item. winner: URIRef # A sub property of participant. The winner of the action. 
wordCount: URIRef # The number of words in the text of the Article. workExample: URIRef # Example/instance/realization/derivation of the concept of this creative work. eg. The paperback edition, first edition, or eBook. workFeatured: URIRef # A work featured in some event, e.g. exhibited in an ExhibitionEvent. Specific subproperties are available for workPerformed (e.g. a play), or a workPresented (a Movie at a ScreeningEvent). workHours: URIRef # The typical working hours for this job (e.g. 1st shift, night shift, 8am-5pm). workLocation: URIRef # A contact location for a person's place of work. workPerformed: URIRef # A work performed in some event, for example a play performed in a TheaterEvent. workPresented: URIRef # The movie presented during this event. workTranslation: URIRef # A work that is a translation of the content of this work. e.g. 西遊記 has an English workTranslation “Journey to the West”,a German workTranslation “Monkeys Pilgerfahrt” and a Vietnamese translation Tây du ký bình khảo. workload: URIRef # Quantitative measure of the physiologic output of the exercise; also referred to as energy expenditure. worksFor: URIRef # Organizations that the person works for. worstRating: URIRef # The lowest value allowed in this rating system. If worstRating is omitted, 1 is assumed. xpath: URIRef # An XPath, e.g. of a [[SpeakableSpecification]] or [[WebPageElement]]. In the latter case, multiple matches within a page can constitute a single conceptual "Web page element". yearBuilt: URIRef # The year an [[Accommodation]] was constructed. This corresponds to the [YearBuilt field in RESO](https://ddwiki.reso.org/display/DDW17/YearBuilt+Field). yearlyRevenue: URIRef # The size of the business in annual revenue. yearsInOperation: URIRef # The age of the business. # yield: URIRef # The quantity that results by performing instructions. For example, a paper airplane, 10 personalized candles. 
rdflib-6.1.1/rdflib/namespace/_SH.py000066400000000000000000000547711415774155300172340ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class SH(DefinedNamespace): """ W3C Shapes Constraint Language (SHACL) Vocabulary This vocabulary defines terms used in SHACL, the W3C Shapes Constraint Language. Generated from: https://www.w3.org/ns/shacl.ttl Date: 2020-05-26 14:20:08.041103 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property alternativePath: URIRef # The (single) value of this property must be a list of path elements, representing the elements of alternative paths. annotationProperty: URIRef # The annotation property that shall be set. annotationValue: URIRef # The (default) values of the annotation property. annotationVarName: URIRef # The name of the SPARQL variable from the SELECT clause that shall be used for the values. ask: URIRef # The SPARQL ASK query to execute. closed: URIRef # If set to true then the shape is closed. condition: URIRef # The shapes that the focus nodes need to conform to before a rule is executed on them. conforms: URIRef # True if the validation did not produce any validation results, and false otherwise. construct: URIRef # The SPARQL CONSTRUCT query to execute. datatype: URIRef # Specifies an RDF datatype that all value nodes must have. deactivated: URIRef # If set to true then all nodes conform to this. declare: URIRef # Links a resource with its namespace prefix declarations. defaultValue: URIRef # A default value for a property, for example for user interface tools to pre-populate input fields. description: URIRef # Human-readable descriptions for the property in the context of the surrounding shape. detail: URIRef # Links a result with other results that provide more details, for example to describe violations against nested shapes. disjoint: URIRef # Specifies a property where the set of values must be disjoint with the value nodes. 
entailment: URIRef # An entailment regime that indicates what kind of inferencing is required by a shapes graph. equals: URIRef # Specifies a property that must have the same values as the value nodes. expression: URIRef # The node expression that must return true for the value nodes. filterShape: URIRef # The shape that all input nodes of the expression need to conform to. flags: URIRef # An optional flag to be used with regular expression pattern matching. focusNode: URIRef # The focus node that was validated when the result was produced. group: URIRef # Can be used to link to a property group to indicate that a property shape belongs to a group of related property shapes. hasValue: URIRef # Specifies a value that must be among the value nodes. ignoredProperties: URIRef # An optional RDF list of properties that are also permitted in addition to those explicitly enumerated via sh:property/sh:path. intersection: URIRef # A list of node expressions that shall be intersected. inversePath: URIRef # The (single) value of this property represents an inverse path (object to subject). js: URIRef # Constraints expressed in JavaScript. jsFunctionName: URIRef # The name of the JavaScript function to execute. jsLibrary: URIRef # Declares which JavaScript libraries are needed to execute this. jsLibraryURL: URIRef # Declares the URLs of a JavaScript library. This should be the absolute URL of a JavaScript file. Implementations may redirect those to local files. labelTemplate: URIRef # Outlines how human-readable labels of instances of the associated Parameterizable shall be produced. The values can contain {?paramName} as placeholders for the actual values of the given parameter. languageIn: URIRef # Specifies a list of language tags that all value nodes must have. lessThan: URIRef # Specifies a property that must have smaller values than the value nodes. lessThanOrEquals: URIRef # Specifies a property that must have smaller or equal values than the value nodes. 
maxCount: URIRef # Specifies the maximum number of values in the set of value nodes. maxExclusive: URIRef # Specifies the maximum exclusive value of each value node. maxInclusive: URIRef # Specifies the maximum inclusive value of each value node. maxLength: URIRef # Specifies the maximum string length of each value node. message: URIRef # A human-readable message (possibly with placeholders for variables) explaining the cause of the result. minCount: URIRef # Specifies the minimum number of values in the set of value nodes. minExclusive: URIRef # Specifies the minimum exclusive value of each value node. minInclusive: URIRef # Specifies the minimum inclusive value of each value node. minLength: URIRef # Specifies the minimum string length of each value node. name: URIRef # Human-readable labels for the property in the context of the surrounding shape. namespace: URIRef # The namespace associated with a prefix in a prefix declaration. node: URIRef # Specifies the node shape that all value nodes must conform to. nodeKind: URIRef # Specifies the node kind (e.g. IRI or literal) each value node. nodeValidator: URIRef # The validator(s) used to evaluate a constraint in the context of a node shape. nodes: URIRef # The node expression producing the input nodes of a filter shape expression. object: URIRef # An expression producing the nodes that shall be inferred as objects. oneOrMorePath: URIRef # The (single) value of this property represents a path that is matched one or more times. optional: URIRef # Indicates whether a parameter is optional. order: URIRef # Specifies the relative order of this compared to its siblings. For example use 0 for the first, 1 for the second. parameter: URIRef # The parameters of a function or constraint component. path: URIRef # Specifies the property path of a property shape. pattern: URIRef # Specifies a regular expression pattern that the string representations of the value nodes must match. 
predicate: URIRef # An expression producing the properties that shall be inferred as predicates. prefix: URIRef # The prefix of a prefix declaration. prefixes: URIRef # The prefixes that shall be applied before parsing the associated SPARQL query. property: URIRef # Links a shape to its property shapes. propertyValidator: URIRef # The validator(s) used to evaluate a constraint in the context of a property shape. qualifiedMaxCount: URIRef # The maximum number of value nodes that can conform to the shape. qualifiedMinCount: URIRef # The minimum number of value nodes that must conform to the shape. qualifiedValueShape: URIRef # The shape that a specified number of values must conform to. qualifiedValueShapesDisjoint: URIRef # Can be used to mark the qualified value shape to be disjoint with its sibling shapes. result: URIRef # The validation results contained in a validation report. resultAnnotation: URIRef # Links a SPARQL validator with zero or more sh:ResultAnnotation instances, defining how to derive additional result properties based on the variables of the SELECT query. resultMessage: URIRef # Human-readable messages explaining the cause of the result. resultPath: URIRef # The path of a validation result, based on the path of the validated property shape. resultSeverity: URIRef # The severity of the result, e.g. warning. returnType: URIRef # The expected type of values returned by the associated function. rule: URIRef # The rules linked to a shape. select: URIRef # The SPARQL SELECT query to execute. severity: URIRef # Defines the severity that validation results produced by a shape must have. Defaults to sh:Violation. shapesGraph: URIRef # Shapes graphs that should be used when validating this data graph. shapesGraphWellFormed: URIRef # If true then the validation engine was certain that the shapes graph has passed all SHACL syntax requirements during the validation process. 
sourceConstraint: URIRef # The constraint that was validated when the result was produced. sourceConstraintComponent: URIRef # The constraint component that is the source of the result. sourceShape: URIRef # The shape that is was validated when the result was produced. sparql: URIRef # Links a shape with SPARQL constraints. subject: URIRef # An expression producing the resources that shall be inferred as subjects. suggestedShapesGraph: URIRef # Suggested shapes graphs for this ontology. The values of this property may be used in the absence of specific sh:shapesGraph statements. target: URIRef # Links a shape to a target specified by an extension language, for example instances of sh:SPARQLTarget. targetClass: URIRef # Links a shape to a class, indicating that all instances of the class must conform to the shape. targetNode: URIRef # Links a shape to individual nodes, indicating that these nodes must conform to the shape. targetObjectsOf: URIRef # Links a shape to a property, indicating that all all objects of triples that have the given property as their predicate must conform to the shape. targetSubjectsOf: URIRef # Links a shape to a property, indicating that all subjects of triples that have the given property as their predicate must conform to the shape. union: URIRef # A list of node expressions that shall be used together. uniqueLang: URIRef # Specifies whether all node values must have a unique (or no) language tag. update: URIRef # The SPARQL UPDATE to execute. validator: URIRef # The validator(s) used to evaluate constraints of either node or property shapes. value: URIRef # An RDF node that has caused the result. xone: URIRef # Specifies a list of shapes so that the value nodes must conform to exactly one of the shapes. zeroOrMorePath: URIRef # The (single) value of this property represents a path that is matched zero or more times. zeroOrOnePath: URIRef # The (single) value of this property represents a path that is matched zero or one times. 
# http://www.w3.org/2000/01/rdf-schema#Class AbstractResult: URIRef # The base class of validation results, typically not instantiated directly. ConstraintComponent: URIRef # The class of constraint components. Function: URIRef # The class of SHACL functions. JSConstraint: URIRef # The class of constraints backed by a JavaScript function. JSExecutable: URIRef # Abstract base class of resources that declare an executable JavaScript. JSFunction: URIRef # The class of SHACL functions that execute a JavaScript function when called. JSLibrary: URIRef # Represents a JavaScript library, typically identified by one or more URLs of files to include. JSRule: URIRef # The class of SHACL rules expressed using JavaScript. JSTarget: URIRef # The class of targets that are based on JavaScript functions. JSTargetType: URIRef # The (meta) class for parameterizable targets that are based on JavaScript functions. JSValidator: URIRef # A SHACL validator based on JavaScript. This can be used to declare SHACL constraint components that perform JavaScript-based validation when used. NodeKind: URIRef # The class of all node kinds, including sh:BlankNode, sh:IRI, sh:Literal or the combinations of these: sh:BlankNodeOrIRI, sh:BlankNodeOrLiteral, sh:IRIOrLiteral. NodeShape: URIRef # A node shape is a shape that specifies constraint that need to be met with respect to focus nodes. Parameter: URIRef # The class of parameter declarations, consisting of a path predicate and (possibly) information about allowed value type, cardinality and other characteristics. Parameterizable: URIRef # Superclass of components that can take parameters, especially functions and constraint components. PrefixDeclaration: URIRef # The class of prefix declarations, consisting of pairs of a prefix with a namespace. PropertyGroup: URIRef # Instances of this class represent groups of property shapes that belong together. 
PropertyShape: URIRef # A property shape is a shape that specifies constraints on the values of a focus node for a given property or path. ResultAnnotation: URIRef # A class of result annotations, which define the rules to derive the values of a given annotation property as extra values for a validation result. Rule: URIRef # The class of SHACL rules. Never instantiated directly. SPARQLAskExecutable: URIRef # The class of SPARQL executables that are based on an ASK query. SPARQLAskValidator: URIRef # The class of validators based on SPARQL ASK queries. The queries are evaluated for each value node and are supposed to return true if the given node conforms. SPARQLConstraint: URIRef # The class of constraints based on SPARQL SELECT queries. SPARQLConstructExecutable: URIRef # The class of SPARQL executables that are based on a CONSTRUCT query. SPARQLExecutable: URIRef # The class of resources that encapsulate a SPARQL query. SPARQLFunction: URIRef # A function backed by a SPARQL query - either ASK or SELECT. SPARQLRule: URIRef # The class of SHACL rules based on SPARQL CONSTRUCT queries. SPARQLSelectExecutable: URIRef # The class of SPARQL executables based on a SELECT query. SPARQLSelectValidator: URIRef # The class of validators based on SPARQL SELECT queries. The queries are evaluated for each focus node and are supposed to produce bindings for all focus nodes that do not conform. SPARQLTarget: URIRef # The class of targets that are based on SPARQL queries. SPARQLTargetType: URIRef # The (meta) class for parameterizable targets that are based on SPARQL queries. SPARQLUpdateExecutable: URIRef # The class of SPARQL executables based on a SPARQL UPDATE. Severity: URIRef # The class of validation result severity levels, including violation and warning levels. Shape: URIRef # A shape is a collection of constraints that may be targeted for certain nodes. Target: URIRef # The base class of targets such as those based on SPARQL queries. 
TargetType: URIRef # The (meta) class for parameterizable targets. Instances of this are instantiated as values of the sh:target property. TripleRule: URIRef # A rule based on triple (subject, predicate, object) pattern. ValidationReport: URIRef # The class of SHACL validation reports. ValidationResult: URIRef # The class of validation results. Validator: URIRef # The class of validators, which provide instructions on how to process a constraint definition. This class serves as base class for the SPARQL-based validators and other possible implementations. # http://www.w3.org/2000/01/rdf-schema#Resource this: URIRef # A node expression that represents the current focus node. # http://www.w3.org/ns/shacl#ConstraintComponent AndConstraintComponent: URIRef # A constraint component that can be used to test whether a value node conforms to all members of a provided list of shapes. ClassConstraintComponent: URIRef # A constraint component that can be used to verify that each value node is an instance of a given type. ClosedConstraintComponent: URIRef # A constraint component that can be used to indicate that focus nodes must only have values for those properties that have been explicitly enumerated via sh:property/sh:path. DatatypeConstraintComponent: URIRef # A constraint component that can be used to restrict the datatype of all value nodes. DisjointConstraintComponent: URIRef # A constraint component that can be used to verify that the set of value nodes is disjoint with the the set of nodes that have the focus node as subject and the value of a given property as predicate. EqualsConstraintComponent: URIRef # A constraint component that can be used to verify that the set of value nodes is equal to the set of nodes that have the focus node as subject and the value of a given property as predicate. ExpressionConstraintComponent: URIRef # A constraint component that can be used to verify that a given node expression produces true for all value nodes. 
HasValueConstraintComponent: URIRef # A constraint component that can be used to verify that one of the value nodes is a given RDF node. InConstraintComponent: URIRef # A constraint component that can be used to exclusively enumerate the permitted value nodes. JSConstraintComponent: URIRef # A constraint component with the parameter sh:js linking to a sh:JSConstraint containing a sh:script. LanguageInConstraintComponent: URIRef # A constraint component that can be used to enumerate language tags that all value nodes must have. LessThanConstraintComponent: URIRef # A constraint component that can be used to verify that each value node is smaller than all the nodes that have the focus node as subject and the value of a given property as predicate. LessThanOrEqualsConstraintComponent: URIRef # A constraint component that can be used to verify that every value node is smaller than all the nodes that have the focus node as subject and the value of a given property as predicate. MaxCountConstraintComponent: URIRef # A constraint component that can be used to restrict the maximum number of value nodes. MaxExclusiveConstraintComponent: URIRef # A constraint component that can be used to restrict the range of value nodes with a maximum exclusive value. MaxInclusiveConstraintComponent: URIRef # A constraint component that can be used to restrict the range of value nodes with a maximum inclusive value. MaxLengthConstraintComponent: URIRef # A constraint component that can be used to restrict the maximum string length of value nodes. MinCountConstraintComponent: URIRef # A constraint component that can be used to restrict the minimum number of value nodes. MinExclusiveConstraintComponent: URIRef # A constraint component that can be used to restrict the range of value nodes with a minimum exclusive value. MinInclusiveConstraintComponent: URIRef # A constraint component that can be used to restrict the range of value nodes with a minimum inclusive value. 
MinLengthConstraintComponent: URIRef # A constraint component that can be used to restrict the minimum string length of value nodes. NodeConstraintComponent: URIRef # A constraint component that can be used to verify that all value nodes conform to the given node shape. NodeKindConstraintComponent: URIRef # A constraint component that can be used to restrict the RDF node kind of each value node. NotConstraintComponent: URIRef # A constraint component that can be used to verify that value nodes do not conform to a given shape. OrConstraintComponent: URIRef # A constraint component that can be used to restrict the value nodes so that they conform to at least one out of several provided shapes. PatternConstraintComponent: URIRef # A constraint component that can be used to verify that every value node matches a given regular expression. PropertyConstraintComponent: URIRef # A constraint component that can be used to verify that all value nodes conform to the given property shape. QualifiedMaxCountConstraintComponent: URIRef # A constraint component that can be used to verify that a specified maximum number of value nodes conforms to a given shape. QualifiedMinCountConstraintComponent: URIRef # A constraint component that can be used to verify that a specified minimum number of value nodes conforms to a given shape. SPARQLConstraintComponent: URIRef # A constraint component that can be used to define constraints based on SPARQL queries. UniqueLangConstraintComponent: URIRef # A constraint component that can be used to specify that no pair of value nodes may use the same language tag. XoneConstraintComponent: URIRef # A constraint component that can be used to restrict the value nodes so that they conform to exactly one out of several provided shapes. # http://www.w3.org/ns/shacl#NodeKind BlankNode: URIRef # The node kind of all blank nodes. BlankNodeOrIRI: URIRef # The node kind of all blank nodes or IRIs. 
BlankNodeOrLiteral: URIRef # The node kind of all blank nodes or literals. IRI: URIRef # The node kind of all IRIs. IRIOrLiteral: URIRef # The node kind of all IRIs or literals. Literal: URIRef # The node kind of all literals. # http://www.w3.org/ns/shacl#Parameter # http://www.w3.org/ns/shacl#Severity Info: URIRef # The severity for an informational validation result. Violation: URIRef # The severity for a violation validation result. Warning: URIRef # The severity for a warning validation result. # Valid non-python identifiers _extras = [ "and", "class", "in", "not", "or", "AndConstraintComponent-and", "ClassConstraintComponent-class", "ClosedConstraintComponent-closed", "ClosedConstraintComponent-ignoredProperties", "DatatypeConstraintComponent-datatype", "DisjointConstraintComponent-disjoint", "EqualsConstraintComponent-equals", "ExpressionConstraintComponent-expression", "HasValueConstraintComponent-hasValue", "InConstraintComponent-in", "JSConstraint-js", "LanguageInConstraintComponent-languageIn", "LessThanConstraintComponent-lessThan", "LessThanOrEqualsConstraintComponent-lessThanOrEquals", "MaxCountConstraintComponent-maxCount", "MaxExclusiveConstraintComponent-maxExclusive", "MaxInclusiveConstraintComponent-maxInclusive", "MaxLengthConstraintComponent-maxLength", "MinCountConstraintComponent-minCount", "MinExclusiveConstraintComponent-minExclusive", "MinInclusiveConstraintComponent-minInclusive", "MinLengthConstraintComponent-minLength", "NodeConstraintComponent-node", "NodeKindConstraintComponent-nodeKind", "NotConstraintComponent-not", "OrConstraintComponent-or", "PatternConstraintComponent-flags", "PatternConstraintComponent-pattern", "PropertyConstraintComponent-property", "QualifiedMaxCountConstraintComponent-qualifiedMaxCount", "QualifiedMaxCountConstraintComponent-qualifiedValueShape", "QualifiedMaxCountConstraintComponent-qualifiedValueShapesDisjoint", "QualifiedMinCountConstraintComponent-qualifiedMinCount", 
"QualifiedMinCountConstraintComponent-qualifiedValueShape", "QualifiedMinCountConstraintComponent-qualifiedValueShapesDisjoint", "SPARQLConstraintComponent-sparql", "UniqueLangConstraintComponent-uniqueLang", "XoneConstraintComponent-xone", ] _NS = Namespace("http://www.w3.org/ns/shacl#") rdflib-6.1.1/rdflib/namespace/_SKOS.py000066400000000000000000000110331415774155300174610ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class SKOS(DefinedNamespace): """ SKOS Vocabulary An RDF vocabulary for describing the basic structure and content of concept schemes such as thesauri, classification schemes, subject heading lists, taxonomies, 'folksonomies', other types of controlled vocabulary, and also concept schemes embedded in glossaries and terminologies. Generated from: https://www.w3.org/2009/08/skos-reference/skos.rdf Date: 2020-05-26 14:20:08.489187 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property altLabel: URIRef # An alternative lexical label for a resource. broadMatch: URIRef # skos:broadMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes. broader: URIRef # Relates a concept to a concept that is more general in meaning. broaderTransitive: URIRef # skos:broaderTransitive is a transitive superproperty of skos:broader. changeNote: URIRef # A note about a modification to a concept. closeMatch: URIRef # skos:closeMatch is used to link two concepts that are sufficiently similar that they can be used interchangeably in some information retrieval applications. In order to avoid the possibility of "compound errors" when combining mappings across more than two concept schemes, skos:closeMatch is not declared to be a transitive property. definition: URIRef # A statement or formal explanation of the meaning of a concept. editorialNote: URIRef # A note for an editor, translator or maintainer of the vocabulary. 
exactMatch: URIRef # skos:exactMatch is used to link two concepts, indicating a high degree of confidence that the concepts can be used interchangeably across a wide range of information retrieval applications. skos:exactMatch is a transitive property, and is a sub-property of skos:closeMatch. example: URIRef # An example of the use of a concept. hasTopConcept: URIRef # Relates, by convention, a concept scheme to a concept which is topmost in the broader/narrower concept hierarchies for that scheme, providing an entry point to these hierarchies. hiddenLabel: URIRef # A lexical label for a resource that should be hidden when generating visual displays of the resource, but should still be accessible to free text search operations. historyNote: URIRef # A note about the past state/use/meaning of a concept. inScheme: URIRef # Relates a resource (for example a concept) to a concept scheme in which it is included. mappingRelation: URIRef # Relates two concepts coming, by convention, from different schemes, and that have comparable meanings member: URIRef # Relates a collection to one of its members. memberList: URIRef # Relates an ordered collection to the RDF list containing its members. narrowMatch: URIRef # skos:narrowMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes. narrower: URIRef # Relates a concept to a concept that is more specific in meaning. narrowerTransitive: URIRef # skos:narrowerTransitive is a transitive superproperty of skos:narrower. notation: URIRef # A notation, also known as classification code, is a string of characters such as "T58.5" or "303.4833" used to uniquely identify a concept within the scope of a given concept scheme. note: URIRef # A general note, for any purpose. prefLabel: URIRef # The preferred lexical label for a resource, in a given language. related: URIRef # Relates a concept to a concept with which there is an associative semantic relationship. 
relatedMatch: URIRef # skos:relatedMatch is used to state an associative mapping link between two conceptual resources in different concept schemes. scopeNote: URIRef # A note that helps to clarify the meaning and/or the use of a concept. semanticRelation: URIRef # Links a concept to a concept related by meaning. topConceptOf: URIRef # Relates a concept to the concept scheme that it is a top level concept of. # http://www.w3.org/2002/07/owl#Class Collection: URIRef # A meaningful collection of concepts. Concept: URIRef # An idea or notion; a unit of thought. ConceptScheme: URIRef # A set of concepts, optionally including statements about semantic relationships between those concepts. OrderedCollection: URIRef # An ordered collection of concepts, where both the grouping and the ordering are meaningful. _NS = Namespace("http://www.w3.org/2004/02/skos/core#") rdflib-6.1.1/rdflib/namespace/_SOSA.py000066400000000000000000000141651415774155300174600ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class SOSA(DefinedNamespace): """ Sensor, Observation, Sample, and Actuator (SOSA) Ontology This ontology is based on the SSN Ontology by the W3C Semantic Sensor Networks Incubator Group (SSN-XG), together with considerations from the W3C/OGC Spatial Data on the Web Working Group. Generated from: http://www.w3.org/ns/sosa/ Date: 2020-05-26 14:20:08.792504 """ # http://www.w3.org/2000/01/rdf-schema#Class ActuatableProperty: URIRef # An actuatable quality (property, characteristic) of a FeatureOfInterest. Actuation: URIRef # An Actuation carries out an (Actuation) Procedure to change the state of the world using an Actuator. Actuator: URIRef # A device that is used by, or implements, an (Actuation) Procedure that changes the state of the world. 
FeatureOfInterest: URIRef # The thing whose property is being estimated or calculated in the course of an Observation to arrive at a Result or whose property is being manipulated by an Actuator, or which is being sampled or transformed in an act of Sampling. ObservableProperty: URIRef # An observable quality (property, characteristic) of a FeatureOfInterest. Observation: URIRef # Act of carrying out an (Observation) Procedure to estimate or calculate a value of a property of a FeatureOfInterest. Links to a Sensor to describe what made the Observation and how; links to an ObservableProperty to describe what the result is an estimate of, and to a FeatureOfInterest to detail what that property was associated with. Platform: URIRef # A Platform is an entity that hosts other entities, particularly Sensors, Actuators, Samplers, and other Platforms. Procedure: URIRef # A workflow, protocol, plan, algorithm, or computational method specifying how to make an Observation, create a Sample, or make a change to the state of the world (via an Actuator). A Procedure is re-usable, and might be involved in many Observations, Samplings, or Actuations. It explains the steps to be carried out to arrive at reproducible results. Result: URIRef # The Result of an Observation, Actuation, or act of Sampling. To store an observation's simple result value one can use the hasSimpleResult property. Sample: URIRef # Feature which is intended to be representative of a FeatureOfInterest on which Observations may be made. Sampler: URIRef # A device that is used by, or implements, a Sampling Procedure to create or transform one or more samples. Sampling: URIRef # An act of Sampling carries out a sampling Procedure to create or transform one or more samples. Sensor: URIRef # Device, agent (including humans), or software (simulation) involved in, or implementing, a Procedure. 
Sensors respond to a stimulus, e.g., a change in the environment, or input data composed from the results of prior Observations, and generate a Result. Sensors can be hosted by Platforms. # http://www.w3.org/2002/07/owl#DatatypeProperty hasSimpleResult: URIRef # The simple value of an Observation or Actuation or act of Sampling. resultTime: URIRef # The result time is the instant of time when the Observation, Actuation or Sampling activity was completed. # http://www.w3.org/2002/07/owl#ObjectProperty actsOnProperty: URIRef # Relation between an Actuation and the property of a FeatureOfInterest it is acting upon. hasFeatureOfInterest: URIRef # A relation between an Observation and the entity whose quality was observed, or between an Actuation and the entity whose property was modified, or between an act of Sampling and the entity that was sampled. hasResult: URIRef # Relation linking an Observation or Actuation or act of Sampling and a Result or Sample. hasSample: URIRef # Relation between a FeatureOfInterest and the Sample used to represent it. hosts: URIRef # Relation between a Platform and a Sensor, Actuator, Sampler, or Platform, hosted or mounted on it. isActedOnBy: URIRef # Relation between an ActuatableProperty of a FeatureOfInterest and an Actuation changing its state. isFeatureOfInterestOf: URIRef # A relation between a FeatureOfInterest and an Observation about it, an Actuation acting on it, or an act of Sampling that sampled it. isHostedBy: URIRef # Relation between a Sensor, Actuator, Sampler, or Platform, and the Platform that it is mounted on or hosted by. isObservedBy: URIRef # Relation between an ObservableProperty and the Sensor able to observe it. isResultOf: URIRef # Relation linking a Result to the Observation or Actuation or act of Sampling that created or caused it. isSampleOf: URIRef # Relation from a Sample to the FeatureOfInterest that it is intended to be representative of. 
madeActuation: URIRef # Relation between an Actuator and the Actuation it has made. madeByActuator: URIRef # Relation linking an Actuation to the Actuator that made that Actuation. madeBySampler: URIRef # Relation linking an act of Sampling to the Sampler (sampling device or entity) that made it. madeBySensor: URIRef # Relation between an Observation and the Sensor which made the Observation. madeObservation: URIRef # Relation between a Sensor and an Observation made by the Sensor. madeSampling: URIRef # Relation between a Sampler (sampling device or entity) and the Sampling act it performed. observedProperty: URIRef # Relation linking an Observation to the property that was observed. The ObservableProperty should be a property of the FeatureOfInterest (linked by hasFeatureOfInterest) of this Observation. observes: URIRef # Relation between a Sensor and an ObservableProperty that it is capable of sensing. phenomenonTime: URIRef # The time that the Result of an Observation, Actuation or Sampling applies to the FeatureOfInterest. Not necessarily the same as the resultTime. May be an Interval or an Instant, or some other compound TemporalEntity. usedProcedure: URIRef # A relation to link to a re-usable Procedure used in making an Observation, an Actuation, or a Sample, typically through a Sensor, Actuator or Sampler. _NS = Namespace("http://www.w3.org/ns/sosa/") rdflib-6.1.1/rdflib/namespace/_SSN.py000066400000000000000000000061751415774155300173600ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class SSN(DefinedNamespace): """ Semantic Sensor Network Ontology This ontology describes sensors, actuators and observations, and related concepts. It does not describe domain concepts, time, locations, etc. these are intended to be included from other ontologies via OWL imports. 
Generated from: http://www.w3.org/ns/ssn/ Date: 2020-05-26 14:20:09.068204 """ # http://www.w3.org/2002/07/owl#Class Deployment: URIRef # Describes the Deployment of one or more Systems for a particular purpose. Deployment may be done on a Platform. Input: URIRef # Any information that is provided to a Procedure for its use. Output: URIRef # Any information that is reported from a Procedure. Property: URIRef # A quality of an entity. An aspect of an entity that is intrinsic to and cannot exist without the entity. Stimulus: URIRef # An event in the real world that 'triggers' the Sensor. The properties associated to the Stimulus may be different to the eventual observed ObservableProperty. It is the event, not the object, that triggers the Sensor. System: URIRef # System is a unit of abstraction for pieces of infrastructure that implement Procedures. A System may have components, its subsystems, which are other systems. # http://www.w3.org/2002/07/owl#FunctionalProperty wasOriginatedBy: URIRef # Relation between an Observation and the Stimulus that originated it. # http://www.w3.org/2002/07/owl#ObjectProperty deployedOnPlatform: URIRef # Relation between a Deployment and the Platform on which the Systems are deployed. deployedSystem: URIRef # Relation between a Deployment and a deployed System. detects: URIRef # A relation from a Sensor to the Stimulus that the Sensor detects. The Stimulus itself will be serving as a proxy for some ObservableProperty. forProperty: URIRef # A relation between some aspect of an entity and a Property. hasDeployment: URIRef # Relation between a System and a Deployment, recording that the System is deployed in that Deployment. hasInput: URIRef # Relation between a Procedure and an Input to it. hasOutput: URIRef # Relation between a Procedure and an Output of it. hasProperty: URIRef # Relation between an entity and a Property of that entity. hasSubSystem: URIRef # Relation between a System and its component parts. 
implementedBy: URIRef # Relation between a Procedure (an algorithm, procedure or method) and an entity that implements that Procedure in some executable way. implements: URIRef # Relation between an entity that implements a Procedure in some executable way and the Procedure (an algorithm, procedure or method). inDeployment: URIRef # Relation between a Platform and a Deployment, meaning that the deployedSystems of the Deployment are hosted on the Platform. isPropertyOf: URIRef # Relation between a Property and the entity it belongs to. isProxyFor: URIRef # A relation from a Stimulus to the Property that the Stimulus is serving as a proxy for. _NS = Namespace("http://www.w3.org/ns/ssn/") rdflib-6.1.1/rdflib/namespace/_TIME.py000066400000000000000000000311301415774155300174400ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class TIME(DefinedNamespace): """ OWL-Time Generated from: http://www.w3.org/2006/time# Date: 2020-05-26 14:20:10.531265 """ # http://www.w3.org/2000/01/rdf-schema#Datatype generalDay: URIRef # Day of month - formulated as a text string with a pattern constraint to reproduce the same lexical form as gDay, except that values up to 99 are permitted, in order to support calendars with more than 31 days in a month. Note that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type. generalMonth: URIRef # Month of year - formulated as a text string with a pattern constraint to reproduce the same lexical form as gMonth, except that values up to 20 are permitted, in order to support calendars with more than 12 months in the year. Note that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type. 
generalYear: URIRef # Year number - formulated as a text string with a pattern constraint to reproduce the same lexical form as gYear, but not restricted to values from the Gregorian calendar. Note that the value-space is not defined, so a generic OWL2 processor cannot compute ordering relationships of values of this type. # http://www.w3.org/2002/07/owl#Class DateTimeDescription: URIRef # Description of date and time structured with separate values for the various elements of a calendar-clock system. The temporal reference system is fixed to Gregorian Calendar, and the range of year, month, day properties restricted to corresponding XML Schema types xsd:gYear, xsd:gMonth and xsd:gDay, respectively. DateTimeInterval: URIRef # DateTimeInterval is a subclass of ProperInterval, defined using the multi-element DateTimeDescription. DayOfWeek: URIRef # The day of week Duration: URIRef # Duration of a temporal extent expressed as a number scaled by a temporal unit DurationDescription: URIRef # Description of temporal extent structured with separate values for the various elements of a calendar-clock system. The temporal reference system is fixed to Gregorian Calendar, and the range of each of the numeric properties is restricted to xsd:decimal GeneralDateTimeDescription: URIRef # Description of date and time structured with separate values for the various elements of a calendar-clock system GeneralDurationDescription: URIRef # Description of temporal extent structured with separate values for the various elements of a calendar-clock system. Instant: URIRef # A temporal entity with zero extent or duration Interval: URIRef # A temporal entity with an extent or duration MonthOfYear: URIRef # The month of the year ProperInterval: URIRef # A temporal entity with non-zero extent or duration, i.e. 
for which the value of the beginning and end are different TRS: URIRef # A temporal reference system, such as a temporal coordinate system (with an origin, direction, and scale), a calendar-clock combination, or a (possibly hierarchical) ordinal system. This is a stub class, representing the set of all temporal reference systems. TemporalDuration: URIRef # Time extent; duration of a time interval separate from its particular start position TemporalEntity: URIRef # A temporal interval or instant. TemporalPosition: URIRef # A position on a time-line TemporalUnit: URIRef # A standard duration, which provides a scale factor for a time extent, or the granularity or precision for a time position. TimePosition: URIRef # A temporal position described using either a (nominal) value from an ordinal reference system, or a (numeric) value in a temporal coordinate system. TimeZone: URIRef # A Time Zone specifies the amount by which the local time is offset from UTC. A time zone is usually denoted geographically (e.g. Australian Eastern Daylight Time), with a constant value in a given region. The region where it applies and the offset from UTC are specified by a locally recognised governing authority. # http://www.w3.org/2002/07/owl#DatatypeProperty day: URIRef # Day position in a calendar-clock system. The range of this property is not specified, so can be replaced by any specific representation of a calendar day from any calendar. dayOfYear: URIRef # The number of the day within the year days: URIRef # length of, or element of the length of, a temporal extent expressed in days hasXSDDuration: URIRef # Extent of a temporal entity, expressed using xsd:duration hour: URIRef # Hour position in a calendar-clock system. 
hours: URIRef # length of, or element of the length of, a temporal extent expressed in hours inXSDDate: URIRef # Position of an instant, expressed using xsd:date inXSDDateTimeStamp: URIRef # Position of an instant, expressed using xsd:dateTimeStamp inXSDgYear: URIRef # Position of an instant, expressed using xsd:gYear inXSDgYearMonth: URIRef # Position of an instant, expressed using xsd:gYearMonth minute: URIRef # Minute position in a calendar-clock system. minutes: URIRef # length, or element of, a temporal extent expressed in minutes month: URIRef # Month position in a calendar-clock system. The range of this property is not specified, so can be replaced by any specific representation of a calendar month from any calendar. months: URIRef # length of, or element of the length of, a temporal extent expressed in months nominalPosition: URIRef # The (nominal) value indicating temporal position in an ordinal reference system numericDuration: URIRef # Value of a temporal extent expressed as a decimal number scaled by a temporal unit numericPosition: URIRef # The (numeric) value indicating position within a temporal coordinate system second: URIRef # Second position in a calendar-clock system. seconds: URIRef # length of, or element of the length of, a temporal extent expressed in seconds week: URIRef # Week number within the year. weeks: URIRef # length of, or element of the length of, a temporal extent expressed in weeks year: URIRef # Year position in a calendar-clock system. The range of this property is not specified, so can be replaced by any specific representation of a calendar year from any calendar. 
years: URIRef # length of, or element of the length of, a temporal extent expressed in years # http://www.w3.org/2002/07/owl#DeprecatedClass January: URIRef # January Year: URIRef # Year duration # http://www.w3.org/2002/07/owl#DeprecatedProperty inXSDDateTime: URIRef # Position of an instant, expressed using xsd:dateTime xsdDateTime: URIRef # Value of DateTimeInterval expressed as a compact value. # http://www.w3.org/2002/07/owl#FunctionalProperty hasTRS: URIRef # The temporal reference system used by a temporal position or extent description. # http://www.w3.org/2002/07/owl#ObjectProperty after: URIRef # Gives directionality to time. If a temporal entity T1 is after another temporal entity T2, then the beginning of T1 is after the end of T2. dayOfWeek: URIRef # The day of week, whose value is a member of the class time:DayOfWeek hasBeginning: URIRef # Beginning of a temporal entity. hasDateTimeDescription: URIRef # Value of DateTimeInterval expressed as a structured value. The beginning and end of the interval coincide with the limits of the shortest element in the description. hasDuration: URIRef # Duration of a temporal entity, event or activity, or thing, expressed as a scaled value hasDurationDescription: URIRef # Duration of a temporal entity, expressed using a structured description hasEnd: URIRef # End of a temporal entity. hasTemporalDuration: URIRef # Duration of a temporal entity. hasTime: URIRef # Supports the association of a temporal entity (instant or interval) to any thing inDateTime: URIRef # Position of an instant, expressed using a structured description inTemporalPosition: URIRef # Position of a time instant inTimePosition: URIRef # Position of a time instant expressed as a TimePosition inside: URIRef # An instant that falls inside the interval. It is not intended to include beginnings and ends of intervals. 
intervalAfter: URIRef # If a proper interval T1 is intervalAfter another proper interval T2, then the beginning of T1 is after the end of T2. intervalBefore: URIRef # If a proper interval T1 is intervalBefore another proper interval T2, then the end of T1 is before the beginning of T2. intervalContains: URIRef # If a proper interval T1 is intervalContains another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is after the end of T2. intervalDisjoint: URIRef # If a proper interval T1 is intervalDisjoint another proper interval T2, then the beginning of T1 is after the end of T2, or the end of T1 is before the beginning of T2, i.e. the intervals do not overlap in any way, but their ordering relationship is not known. intervalDuring: URIRef # If a proper interval T1 is intervalDuring another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is before the end of T2. intervalEquals: URIRef # If a proper interval T1 is intervalEquals another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is coincident with the end of T2. intervalFinishedBy: URIRef # If a proper interval T1 is intervalFinishedBy another proper interval T2, then the beginning of T1 is before the beginning of T2, and the end of T1 is coincident with the end of T2. intervalFinishes: URIRef # If a proper interval T1 is intervalFinishes another proper interval T2, then the beginning of T1 is after the beginning of T2, and the end of T1 is coincident with the end of T2. intervalIn: URIRef # If a proper interval T1 is intervalIn another proper interval T2, then the beginning of T1 is after the beginning of T2 or is coincident with the beginning of T2, and the end of T1 is before the end of T2, or is coincident with the end of T2, except that end of T1 may not be coincident with the end of T2 if the beginning of T1 is coincident with the beginning of T2. 
intervalMeets: URIRef # If a proper interval T1 is intervalMeets another proper interval T2, then the end of T1 is coincident with the beginning of T2. intervalMetBy: URIRef # If a proper interval T1 is intervalMetBy another proper interval T2, then the beginning of T1 is coincident with the end of T2. intervalOverlappedBy: URIRef # If a proper interval T1 is intervalOverlappedBy another proper interval T2, then the beginning of T1 is after the beginning of T2, the beginning of T1 is before the end of T2, and the end of T1 is after the end of T2. intervalOverlaps: URIRef # If a proper interval T1 is intervalOverlaps another proper interval T2, then the beginning of T1 is before the beginning of T2, the end of T1 is after the beginning of T2, and the end of T1 is before the end of T2. intervalStartedBy: URIRef # If a proper interval T1 is intervalStarted another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is after the end of T2. intervalStarts: URIRef # If a proper interval T1 is intervalStarts another proper interval T2, then the beginning of T1 is coincident with the beginning of T2, and the end of T1 is before the end of T2. monthOfYear: URIRef # The month of the year, whose value is a member of the class time:MonthOfYear timeZone: URIRef # The time zone for clock elements in the temporal position unitType: URIRef # The temporal unit which provides the precision of a date-time value or scale of a temporal extent # http://www.w3.org/2002/07/owl#TransitiveProperty before: URIRef # Gives directionality to time. If a temporal entity T1 is before another temporal entity T2, then the end of T1 is before the beginning of T2. Thus, "before" can be considered to be basic to instants and derived for intervals. 
# http://www.w3.org/2006/time#DayOfWeek Friday: URIRef # Friday Monday: URIRef # Monday Saturday: URIRef # Saturday Sunday: URIRef # Sunday Thursday: URIRef # Thursday Tuesday: URIRef # Tuesday Wednesday: URIRef # Wednesday # http://www.w3.org/2006/time#TemporalUnit unitDay: URIRef # day unitHour: URIRef # hour unitMinute: URIRef # minute unitMonth: URIRef # month unitSecond: URIRef # second unitWeek: URIRef # week unitYear: URIRef # year _NS = Namespace("http://www.w3.org/2006/time#") rdflib-6.1.1/rdflib/namespace/_VANN.py000066400000000000000000000023201415774155300174430ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class VANN(DefinedNamespace): """ VANN: A vocabulary for annotating vocabulary descriptions This document describes a vocabulary for annotating descriptions of vocabularies with examples and usage notes. Generated from: https://vocab.org/vann/vann-vocab-20100607.rdf Date: 2020-05-26 14:21:15.580430 """ _fail = True # http://www.w3.org/2002/07/owl#AnnotationProperty changes: URIRef # A reference to a resource that describes changes between this version of a vocabulary and the previous. example: URIRef # A reference to a resource that provides an example of how this resource can be used. preferredNamespacePrefix: URIRef # The preferred namespace prefix to use when using terms from this vocabulary in an XML document. preferredNamespaceUri: URIRef # The preferred namespace URI to use when using terms from this vocabulary in an XML document. termGroup: URIRef # A group of related terms in a vocabulary. usageNote: URIRef # A reference to a resource that provides information on how this resource is to be used. 
_NS = Namespace("http://purl.org/vocab/vann/") rdflib-6.1.1/rdflib/namespace/_VOID.py000066400000000000000000000110411415774155300174420ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class VOID(DefinedNamespace): """ Vocabulary of Interlinked Datasets (VoID) The Vocabulary of Interlinked Datasets (VoID) is an RDF Schema vocabulary for expressing metadata about RDF datasets. It is intended as a bridge between the publishers and users of RDF data, with applications ranging from data discovery to cataloging and archiving of datasets. This document provides a formal definition of the new RDF classes and properties introduced for VoID. It is a companion to the main specification document for VoID, Describing Linked Datasets with the VoID Vocabulary. Generated from: http://rdfs.org/ns/void# Date: 2020-05-26 14:20:11.911298 """ _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property classPartition: URIRef # A subset of a void:Dataset that contains only the entities of a certain rdfs:Class. classes: URIRef # The total number of distinct classes in a void:Dataset. In other words, the number of distinct resources occurring as objects of rdf:type triples in the dataset. dataDump: URIRef # An RDF dump, partial or complete, of a void:Dataset. distinctObjects: URIRef # The total number of distinct objects in a void:Dataset. In other words, the number of distinct resources that occur in the object position of triples in the dataset. Literals are included in this count. distinctSubjects: URIRef # The total number of distinct subjects in a void:Dataset. In other words, the number of distinct resources that occur in the subject position of triples in the dataset. documents: URIRef # The total number of documents, for datasets that are published as a set of individual documents, such as RDF/XML documents or RDFa-annotated web pages. 
Non-RDF documents, such as web pages in HTML or images, are usually not included in this count. This property is intended for datasets where the total number of triples or entities is hard to determine. void:triples or void:entities should be preferred where practical. entities: URIRef # The total number of entities that are described in a void:Dataset. exampleResource: URIRef # example resource of dataset feature: URIRef # feature inDataset: URIRef # Points to the void:Dataset that a document is a part of. linkPredicate: URIRef # a link predicate objectsTarget: URIRef # The dataset describing the objects of the triples contained in the Linkset. openSearchDescription: URIRef # An OpenSearch description document for a free-text search service over a void:Dataset. properties: URIRef # The total number of distinct properties in a void:Dataset. In other words, the number of distinct resources that occur in the predicate position of triples in the dataset. property: URIRef # The rdf:Property that is the predicate of all triples in a property-based partition. propertyPartition: URIRef # A subset of a void:Dataset that contains only the triples of a certain rdf:Property. rootResource: URIRef # A top concept or entry point for a void:Dataset that is structured in a tree-like fashion. All resources in a dataset can be reached by following links from its root resources in a small number of steps. sparqlEndpoint: URIRef # has a SPARQL endpoint at subjectsTarget: URIRef # The dataset describing the subjects of triples contained in the Linkset. subset: URIRef # has subset target: URIRef # One of the two datasets linked by the Linkset. triples: URIRef # The total number of triples contained in a void:Dataset. uriLookupEndpoint: URIRef # Defines a simple URI look-up protocol for accessing a dataset. uriRegexPattern: URIRef # Defines a regular expression pattern matching URIs in the dataset. 
uriSpace: URIRef # A URI that is a common string prefix of all the entity URIs in a void:Dataset. vocabulary: URIRef # A vocabulary that is used in the dataset. # http://www.w3.org/2000/01/rdf-schema#Class Dataset: URIRef # A set of RDF triples that are published, maintained or aggregated by a single provider. DatasetDescription: URIRef # A web resource whose foaf:primaryTopic or foaf:topics include void:Datasets. Linkset: URIRef # A collection of RDF links between two void:Datasets. TechnicalFeature: URIRef # A technical feature of a void:Dataset, such as a supported RDF serialization format. # Valid non-python identifiers _extras = ["class"] _NS = Namespace("http://rdfs.org/ns/void#") rdflib-6.1.1/rdflib/namespace/_XSD.py000066400000000000000000000140641415774155300173470ustar00rootroot00000000000000from rdflib.term import URIRef from rdflib.namespace import DefinedNamespace, Namespace class XSD(DefinedNamespace): """ W3C XML Schema Definition Language (XSD) 1.1 Part 2: Datatypes Generated from: ../schemas/datatypes.xsd Date: 2021-09-05 20:37+10 """ _NS = Namespace("http://www.w3.org/2001/XMLSchema#") ENTITIES: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#ENTITIES ENTITY: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#ENTITY ID: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#ID IDREF: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#IDREF IDREFS: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#IDREFS NCName: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#NCName NMTOKEN: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#NMTOKEN NMTOKENS: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#NMTOKENS NOTATION: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#NOTATIONNOTATION cannot be used directly in a schema; rather a type Name: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#Name QName: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#QName anyURI: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#anyURI base64Binary: URIRef # see: 
http://www.w3.org/TR/xmlschema11-2/#base64Binary boolean: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#boolean byte: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#byte date: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#date dateTime: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#dateTime dateTimeStamp: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#dateTimeStamp dayTimeDuration: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#dayTimeDuration decimal: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#decimal double: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#double duration: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#duration float: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#float gDay: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#gDay gMonth: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#gMonth gMonthDay: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#gMonthDay gYear: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#gYear gYearMonth: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#gYearMonth hexBinary: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#binary int: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#int integer: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#integer language: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#language long: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#long negativeInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#negativeInteger nonNegativeInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonNegativeInteger nonPositiveInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonPositiveInteger normalizedString: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#normalizedString positiveInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#positiveInteger short: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#short string: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#string time: URIRef # see: 
http://www.w3.org/TR/xmlschema11-2/#time token: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#token unsignedByte: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedByte unsignedInt: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedInt unsignedLong: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedLong unsignedShort: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedShort yearMonthDuration: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#yearMonthDuration # fundamental facets - https://www.w3.org/TR/xmlschema11-2/#rf-fund-facets ordered: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-ordered bounded: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-bounded cardinality: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-cardinality numeric: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-numeric # constraining facets - https://www.w3.org/TR/xmlschema11-2/#rf-facets length: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-length minLength: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-minLength maxLength: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-maxLength pattern: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-pattern enumeration: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-enumeration whiteSpace: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-whiteSpace maxExclusive: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-maxExclusive maxInclusive: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-maxInclusive minExclusive: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-minExclusive minInclusive: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-minInclusive totalDigits: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-totalDigits fractionDigits: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-fractionDigits Assertions: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#rf-assertions explicitTimezone: URIRef # see: 
http://www.w3.org/TR/xmlschema11-2/#rf-explicitTimezone # The Seven-property Model - https://www.w3.org/TR/xmlschema11-2/#theSevenPropertyModel year: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-http://www.w3.org/TR/xmlschema11-2/#rf-whiteSpace month: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-month day: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-day hour: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-hour minute: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-minute second: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-second timezoneOffset: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-timezone rdflib-6.1.1/rdflib/namespace/__init__.py000066400000000000000000000573171415774155300203210ustar00rootroot00000000000000import logging import warnings from typing import List from unicodedata import category from pathlib import Path from urllib.parse import urldefrag from urllib.parse import urljoin from rdflib.term import URIRef, Variable, _is_valid_uri __doc__ = """ =================== Namespace Utilities =================== RDFLib provides mechanisms for managing Namespaces. In particular, there is a :class:`~rdflib.namespace.Namespace` class that takes as its argument the base URI of the namespace. .. code-block:: pycon >>> from rdflib.namespace import Namespace >>> RDFS = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") Fully qualified URIs in the namespace can be constructed either by attribute or by dictionary access on Namespace instances: .. code-block:: pycon >>> RDFS.seeAlso rdflib.term.URIRef('http://www.w3.org/1999/02/22-rdf-syntax-ns#seeAlso') >>> RDFS['seeAlso'] rdflib.term.URIRef('http://www.w3.org/1999/02/22-rdf-syntax-ns#seeAlso') Automatic handling of unknown predicates ----------------------------------------- As a programming convenience, a namespace binding is automatically created when :class:`rdflib.term.URIRef` predicates are added to the graph. 
Importable namespaces ----------------------- The following namespaces are available by directly importing from rdflib: * BRICK * CSVW * DC * DCAT * DCMITYPE * DCTERMS * DCAM * DOAP * FOAF * ODRL2 * ORG * OWL * PROF * PROV * QB * RDF * RDFS * SDO * SH * SKOS * SOSA * SSN * TIME * VANN * VOID * XSD .. code-block:: pycon >>> from rdflib.namespace import RDFS >>> RDFS.seeAlso rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#seeAlso') """ __all__ = ["is_ncname", "split_uri", "Namespace", "ClosedNamespace", "NamespaceManager"] logger = logging.getLogger(__name__) class Namespace(str): """ Utility class for quickly generating URIRefs with a common prefix >>> from rdflib.namespace import Namespace >>> n = Namespace("http://example.org/") >>> n.Person # as attribute rdflib.term.URIRef('http://example.org/Person') >>> n['first-name'] # as item - for things that are not valid python identifiers rdflib.term.URIRef('http://example.org/first-name') >>> n.Person in n True >>> n2 = Namespace("http://example2.org/") >>> n.Person in n2 False """ def __new__(cls, value): try: rt = str.__new__(cls, value) except UnicodeDecodeError: rt = str.__new__(cls, value, "utf-8") return rt @property def title(self): # Override for DCTERMS.title to return a URIRef instead of str.title method return URIRef(self + "title") def term(self, name): # need to handle slices explicitly because of __getitem__ override return URIRef(self + (name if isinstance(name, str) else "")) def __getitem__(self, key): return self.term(key) def __getattr__(self, name): if name.startswith("__"): # ignore any special Python names! raise AttributeError return self.term(name) def __repr__(self): return f"Namespace({super().__repr__()})" def __contains__(self, ref): """Allows to check if a URI is within (starts with) this Namespace. 
>>> from rdflib import URIRef >>> namespace = Namespace('http://example.org/') >>> uri = URIRef('http://example.org/foo') >>> uri in namespace True >>> person_class = namespace['Person'] >>> person_class in namespace True >>> obj = URIRef('http://not.example.org/bar') >>> obj in namespace False """ return ref.startswith(self) # test namespace membership with "ref in ns" syntax class URIPattern(str): """ Utility class for creating URIs according to some pattern This supports either new style formatting with .format or old-style with % operator >>> u=URIPattern("http://example.org/%s/%d/resource") >>> u%('books', 12345) rdflib.term.URIRef('http://example.org/books/12345/resource') """ def __new__(cls, value): try: rt = str.__new__(cls, value) except UnicodeDecodeError: rt = str.__new__(cls, value, "utf-8") return rt def __mod__(self, *args, **kwargs): return URIRef(super().__mod__(*args, **kwargs)) def format(self, *args, **kwargs): return URIRef(super().format(*args, **kwargs)) def __repr__(self): return f"URIPattern({super().__repr__()})" class DefinedNamespaceMeta(type): """ Utility metaclass for generating URIRefs with a common prefix """ _NS: Namespace _warn: bool = True _fail: bool = False # True means mimic ClosedNamespace _extras: List[str] = [] # List of non-pythonesque items _underscore_num: bool = False # True means pass "_n" constructs def __getitem__(cls, name, default=None): name = str(name) if str(name).startswith("__"): return super().__getitem__(name, default) if (cls._warn or cls._fail) and name not in cls: if cls._fail: raise AttributeError(f"term '{name}' not in namespace '{cls._NS}'") else: warnings.warn( f"Code: {name} is not defined in namespace {cls.__name__}", stacklevel=3, ) return cls._NS[name] def __getattr__(cls, name): return cls.__getitem__(name) def __repr__(cls): return f'Namespace("{cls._NS}")' def __str__(cls): return str(cls._NS) def __add__(cls, other): return cls.__getitem__(other) def __contains__(cls, item): """Determine 
whether a URI or an individual item belongs to this namespace""" item_str = str(item) if item_str.startswith("__"): return super().__contains__(item) if item_str.startswith(str(cls._NS)): item_str = item_str[len(str(cls._NS)) :] return any( item_str in c.__annotations__ or item_str in c._extras or (cls._underscore_num and item_str[0] == "_" and item_str[1:].isdigit()) for c in cls.mro() if issubclass(c, DefinedNamespace) ) class DefinedNamespace(metaclass=DefinedNamespaceMeta): """ A Namespace with an enumerated list of members. Warnings are emitted if unknown members are referenced if _warn is True """ def __init__(self): raise TypeError("namespace may not be instantiated") class ClosedNamespace(Namespace): """ A namespace with a closed list of members Trying to create terms not listed is an error """ def __new__(cls, uri, terms): rt = super().__new__(cls, uri) rt.__uris = {t: URIRef(rt + t) for t in terms} return rt @property def uri(self): # Back-compat return str(self) def term(self, name): uri = self.__uris.get(name) if uri is None: raise KeyError(f"term '{name}' not in namespace '{self}'") return uri def __getitem__(self, key): return self.term(key) def __getattr__(self, name): if name.startswith("__"): # ignore any special Python names! raise AttributeError else: try: return self.term(name) except KeyError as e: raise AttributeError(e) def __repr__(self): return f"{self.__module__}.{self.__class__.__name__}({str(self)!r})" def __dir__(self): return list(self.__uris) def __contains__(self, ref): return ( ref in self.__uris.values() ) # test namespace membership with "ref in ns" syntax def _ipython_key_completions_(self): return dir(self) XMLNS = Namespace("http://www.w3.org/XML/1998/namespace") class NamespaceManager(object): """ Class for managing prefix => namespace mappings Sample usage from FuXi ... .. 
code-block:: python ruleStore = N3RuleStore(additionalBuiltins=additionalBuiltins) nsMgr = NamespaceManager(Graph(ruleStore)) ruleGraph = Graph(ruleStore,namespace_manager=nsMgr) and ... .. code-block:: pycon >>> import rdflib >>> from rdflib import Graph >>> from rdflib.namespace import Namespace, NamespaceManager >>> exNs = Namespace('http://example.com/') >>> namespace_manager = NamespaceManager(Graph()) >>> namespace_manager.bind('ex', exNs, override=False) >>> g = Graph() >>> g.namespace_manager = namespace_manager >>> all_ns = [n for n in g.namespace_manager.namespaces()] >>> assert ('ex', rdflib.term.URIRef('http://example.com/')) in all_ns >>> """ def __init__(self, graph): self.graph = graph self.__cache = {} self.__cache_strict = {} self.__log = None self.__strie = {} self.__trie = {} for p, n in self.namespaces(): # self.bind is not always called insert_trie(self.__trie, str(n)) # DefinedNamespace bindings. self.bind("brick", BRICK) self.bind("csvw", CSVW) self.bind("dc", DC) self.bind("dcat", DCAT) self.bind("dcmitype", DCMITYPE) self.bind("dcterms", DCTERMS) self.bind("dcam", DCAM) self.bind("doap", DOAP) self.bind("foaf", FOAF) self.bind("odrl", ODRL2) self.bind("org", ORG) self.bind("owl", OWL) self.bind("prof", PROF) self.bind("prov", PROV) self.bind("qb", QB) self.bind("rdf", RDF) self.bind("rdfs", RDFS) self.bind("schema", SDO) self.bind("sh", SH) self.bind("skos", SKOS) self.bind("sosa", SOSA) self.bind("ssn", SSN) self.bind("time", TIME) self.bind("vann", VANN) self.bind("void", VOID) self.bind("xsd", XSD) # Namespace bindings. self.bind("xml", XMLNS) def __contains__(self, ref): # checks if a reference is in any of the managed namespaces with syntax # "ref in manager". 
Note that we don't use "ref in ns", as # NamespaceManager.namespaces() returns Iterator[Tuple[str, URIRef]] # rather than Iterator[Tuple[str, Namespace]] return any(ref.startswith(ns) for prefix, ns in self.namespaces()) def reset(self): self.__cache = {} self.__strie = {} self.__trie = {} for p, n in self.namespaces(): # repopulate the trie insert_trie(self.__trie, str(n)) @property def store(self): return self.graph.store def qname(self, uri): prefix, namespace, name = self.compute_qname(uri) if prefix == "": return name else: return ":".join((prefix, name)) def qname_strict(self, uri): prefix, namespace, name = self.compute_qname_strict(uri) if prefix == "": return name else: return ":".join((prefix, name)) def normalizeUri(self, rdfTerm) -> str: """ Takes an RDF Term and 'normalizes' it into a QName (using the registered prefix) or (unlike compute_qname) the Notation 3 form for URIs: <...URI...> """ try: namespace, name = split_uri(rdfTerm) if namespace not in self.__strie: insert_strie(self.__strie, self.__trie, str(namespace)) namespace = URIRef(str(namespace)) except: if isinstance(rdfTerm, Variable): return "?%s" % rdfTerm else: return "<%s>" % rdfTerm prefix = self.store.prefix(namespace) if prefix is None and isinstance(rdfTerm, Variable): return "?%s" % rdfTerm elif prefix is None: return "<%s>" % rdfTerm else: qNameParts = self.compute_qname(rdfTerm) return ":".join([qNameParts[0], qNameParts[-1]]) def compute_qname(self, uri, generate=True): if not _is_valid_uri(uri): raise ValueError( '"{}" does not look like a valid URI, cannot serialize this. 
Did you want to urlencode it?'.format( uri ) ) if uri not in self.__cache: try: namespace, name = split_uri(uri) except ValueError as e: namespace = URIRef(uri) prefix = self.store.prefix(namespace) if not prefix: raise e if namespace not in self.__strie: insert_strie(self.__strie, self.__trie, namespace) if self.__strie[namespace]: pl_namespace = get_longest_namespace(self.__strie[namespace], uri) if pl_namespace is not None: namespace = pl_namespace name = uri[len(namespace) :] namespace = URIRef(namespace) prefix = self.store.prefix(namespace) # warning multiple prefixes problem if prefix is None: if not generate: raise KeyError( "No known prefix for {} and generate=False".format(namespace) ) num = 1 while 1: prefix = "ns%s" % num if not self.store.namespace(prefix): break num += 1 self.bind(prefix, namespace) self.__cache[uri] = (prefix, namespace, name) return self.__cache[uri] def compute_qname_strict(self, uri, generate=True): # code repeated to avoid branching on strict every time # if output needs to be strict (e.g. 
for xml) then # only the strict output should bear the overhead prefix, namespace, name = self.compute_qname(uri) if is_ncname(str(name)): return prefix, namespace, name else: if uri not in self.__cache_strict: try: namespace, name = split_uri(uri, NAME_START_CATEGORIES) except ValueError as e: message = ( "This graph cannot be serialized to a strict format " "because there is no valid way to shorten {}".format(uri) ) raise ValueError(message) # omitted for strict since NCNames cannot be empty # namespace = URIRef(uri) # prefix = self.store.prefix(namespace) # if not prefix: # raise e if namespace not in self.__strie: insert_strie(self.__strie, self.__trie, namespace) # omitted for strict # if self.__strie[namespace]: # pl_namespace = get_longest_namespace(self.__strie[namespace], uri) # if pl_namespace is not None: # namespace = pl_namespace # name = uri[len(namespace):] namespace = URIRef(namespace) prefix = self.store.prefix( namespace ) # warning multiple prefixes problem if prefix is None: if not generate: raise KeyError( "No known prefix for {} and generate=False".format( namespace ) ) num = 1 while 1: prefix = "ns%s" % num if not self.store.namespace(prefix): break num += 1 self.bind(prefix, namespace) self.__cache_strict[uri] = (prefix, namespace, name) return self.__cache_strict[uri] def bind(self, prefix, namespace, override=True, replace=False): """bind a given namespace to the prefix if override, rebind, even if the given namespace is already bound to another prefix. 
if replace, replace any existing prefix with the new namespace """ namespace = URIRef(str(namespace)) # When documenting explain that override only applies in what cases if prefix is None: prefix = "" elif " " in prefix: raise KeyError("Prefixes may not contain spaces.") bound_namespace = self.store.namespace(prefix) # Check if the bound_namespace contains a URI # and if so convert it into a URIRef for comparison # This is to prevent duplicate namespaces with the # same URI if bound_namespace: bound_namespace = URIRef(bound_namespace) if bound_namespace and bound_namespace != namespace: if replace: self.store.bind(prefix, namespace) insert_trie(self.__trie, str(namespace)) return # prefix already in use for different namespace # # append number to end of prefix until we find one # that's not in use. if not prefix: prefix = "default" num = 1 while 1: new_prefix = "%s%s" % (prefix, num) tnamespace = self.store.namespace(new_prefix) if tnamespace and namespace == URIRef(tnamespace): # the prefix is already bound to the correct # namespace return if not self.store.namespace(new_prefix): break num += 1 self.store.bind(new_prefix, namespace) else: bound_prefix = self.store.prefix(namespace) if bound_prefix is None: self.store.bind(prefix, namespace) elif bound_prefix == prefix: pass # already bound else: if override or bound_prefix.startswith("_"): # or a generated prefix self.store.bind(prefix, namespace) insert_trie(self.__trie, str(namespace)) def namespaces(self): for prefix, namespace in self.store.namespaces(): namespace = URIRef(namespace) yield prefix, namespace def absolutize(self, uri, defrag=1): base = Path.cwd().as_uri() result = urljoin("%s/" % base, uri, allow_fragments=not defrag) if defrag: result = urldefrag(result)[0] if not defrag: if uri and uri[-1] == "#" and result[-1] != "#": result = "%s#" % result return URIRef(result) # From: http://www.w3.org/TR/REC-xml#NT-CombiningChar # # * Name start characters must have one of the categories Ll, Lu, Lo, # 
Lt, Nl. # # * Name characters other than Name-start characters must have one of # the categories Mc, Me, Mn, Lm, or Nd. # # * Characters in the compatibility area (i.e. with character code # greater than #xF900 and less than #xFFFE) are not allowed in XML # names. # # * Characters which have a font or compatibility decomposition # (i.e. those with a "compatibility formatting tag" in field 5 of the # database -- marked by field 5 beginning with a "<") are not allowed. # # * The following characters are treated as name-start characters rather # than name characters, because the property file classifies them as # Alphabetic: [#x02BB-#x02C1], #x0559, #x06E5, #x06E6. # # * Characters #x20DD-#x20E0 are excluded (in accordance with Unicode # 2.0, section 5.14). # # * Character #x00B7 is classified as an extender, because the property # list so identifies it. # # * Character #x0387 is added as a name character, because #x00B7 is its # canonical equivalent. # # * Characters ':' and '_' are allowed as name-start characters. # # * Characters '-' and '.' are allowed as name characters. NAME_START_CATEGORIES = ["Ll", "Lu", "Lo", "Lt", "Nl"] SPLIT_START_CATEGORIES = NAME_START_CATEGORIES + ["Nd"] NAME_CATEGORIES = NAME_START_CATEGORIES + ["Mc", "Me", "Mn", "Lm", "Nd"] ALLOWED_NAME_CHARS = ["\u00B7", "\u0387", "-", ".", "_", "%", "(", ")"] # http://www.w3.org/TR/REC-xml-names/#NT-NCName # [4] NCName ::= (Letter | '_') (NCNameChar)* /* An XML Name, minus # the ":" */ # [5] NCNameChar ::= Letter | Digit | '.' 
| '-' | '_' | CombiningChar # | Extender def is_ncname(name): if name: first = name[0] if first == "_" or category(first) in NAME_START_CATEGORIES: for i in range(1, len(name)): c = name[i] if not category(c) in NAME_CATEGORIES: if c in ALLOWED_NAME_CHARS: continue return 0 # if in compatibility area # if decomposition(c)!='': # return 0 return 1 return 0 def split_uri(uri, split_start=SPLIT_START_CATEGORIES): if uri.startswith(XMLNS): return (XMLNS, uri.split(XMLNS)[1]) length = len(uri) for i in range(0, length): c = uri[-i - 1] if not category(c) in NAME_CATEGORIES: if c in ALLOWED_NAME_CHARS: continue for j in range(-1 - i, length): if category(uri[j]) in split_start or uri[j] == "_": # _ prevents early split, roundtrip not generate ns = uri[:j] if not ns: break ln = uri[j:] return (ns, ln) break raise ValueError("Can't split '{}'".format(uri)) def insert_trie(trie, value): # aka get_subtrie_or_insert """Insert a value into the trie if it is not already contained in the trie. Return the subtree for the value regardless of whether it is a new value or not.""" if value in trie: return trie[value] multi_check = False for key in tuple(trie.keys()): if len(value) > len(key) and value.startswith(key): return insert_trie(trie[key], value) elif key.startswith(value): # we know the value is not in the trie if not multi_check: trie[value] = {} multi_check = True # there can be multiple longer existing prefixes dict_ = trie.pop( key ) # does not break strie since key<->dict_ remains unchanged trie[value][key] = dict_ if value not in trie: trie[value] = {} return trie[value] def insert_strie(strie, trie, value): if value not in strie: strie[value] = insert_trie(trie, value) def get_longest_namespace(trie, value): for key in trie: if value.startswith(key): out = get_longest_namespace(trie[key], value) if out is None: return key else: return out return None from rdflib.namespace._BRICK import BRICK from rdflib.namespace._CSVW import CSVW from rdflib.namespace._DC import DC 
from rdflib.namespace._DCAT import DCAT from rdflib.namespace._DCMITYPE import DCMITYPE from rdflib.namespace._DCTERMS import DCTERMS from rdflib.namespace._DCAM import DCAM from rdflib.namespace._DOAP import DOAP from rdflib.namespace._FOAF import FOAF from rdflib.namespace._ODRL2 import ODRL2 from rdflib.namespace._ORG import ORG from rdflib.namespace._OWL import OWL from rdflib.namespace._PROF import PROF from rdflib.namespace._PROV import PROV from rdflib.namespace._QB import QB from rdflib.namespace._RDF import RDF from rdflib.namespace._RDFS import RDFS from rdflib.namespace._SDO import SDO from rdflib.namespace._SH import SH from rdflib.namespace._SKOS import SKOS from rdflib.namespace._SOSA import SOSA from rdflib.namespace._SSN import SSN from rdflib.namespace._TIME import TIME from rdflib.namespace._VANN import VANN from rdflib.namespace._VOID import VOID from rdflib.namespace._XSD import XSD rdflib-6.1.1/rdflib/parser.py000066400000000000000000000273371415774155300161210ustar00rootroot00000000000000""" Parser plugin interface. This module defines the parser plugin interface and contains other related parser support code. The module is mainly useful for those wanting to write a parser that can plugin to rdflib. If you are wanting to invoke a parser you likely want to do so through the Graph class parse method. 
""" import codecs import os import pathlib import sys from io import BytesIO, TextIOBase, TextIOWrapper, StringIO, BufferedIOBase from typing import Any, Dict, Optional, Union from urllib.request import Request from urllib.request import url2pathname from urllib.request import urlopen from urllib.error import HTTPError from xml.sax import xmlreader from rdflib import __version__ from rdflib.term import URIRef from rdflib.namespace import Namespace __all__ = [ "Parser", "InputSource", "StringInputSource", "URLInputSource", "FileInputSource", "PythonInputSource", ] class Parser(object): __slots__ = () def __init__(self): pass def parse(self, source, sink): pass class BytesIOWrapper(BufferedIOBase): __slots__ = ("wrapped", "encoded", "encoding") def __init__(self, wrapped: str, encoding="utf-8"): super(BytesIOWrapper, self).__init__() self.wrapped = wrapped self.encoding = encoding self.encoded = None def read(self, *args, **kwargs): if self.encoded is None: b, blen = codecs.getencoder(self.encoding)(self.wrapped) self.encoded = BytesIO(b) return self.encoded.read(*args, **kwargs) def read1(self, *args, **kwargs): if self.encoded is None: b = codecs.getencoder(self.encoding)(self.wrapped) self.encoded = BytesIO(b) return self.encoded.read1(*args, **kwargs) def readinto(self, *args, **kwargs): raise NotImplementedError() def readinto1(self, *args, **kwargs): raise NotImplementedError() def write(self, *args, **kwargs): raise NotImplementedError() class InputSource(xmlreader.InputSource, object): """ TODO: """ def __init__(self, system_id=None): xmlreader.InputSource.__init__(self, system_id=system_id) self.content_type = None self.auto_close = False # see Graph.parse(), true if opened by us def close(self): c = self.getCharacterStream() if c and hasattr(c, "close"): try: c.close() except Exception: pass f = self.getByteStream() if f and hasattr(f, "close"): try: f.close() except Exception: pass class PythonInputSource(InputSource): """ Constructs an RDFLib Parser 
InputSource from a Python data structure, for example, loaded from JSON with json.load or json.loads: >>> import json >>> as_string = \"\"\"{ ... "@context" : {"ex" : "http://example.com/ns#"}, ... "@graph": [{"@type": "ex:item", "@id": "#example"}] ... }\"\"\" >>> as_python = json.loads(as_string) >>> source = create_input_source(data=as_python) >>> isinstance(source, PythonInputSource) True """ def __init__(self, data, system_id=None): self.content_type = None self.auto_close = False # see Graph.parse(), true if opened by us self.public_id = None self.system_id = system_id self.data = data def getPublicId(self): return self.public_id def setPublicId(self, public_id): self.public_id = public_id def getSystemId(self): return self.system_id def setSystemId(self, system_id): self.system_id = system_id def close(self): self.data = None class StringInputSource(InputSource): """ Constructs an RDFLib Parser InputSource from a Python String or Bytes """ def __init__(self, value, encoding="utf-8", system_id=None): super(StringInputSource, self).__init__(system_id) if isinstance(value, str): stream = StringIO(value) self.setCharacterStream(stream) self.setEncoding(encoding) b_stream = BytesIOWrapper(value, encoding) self.setByteStream(b_stream) else: stream = BytesIO(value) self.setByteStream(stream) c_stream = TextIOWrapper(stream, encoding) self.setCharacterStream(c_stream) self.setEncoding(c_stream.encoding) headers = { "User-agent": "rdflib-%s (http://rdflib.net/; eikeon@eikeon.com)" % __version__ } class URLInputSource(InputSource): """ TODO: """ def __init__(self, system_id=None, format=None): super(URLInputSource, self).__init__(system_id) self.url = system_id # copy headers to change myheaders = dict(headers) if format == "application/rdf+xml": myheaders["Accept"] = "application/rdf+xml, */*;q=0.1" elif format == "n3": myheaders["Accept"] = "text/n3, */*;q=0.1" elif format == "turtle": myheaders["Accept"] = "text/turtle,application/x-turtle, */*;q=0.1" elif format 
== "nt": myheaders["Accept"] = "text/plain, */*;q=0.1" elif format == "json-ld": myheaders[ "Accept" ] = "application/ld+json, application/json;q=0.9, */*;q=0.1" else: myheaders["Accept"] = ( "application/rdf+xml,text/rdf+n3;q=0.9," + "application/xhtml+xml;q=0.5, */*;q=0.1" ) req = Request(system_id, None, myheaders) def _urlopen(req: Request): try: return urlopen(req) except HTTPError as ex: # 308 (Permanent Redirect) is not supported by current python version(s) # See https://bugs.python.org/issue40321 # This custom error handling should be removed once all # supported versions of python support 308. if ex.code == 308: req.full_url = ex.headers.get("Location") return _urlopen(req) else: raise file = _urlopen(req) # Fix for issue 130 https://github.com/RDFLib/rdflib/issues/130 self.url = file.geturl() # in case redirections took place self.setPublicId(self.url) self.content_type = file.info().get("content-type") if self.content_type is not None: self.content_type = self.content_type.split(";", 1)[0] self.setByteStream(file) # TODO: self.setEncoding(encoding) self.response_info = file.info() # a mimetools.Message instance def __repr__(self): return self.url class FileInputSource(InputSource): def __init__(self, file): base = pathlib.Path.cwd().as_uri() system_id = URIRef(pathlib.Path(file.name).absolute().as_uri(), base=base) super(FileInputSource, self).__init__(system_id) self.file = file if isinstance(file, TextIOBase): # Python3 unicode fp self.setCharacterStream(file) self.setEncoding(file.encoding) try: b = file.buffer self.setByteStream(b) except (AttributeError, LookupError): self.setByteStream(file) else: self.setByteStream(file) # We cannot set characterStream here because # we do not know the Raw Bytes File encoding. 
def create_input_source(
    source=None,
    publicID=None,
    location=None,
    file=None,
    data: Optional[Union[str, bytes, bytearray, Dict[Any, Any]]] = None,
    format=None,
):
    """
    Return an appropriate InputSource instance for the given parameters.

    Exactly one of ``source``, ``location``, ``file`` or ``data`` must be
    given; otherwise a ``ValueError`` is raised.
    """
    # test that exactly one of source, location, file, and data is not None.
    non_empty_arguments = [v for v in (source, location, file, data) if v is not None]

    if len(non_empty_arguments) != 1:
        raise ValueError(
            "exactly one of source, location, file or data must be given",
        )

    input_source = None

    if source is not None:
        if isinstance(source, InputSource):
            input_source = source
        else:
            if isinstance(source, str):
                location = source
            elif isinstance(source, pathlib.PurePath):
                location = str(source)
            elif isinstance(source, bytes):
                data = source
            elif hasattr(source, "read") and not isinstance(source, Namespace):
                f = source
                input_source = InputSource()
                if hasattr(source, "encoding"):
                    input_source.setCharacterStream(source)
                    input_source.setEncoding(source.encoding)
                    try:
                        # BUGFIX: this previously read ``file.buffer``, but
                        # ``file`` is a *different* argument (usually None)
                        # here, so the AttributeError was silently swallowed
                        # and no byte stream was ever set.  The raw byte
                        # stream of a text file object is ``source.buffer``.
                        b = source.buffer
                        input_source.setByteStream(b)
                    except (AttributeError, LookupError):
                        input_source.setByteStream(source)
                else:
                    input_source.setByteStream(f)
                if f is sys.stdin:
                    input_source.setSystemId("file:///dev/stdin")
                elif hasattr(f, "name"):
                    input_source.setSystemId(f.name)
            else:
                raise Exception(
                    "Unexpected type '%s' for source '%s'" % (type(source), source)
                )

    absolute_location = None  # Further to fix for issue 130

    auto_close = False  # make sure we close all file handles we open

    if location is not None:
        (
            absolute_location,
            auto_close,
            file,
            input_source,
        ) = _create_input_source_from_location(
            file=file,
            format=format,
            input_source=input_source,
            location=location,
        )

    if file is not None:
        input_source = FileInputSource(file)

    if data is not None:
        if isinstance(data, dict):
            input_source = PythonInputSource(data)
            auto_close = True
        elif isinstance(data, (str, bytes, bytearray)):
            input_source = StringInputSource(data)
            auto_close = True
        else:
            # Message grammar fixed ("can only" -> "can only be").
            raise RuntimeError(
                f"parse data can only be str, or bytes. not: {type(data)}"
            )

    if input_source is None:
        raise Exception("could not create InputSource")
    else:
        input_source.auto_close |= auto_close
        if publicID is not None:  # Further to fix for issue 130
            input_source.setPublicId(publicID)
        # Further to fix for issue 130
        elif input_source.getPublicId() is None:
            input_source.setPublicId(absolute_location or "")
        return input_source


def _create_input_source_from_location(file, format, input_source, location):
    # Resolve ``location`` (a filesystem path or a URL) into an absolute URI
    # and either open the local file or build a URLInputSource for it.
    #
    # Fix for Windows problem https://github.com/RDFLib/rdflib/issues/145 and
    # https://github.com/RDFLib/rdflib/issues/1430
    # NOTE: using pathlib.Path.exists on a URL fails on windows as it is not a
    # valid path. However os.path.exists() returns false for a URL on windows
    # which is why it is being used instead.
    if os.path.exists(location):
        location = pathlib.Path(location).absolute().as_uri()

    base = pathlib.Path.cwd().as_uri()

    absolute_location = URIRef(location, base=base)

    if absolute_location.startswith("file:///"):
        filename = url2pathname(absolute_location.replace("file:///", "/"))
        file = open(filename, "rb")
    else:
        input_source = URLInputSource(absolute_location, format)

    # Either way we opened a handle (file or URL) that the caller must close.
    auto_close = True
    # publicID = publicID or absolute_location  # Further to fix
    # for issue 130

    return absolute_location, auto_close, file, input_source
| +--------------------+-------------------------------------------------+ |elt1 / elt2 | A sequence path of elt1 followed by elt2. | +--------------------+-------------------------------------------------+ |elt1 | elt2 | A alternative path of elt1 or elt2 | | | (all possibilities are tried). | +--------------------+-------------------------------------------------+ |elt* | A path that connects the subject and object | | | of the path by zero or more matches of elt. | +--------------------+-------------------------------------------------+ |elt+ | A path that connects the subject and object | | | of the path by one or more matches of elt. | +--------------------+-------------------------------------------------+ |elt? | A path that connects the subject and object | | | of the path by zero or one matches of elt. | +--------------------+-------------------------------------------------+ |!iri or | Negated property set. An IRI which is not one of| |!(iri\ :sub:`1`\ \| | iri\ :sub:`1`...iri\ :sub:`n`. | |... \|iri\ :sub:`n`)| !iri is short for !(iri). | +--------------------+-------------------------------------------------+ |!^iri or | Negated property set where the excluded matches | |!(^iri\ :sub:`1`\ \|| are based on reversed path. That is, not one of | |...\|^iri\ :sub:`n`)| iri\ :sub:`1`...iri\ :sub:`n` as reverse paths. | | | !^iri is short for !(^iri). | +--------------------+-------------------------------------------------+ |!(iri\ :sub:`1`\ \| | A combination of forward and reverse | |...\|iri\ :sub:`j`\ | properties in a negated property set. | |\|^iri\ :sub:`j+1`\ | | |\|... \|^iri\ | | |:sub:`n`)| | | +--------------------+-------------------------------------------------+ |(elt) | A group path elt, brackets control precedence. | +--------------------+-------------------------------------------------+ This module is used internally by the SPARQL engine, but the property paths can also be used to query RDFLib Graphs directly. 
Where possible the SPARQL syntax is mapped to Python operators, and property path objects can be constructed from existing URIRefs. >>> from rdflib import Graph, Namespace >>> from rdflib.namespace import FOAF >>> ~FOAF.knows Path(~http://xmlns.com/foaf/0.1/knows) >>> FOAF.knows/FOAF.name Path(http://xmlns.com/foaf/0.1/knows / http://xmlns.com/foaf/0.1/name) >>> FOAF.name|FOAF.givenName Path(http://xmlns.com/foaf/0.1/name | http://xmlns.com/foaf/0.1/givenName) Modifiers (?, *, +) are done using * (the multiplication operator) and the strings '*', '?', '+', also defined as constants in this file. >>> FOAF.knows*OneOrMore Path(http://xmlns.com/foaf/0.1/knows+) The path objects can also be used with the normal graph methods. First some example data: >>> g=Graph() >>> g=g.parse(data=''' ... @prefix : . ... ... :a :p1 :c ; :p2 :f . ... :c :p2 :e ; :p3 :g . ... :g :p3 :h ; :p2 :j . ... :h :p3 :a ; :p2 :g . ... ... :q :px :q . ... ... ''', format='n3') # doctest: +ELLIPSIS >>> e = Namespace('ex:') Graph contains: >>> (e.a, e.p1/e.p2, e.e) in g True Graph generator functions, triples, subjects, objects, etc. : >>> list(g.objects(e.c, (e.p3*OneOrMore)/e.p2)) # doctest: +NORMALIZE_WHITESPACE [rdflib.term.URIRef('ex:j'), rdflib.term.URIRef('ex:g'), rdflib.term.URIRef('ex:f')] A more complete set of tests: >>> list(evalPath(g, (None, e.p1/e.p2, None)))==[(e.a, e.e)] True >>> list(evalPath(g, (e.a, e.p1|e.p2, None)))==[(e.a,e.c), (e.a,e.f)] True >>> list(evalPath(g, (e.c, ~e.p1, None))) == [ (e.c, e.a) ] True >>> list(evalPath(g, (e.a, e.p1*ZeroOrOne, None))) == [(e.a, e.a), (e.a, e.c)] True >>> list(evalPath(g, (e.c, e.p3*OneOrMore, None))) == [ ... (e.c, e.g), (e.c, e.h), (e.c, e.a)] True >>> list(evalPath(g, (e.c, e.p3*ZeroOrMore, None))) == [(e.c, e.c), ... 
(e.c, e.g), (e.c, e.h), (e.c, e.a)] True >>> list(evalPath(g, (e.a, -e.p1, None))) == [(e.a, e.f)] True >>> list(evalPath(g, (e.a, -(e.p1|e.p2), None))) == [] True >>> list(evalPath(g, (e.g, -~e.p2, None))) == [(e.g, e.j)] True >>> list(evalPath(g, (e.e, ~(e.p1/e.p2), None))) == [(e.e, e.a)] True >>> list(evalPath(g, (e.a, e.p1/e.p3/e.p3, None))) == [(e.a, e.h)] True >>> list(evalPath(g, (e.q, e.px*OneOrMore, None))) [(rdflib.term.URIRef('ex:q'), rdflib.term.URIRef('ex:q'))] >>> list(evalPath(g, (None, e.p1|e.p2, e.c))) [(rdflib.term.URIRef('ex:a'), rdflib.term.URIRef('ex:c'))] >>> list(evalPath(g, (None, ~e.p1, e.a))) == [ (e.c, e.a) ] True >>> list(evalPath(g, (None, e.p1*ZeroOrOne, e.c))) # doctest: +NORMALIZE_WHITESPACE [(rdflib.term.URIRef('ex:c'), rdflib.term.URIRef('ex:c')), (rdflib.term.URIRef('ex:a'), rdflib.term.URIRef('ex:c'))] >>> list(evalPath(g, (None, e.p3*OneOrMore, e.a))) # doctest: +NORMALIZE_WHITESPACE [(rdflib.term.URIRef('ex:h'), rdflib.term.URIRef('ex:a')), (rdflib.term.URIRef('ex:g'), rdflib.term.URIRef('ex:a')), (rdflib.term.URIRef('ex:c'), rdflib.term.URIRef('ex:a'))] >>> list(evalPath(g, (None, e.p3*ZeroOrMore, e.a))) # doctest: +NORMALIZE_WHITESPACE [(rdflib.term.URIRef('ex:a'), rdflib.term.URIRef('ex:a')), (rdflib.term.URIRef('ex:h'), rdflib.term.URIRef('ex:a')), (rdflib.term.URIRef('ex:g'), rdflib.term.URIRef('ex:a')), (rdflib.term.URIRef('ex:c'), rdflib.term.URIRef('ex:a'))] >>> list(evalPath(g, (None, -e.p1, e.f))) == [(e.a, e.f)] True >>> list(evalPath(g, (None, -(e.p1|e.p2), e.c))) == [] True >>> list(evalPath(g, (None, -~e.p2, e.j))) == [(e.g, e.j)] True >>> list(evalPath(g, (None, ~(e.p1/e.p2), e.a))) == [(e.e, e.a)] True >>> list(evalPath(g, (None, e.p1/e.p3/e.p3, e.h))) == [(e.a, e.h)] True >>> list(evalPath(g, (e.q, e.px*OneOrMore, None))) [(rdflib.term.URIRef('ex:q'), rdflib.term.URIRef('ex:q'))] >>> list(evalPath(g, (e.c, (e.p2|e.p3)*ZeroOrMore, e.j))) [(rdflib.term.URIRef('ex:c'), rdflib.term.URIRef('ex:j'))] No vars 
class Path(object):
    """Abstract base class for SPARQL 1.1 property-path expressions.

    Subclasses implement :meth:`eval`.  Equality, hashing and ordering are
    all delegated to ``repr``, so two paths compare equal exactly when their
    textual representations match.
    """

    # Operator protocol; the concrete implementations are monkey-patched
    # onto this class at import time (see the bottom of this module).
    __or__: Callable[["Path", Union["URIRef", "Path"]], "AlternativePath"]
    __invert__: Callable[["Path"], "InvPath"]
    __neg__: Callable[["Path"], "NegatedPath"]
    __truediv__: Callable[["Path", Union["URIRef", "Path"]], "SequencePath"]
    __mul__: Callable[["Path", str], "MulPath"]

    def eval(self, graph, subj=None, obj=None):
        # Subclasses must yield (subject, object) pairs.
        raise NotImplementedError()

    def __hash__(self):
        return hash(repr(self))

    def __eq__(self, other):
        return repr(self) == repr(other)

    def __ne__(self, other):
        return not self == other

    def _require_orderable(self, other):
        # Ordering is only defined against other paths and RDF nodes.
        if not isinstance(other, (Path, Node)):
            raise TypeError(
                "unorderable types: %s() < %s()" % (repr(self), repr(other))
            )

    def __lt__(self, other):
        self._require_orderable(other)
        return repr(self) < repr(other)

    def __le__(self, other):
        self._require_orderable(other)
        return repr(self) <= repr(other)

    def __gt__(self, other):
        return not self <= other

    def __ge__(self, other):
        return not self < other
eval(self, graph, subj=None, obj=None): def _eval_seq(paths, subj, obj): if paths[1:]: for s, o in evalPath(graph, (subj, paths[0], None)): for r in _eval_seq(paths[1:], o, obj): yield s, r[1] else: for s, o in evalPath(graph, (subj, paths[0], obj)): yield s, o def _eval_seq_bw(paths, subj, obj): if paths[:-1]: for s, o in evalPath(graph, (None, paths[-1], obj)): for r in _eval_seq(paths[:-1], subj, s): yield r[0], o else: for s, o in evalPath(graph, (subj, paths[0], obj)): yield s, o if subj: return _eval_seq(self.args, subj, obj) elif obj: return _eval_seq_bw(self.args, subj, obj) else: # no vars bound, we can start anywhere return _eval_seq(self.args, subj, obj) def __repr__(self): return "Path(%s)" % " / ".join(str(x) for x in self.args) def n3(self): return "/".join(a.n3() for a in self.args) class AlternativePath(Path): def __init__(self, *args): self.args = [] for a in args: if isinstance(a, AlternativePath): self.args += a.args else: self.args.append(a) def eval(self, graph, subj=None, obj=None): for x in self.args: for y in evalPath(graph, (subj, x, obj)): yield y def __repr__(self): return "Path(%s)" % " | ".join(str(x) for x in self.args) def n3(self): return "|".join(a.n3() for a in self.args) class MulPath(Path): def __init__(self, path, mod): self.path = path self.mod = mod if mod == ZeroOrOne: self.zero = True self.more = False elif mod == ZeroOrMore: self.zero = True self.more = True elif mod == OneOrMore: self.zero = False self.more = True else: raise Exception("Unknown modifier %s" % mod) def eval(self, graph, subj=None, obj=None, first=True): if self.zero and first: if subj and obj: if subj == obj: yield subj, obj elif subj: yield subj, subj elif obj: yield obj, obj def _fwd(subj=None, obj=None, seen=None): seen.add(subj) for s, o in evalPath(graph, (subj, self.path, None)): if not obj or o == obj: yield s, o if self.more: if o in seen: continue for s2, o2 in _fwd(o, obj, seen): yield s, o2 def _bwd(subj=None, obj=None, seen=None): seen.add(obj) 
class NegatedPath(Path):
    """A SPARQL negated property set: matches any triple whose predicate is
    *not* one of the listed members (forward or inverse)."""

    def __init__(self, arg):
        if isinstance(arg, (URIRef, InvPath)):
            self.args = [arg]
        elif isinstance(arg, AlternativePath):
            self.args = arg.args
        else:
            raise Exception(
                "Can only negate URIRefs, InvPaths or "
                + "AlternativePaths, not: %s" % (arg,)
            )

    def eval(self, graph, subj=None, obj=None):
        # Scan every candidate triple and yield those whose predicate does
        # not match any member of the negated set; inverse members exclude
        # the triple when the *reversed* statement exists in the graph.
        for s, p, o in graph.triples((subj, None, obj)):
            for a in self.args:
                if isinstance(a, URIRef):
                    if p == a:
                        break
                elif isinstance(a, InvPath):
                    if (o, a.arg, s) in graph:
                        break
                else:
                    raise Exception("Invalid path in NegatedPath: %s" % a)
            else:
                # No member matched -> the triple survives the negation.
                yield s, o

    def __repr__(self):
        return "Path(! %s)" % ",".join(str(x) for x in self.args)

    def n3(self):
        # BUGFIX: previously ``"|".join(self.args)`` concatenated the raw
        # members, emitting bare URIs without angle brackets and dropping
        # the "^" prefix of inverse members.  Each member must be rendered
        # through its own n3() serialization.
        return "!(%s)" % ("|".join(a.n3() for a in self.args))
If you have a package that uses a setuptools based setup.py you can add the following to your setup:: entry_points = { 'rdf.plugins.parser': [ 'nt = rdf.plugins.parsers.ntriples:NTParser', ], 'rdf.plugins.serializer': [ 'nt = rdf.plugins.serializers.NTSerializer:NTSerializer', ], } See the `setuptools dynamic discovery of services and plugins`__ for more information. .. __: http://peak.telecommunity.com/DevCenter/setuptools#dynamic-discovery-of-services-and-plugins """ from rdflib.store import Store from rdflib.parser import Parser from rdflib.serializer import Serializer from rdflib.query import ( ResultParser, ResultSerializer, Processor, Result, UpdateProcessor, ) from rdflib.exceptions import Error import sys from typing import ( TYPE_CHECKING, Any, Dict, Generic, Iterator, Optional, Tuple, Type, TypeVar, overload, ) if TYPE_CHECKING: from pkg_resources import EntryPoint __all__ = ["register", "get", "plugins", "PluginException", "Plugin", "PKGPlugin"] rdflib_entry_points = { "rdf.plugins.store": Store, "rdf.plugins.serializer": Serializer, "rdf.plugins.parser": Parser, "rdf.plugins.resultparser": ResultParser, "rdf.plugins.resultserializer": ResultSerializer, "rdf.plugins.queryprocessor": Processor, "rdf.plugins.queryresult": Result, "rdf.plugins.updateprocessor": UpdateProcessor, } _plugins: Dict[Tuple[str, Type[Any]], "Plugin"] = {} class PluginException(Error): pass PluginT = TypeVar("PluginT") class Plugin(Generic[PluginT]): def __init__( self, name: str, kind: Type[PluginT], module_path: str, class_name: str ): self.name = name self.kind = kind self.module_path = module_path self.class_name = class_name self._class: Optional[Type[PluginT]] = None def getClass(self) -> Type[PluginT]: if self._class is None: module = __import__(self.module_path, globals(), locals(), [""]) self._class = getattr(module, self.class_name) return self._class class PKGPlugin(Plugin[PluginT]): def __init__(self, name: str, kind: Type[PluginT], ep: "EntryPoint"): self.name = name 
def get(name: str, kind: Type[PluginT]) -> Type[PluginT]:
    """Return the registered plugin class for ``(name, kind)``.

    :raises PluginException: if no plugin is registered under that key.
    """
    plugin = _plugins.get((name, kind))
    if plugin is None:
        raise PluginException("No plugin registered for (%s, %s)" % (name, kind))
    return plugin.getClass()
""" for p in _plugins.values(): if (name is None or name == p.name) and (kind is None or kind == p.kind): yield p # Register Stores register( "default", Store, "rdflib.plugins.stores.memory", "Memory", ) register( "Memory", Store, "rdflib.plugins.stores.memory", "Memory", ) register( "SimpleMemory", Store, "rdflib.plugins.stores.memory", "SimpleMemory", ) register( "Auditable", Store, "rdflib.plugins.stores.auditable", "AuditableStore", ) register( "Concurrent", Store, "rdflib.plugins.stores.concurrent", "ConcurrentStore", ) register( "BerkeleyDB", Store, "rdflib.plugins.stores.berkeleydb", "BerkeleyDB", ) register( "SPARQLStore", Store, "rdflib.plugins.stores.sparqlstore", "SPARQLStore", ) register( "SPARQLUpdateStore", Store, "rdflib.plugins.stores.sparqlstore", "SPARQLUpdateStore", ) # Register Triple Serializers register( "application/rdf+xml", Serializer, "rdflib.plugins.serializers.rdfxml", "XMLSerializer", ) register( "xml", Serializer, "rdflib.plugins.serializers.rdfxml", "XMLSerializer", ) register( "pretty-xml", Serializer, "rdflib.plugins.serializers.rdfxml", "PrettyXMLSerializer", ) register( "text/n3", Serializer, "rdflib.plugins.serializers.n3", "N3Serializer", ) register( "n3", Serializer, "rdflib.plugins.serializers.n3", "N3Serializer", ) register( "text/turtle", Serializer, "rdflib.plugins.serializers.turtle", "TurtleSerializer", ) register( "turtle", Serializer, "rdflib.plugins.serializers.turtle", "TurtleSerializer", ) register( "ttl", Serializer, "rdflib.plugins.serializers.turtle", "TurtleSerializer", ) register( "longturtle", Serializer, "rdflib.plugins.serializers.longturtle", "LongTurtleSerializer", ) register( "application/n-triples", Serializer, "rdflib.plugins.serializers.nt", "NTSerializer", ) register( "ntriples", Serializer, "rdflib.plugins.serializers.nt", "NTSerializer", ) register( "nt", Serializer, "rdflib.plugins.serializers.nt", "NTSerializer", ) register( "nt11", Serializer, "rdflib.plugins.serializers.nt", "NT11Serializer", ) 
register( "json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer", ) register( "application/ld+json", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer", ) # Register Quad Serializers register( "application/n-quads", Serializer, "rdflib.plugins.serializers.nquads", "NQuadsSerializer", ) register( "nquads", Serializer, "rdflib.plugins.serializers.nquads", "NQuadsSerializer", ) register( "application/trix", Serializer, "rdflib.plugins.serializers.trix", "TriXSerializer", ) register( "trix", Serializer, "rdflib.plugins.serializers.trix", "TriXSerializer", ) register( "application/trig", Serializer, "rdflib.plugins.serializers.trig", "TrigSerializer", ) register( "trig", Serializer, "rdflib.plugins.serializers.trig", "TrigSerializer", ) register( "hext", Serializer, "rdflib.plugins.serializers.hext", "HextuplesSerializer", ) # Register Triple Parsers register( "application/rdf+xml", Parser, "rdflib.plugins.parsers.rdfxml", "RDFXMLParser", ) register( "xml", Parser, "rdflib.plugins.parsers.rdfxml", "RDFXMLParser", ) register( "text/n3", Parser, "rdflib.plugins.parsers.notation3", "N3Parser", ) register( "n3", Parser, "rdflib.plugins.parsers.notation3", "N3Parser", ) register( "text/turtle", Parser, "rdflib.plugins.parsers.notation3", "TurtleParser", ) register( "turtle", Parser, "rdflib.plugins.parsers.notation3", "TurtleParser", ) register( "ttl", Parser, "rdflib.plugins.parsers.notation3", "TurtleParser", ) register( "application/n-triples", Parser, "rdflib.plugins.parsers.ntriples", "NTParser", ) register( "ntriples", Parser, "rdflib.plugins.parsers.ntriples", "NTParser", ) register( "nt", Parser, "rdflib.plugins.parsers.ntriples", "NTParser", ) register( "nt11", Parser, "rdflib.plugins.parsers.ntriples", "NTParser", ) register( "application/ld+json", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser", ) register( "json-ld", Parser, "rdflib.plugins.parsers.jsonld", "JsonLDParser", ) # Register Quad Parsers register( 
"application/n-quads", Parser, "rdflib.plugins.parsers.nquads", "NQuadsParser", ) register( "nquads", Parser, "rdflib.plugins.parsers.nquads", "NQuadsParser", ) register( "application/trix", Parser, "rdflib.plugins.parsers.trix", "TriXParser", ) register( "trix", Parser, "rdflib.plugins.parsers.trix", "TriXParser", ) register( "application/trig", Parser, "rdflib.plugins.parsers.trig", "TrigParser", ) register( "trig", Parser, "rdflib.plugins.parsers.trig", "TrigParser", ) register( "hext", Parser, "rdflib.plugins.parsers.hext", "HextuplesParser", ) # Register SPARQL Processors register( "sparql", Result, "rdflib.plugins.sparql.processor", "SPARQLResult", ) register( "sparql", Processor, "rdflib.plugins.sparql.processor", "SPARQLProcessor", ) register( "sparql", UpdateProcessor, "rdflib.plugins.sparql.processor", "SPARQLUpdateProcessor", ) # Register SPARQL Result Serializers register( "xml", ResultSerializer, "rdflib.plugins.sparql.results.xmlresults", "XMLResultSerializer", ) register( "application/sparql-results+xml", ResultSerializer, "rdflib.plugins.sparql.results.xmlresults", "XMLResultSerializer", ) register( "txt", ResultSerializer, "rdflib.plugins.sparql.results.txtresults", "TXTResultSerializer", ) register( "json", ResultSerializer, "rdflib.plugins.sparql.results.jsonresults", "JSONResultSerializer", ) register( "application/sparql-results+json", ResultSerializer, "rdflib.plugins.sparql.results.jsonresults", "JSONResultSerializer", ) register( "csv", ResultSerializer, "rdflib.plugins.sparql.results.csvresults", "CSVResultSerializer", ) register( "text/csv", ResultSerializer, "rdflib.plugins.sparql.results.csvresults", "CSVResultSerializer", ) # Register SPARQL Result Parsers register( "xml", ResultParser, "rdflib.plugins.sparql.results.xmlresults", "XMLResultParser", ) register( "application/sparql-results+xml", ResultParser, "rdflib.plugins.sparql.results.xmlresults", "XMLResultParser", ) register( "application/sparql-results+xml; charset=UTF-8", 
ResultParser, "rdflib.plugins.sparql.results.xmlresults", "XMLResultParser", ) register( "application/rdf+xml", ResultParser, "rdflib.plugins.sparql.results.graph", "GraphResultParser", ) register( "json", ResultParser, "rdflib.plugins.sparql.results.jsonresults", "JSONResultParser", ) register( "application/sparql-results+json", ResultParser, "rdflib.plugins.sparql.results.jsonresults", "JSONResultParser", ) register( "csv", ResultParser, "rdflib.plugins.sparql.results.csvresults", "CSVResultParser", ) register( "text/csv", ResultParser, "rdflib.plugins.sparql.results.csvresults", "CSVResultParser", ) register( "tsv", ResultParser, "rdflib.plugins.sparql.results.tsvresults", "TSVResultParser", ) register( "text/tab-separated-values", ResultParser, "rdflib.plugins.sparql.results.tsvresults", "TSVResultParser", ) rdflib-6.1.1/rdflib/plugins/000077500000000000000000000000001415774155300157205ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/__init__.py000066400000000000000000000001571415774155300200340ustar00rootroot00000000000000""" Default plugins for rdflib. This is a namespace package and contains the default plugins for rdflib. """ rdflib-6.1.1/rdflib/plugins/parsers/000077500000000000000000000000001415774155300173775ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/parsers/RDFVOC.py000066400000000000000000000006671415774155300207450ustar00rootroot00000000000000from rdflib.namespace import RDF from rdflib.term import URIRef class RDFVOC(RDF): _underscore_num = True _fail = True # http://www.w3.org/TR/rdf-syntax-grammar/#eventterm-attribute-URI # A mapping from unqualified terms to their qualified version. 
class HextuplesParser(Parser):
    """
    An RDFLib parser for Hextuples

    Hextuples files are newline-delimited JSON: one six-element JSON array
    per line — [subject, predicate, value, datatype, language, graph].
    """

    def __init__(self):
        pass

    def _load_json_line(self, line: str):
        # Empty strings mark absent fields in Hextuples; normalise to None.
        return [field if field != "" else None for field in json.loads(line)]

    def _parse_hextuple(self, cg: ConjunctiveGraph, tup: List[Union[str, None]]):
        # all values check
        # subject, predicate, value, datatype cannot be None
        # language and graph may be None
        if tup[0] is None or tup[1] is None or tup[2] is None or tup[3] is None:
            raise ValueError("subject, predicate, value, datatype cannot be None")

        # 1 - subject: identifiers starting with "_" are blank nodes
        s: Union[URIRef, BNode]
        if tup[0].startswith("_"):
            s = BNode(value=tup[0].replace("_:", ""))
        else:
            s = URIRef(tup[0])

        # 2 - predicate
        p = URIRef(tup[1])

        # 3 - value, interpreted according to the datatype field
        o: Union[URIRef, BNode, Literal]
        if tup[3] == "globalId":
            o = URIRef(tup[2])
        elif tup[3] == "localId":
            o = BNode(value=tup[2].replace("_:", ""))
        else:  # literal: language-tagged when tup[4] is set, typed otherwise
            if tup[4] is None:
                o = Literal(tup[2], datatype=URIRef(tup[3]))
            else:
                o = Literal(tup[2], lang=tup[4])

        # 6 - context (named graph), if any
        if tup[5] is not None:
            cg.add((s, p, o, URIRef(tup[5])))
        else:
            cg.add((s, p, o))

    def parse(self, source, graph, **kwargs):
        if kwargs.get("encoding") not in [None, "utf-8"]:
            warnings.warn(
                f"Hextuples files are always utf-8 encoded, "
                f"I was passed: {kwargs.get('encoding')}, "
                "but I'm still going to use utf-8"
            )

        assert (
            graph.store.context_aware
        ), "Hextuples Parser needs a context-aware store!"

        cg = ConjunctiveGraph(store=graph.store, identifier=graph.identifier)
        cg.default_context = graph

        # handle different source types - only file and string (data) for now
        if hasattr(source, "file"):
            with open(source.file.name) as fp:
                for line in fp:
                    self._parse_hextuple(cg, self._load_json_line(line))
        elif hasattr(source, "_InputSource__bytefile"):
            if hasattr(source._InputSource__bytefile, "wrapped"):
                for line in source._InputSource__bytefile.wrapped.strip().splitlines():
                    self._parse_hextuple(cg, self._load_json_line(line))
import warnings from rdflib.graph import ConjunctiveGraph from rdflib.parser import URLInputSource import rdflib.parser from rdflib.namespace import RDF, XSD from rdflib.term import URIRef, BNode, Literal from ..shared.jsonld.context import Context, Term, UNDEF from ..shared.jsonld.util import ( json, source_to_json, VOCAB_DELIMS, context_from_urlinputsource, ) from ..shared.jsonld.keys import ( CONTEXT, GRAPH, ID, INCLUDED, INDEX, JSON, LANG, LIST, NEST, NONE, REV, SET, TYPE, VALUE, VOCAB, ) __all__ = ["JsonLDParser", "to_rdf"] # Add jsonld suffix so RDFLib can guess format from file name try: from rdflib.util import SUFFIX_FORMAT_MAP if "jsonld" not in SUFFIX_FORMAT_MAP: SUFFIX_FORMAT_MAP["jsonld"] = "application/ld+json" except ImportError: pass TYPE_TERM = Term(str(RDF.type), TYPE, VOCAB) # type: ignore[call-arg] ALLOW_LISTS_OF_LISTS = True # NOTE: Not allowed in JSON-LD 1.0 class JsonLDParser(rdflib.parser.Parser): def __init__(self): super(JsonLDParser, self).__init__() def parse(self, source, sink, **kwargs): # TODO: docstring w. args and return value encoding = kwargs.get("encoding") or "utf-8" if encoding not in ("utf-8", "utf-16"): warnings.warn( "JSON should be encoded as unicode. " "Given encoding was: %s" % encoding ) base = kwargs.get("base") or sink.absolutize( source.getPublicId() or source.getSystemId() or "" ) context_data = kwargs.get("context") if not context_data and isinstance(source, URLInputSource): context_data = context_from_urlinputsource(source) try: version = float(kwargs.get("version", "1.0")) except ValueError: version = None generalized_rdf = kwargs.get("generalized_rdf", False) data = source_to_json(source) # NOTE: A ConjunctiveGraph parses into a Graph sink, so no sink will be # context_aware. Keeping this check in case RDFLib is changed, or # someone passes something context_aware to this parser directly. 
if not sink.context_aware: conj_sink = ConjunctiveGraph(store=sink.store, identifier=sink.identifier) else: conj_sink = sink to_rdf(data, conj_sink, base, context_data, version, generalized_rdf) def to_rdf( data, dataset, base=None, context_data=None, version=None, generalized_rdf=False, allow_lists_of_lists=None, ): # TODO: docstring w. args and return value context = Context(base=base, version=version) if context_data: context.load(context_data) parser = Parser( generalized_rdf=generalized_rdf, allow_lists_of_lists=allow_lists_of_lists ) return parser.parse(data, context, dataset) class Parser(object): def __init__(self, generalized_rdf=False, allow_lists_of_lists=None): self.generalized_rdf = generalized_rdf self.allow_lists_of_lists = ( allow_lists_of_lists if allow_lists_of_lists is not None else ALLOW_LISTS_OF_LISTS ) def parse(self, data, context, dataset): topcontext = False if isinstance(data, list): resources = data elif isinstance(data, dict): local_context = data.get(CONTEXT) if local_context: context.load(local_context, context.base) topcontext = True resources = data if not isinstance(resources, list): resources = [resources] if context.vocab: dataset.bind(None, context.vocab) for name, term in context.terms.items(): if term.id and term.id.endswith(VOCAB_DELIMS): dataset.bind(name, term.id) graph = dataset.default_context if dataset.context_aware else dataset for node in resources: self._add_to_graph(dataset, graph, context, node, topcontext) return graph def _add_to_graph(self, dataset, graph, context, node, topcontext=False): if not isinstance(node, dict) or context.get_value(node): return if CONTEXT in node and not topcontext: local_context = node[CONTEXT] if local_context: context = context.subcontext(local_context) else: context = Context(base=context.doc_base) context = context.get_context_for_type(node) id_val = context.get_id(node) if id_val is None: id_val = self._get_nested_id(context, node) if isinstance(id_val, str): subj = 
self._to_rdf_id(context, id_val) else: subj = BNode() if subj is None: return None # NOTE: crude way to signify that this node might represent a named graph no_id = id_val is None for key, obj in node.items(): if key == CONTEXT or key in context.get_keys(ID): continue if key == REV or key in context.get_keys(REV): for rkey, robj in obj.items(): self._key_to_graph( dataset, graph, context, subj, rkey, robj, reverse=True, no_id=no_id, ) else: self._key_to_graph(dataset, graph, context, subj, key, obj, no_id=no_id) return subj def _get_nested_id(self, context, node): for key, obj in node.items(): if context.version >= 1.1 and key in context.get_keys(NEST): term = context.terms.get(key) if term and term.id is None: continue objs = obj if isinstance(obj, list) else [obj] for obj in objs: if not isinstance(obj, dict): continue id_val = context.get_id(obj) if not id_val: subcontext = context.get_context_for_term( context.terms.get(key) ) id_val = self._get_nested_id(subcontext, obj) if isinstance(id_val, str): return id_val def _key_to_graph( self, dataset, graph, context, subj, key, obj, reverse=False, no_id=False ): if isinstance(obj, list): obj_nodes = obj else: obj_nodes = [obj] term = context.terms.get(key) if term: term_id = term.id if term.type == JSON: obj_nodes = [self._to_typed_json_value(obj)] elif LIST in term.container: obj_nodes = [{LIST: obj_nodes}] elif isinstance(obj, dict): obj_nodes = self._parse_container(context, term, obj) else: term_id = None if TYPE in (key, term_id): term = TYPE_TERM if GRAPH in (key, term_id): if dataset.context_aware and not no_id: subgraph = dataset.get_context(subj) else: subgraph = graph for onode in obj_nodes: self._add_to_graph(dataset, subgraph, context, onode) return if SET in (key, term_id): for onode in obj_nodes: self._add_to_graph(dataset, graph, context, onode) return if INCLUDED in (key, term_id): for onode in obj_nodes: self._add_to_graph(dataset, graph, context, onode) return if context.version >= 1.1 and key in 
context.get_keys(NEST): term = context.terms.get(key) if term and term.id is None: return objs = obj if isinstance(obj, list) else [obj] for obj in objs: if not isinstance(obj, dict): continue for nkey, nobj in obj.items(): # NOTE: we've already captured subject if nkey in context.get_keys(ID): continue subcontext = context.get_context_for_type(obj) self._key_to_graph(dataset, graph, subcontext, subj, nkey, nobj) return pred_uri = term.id if term else context.expand(key) context = context.get_context_for_term(term) flattened = [] for obj in obj_nodes: if isinstance(obj, dict): objs = context.get_set(obj) if objs is not None: obj = objs if isinstance(obj, list): flattened += obj continue flattened.append(obj) obj_nodes = flattened if not pred_uri: return if term and term.reverse: reverse = not reverse bid = self._get_bnodeid(pred_uri) if bid: if not self.generalized_rdf: return pred = BNode(bid) else: pred = URIRef(pred_uri) for obj_node in obj_nodes: obj = self._to_object(dataset, graph, context, term, obj_node) if obj is None: continue if reverse: graph.add((obj, pred, subj)) else: graph.add((subj, pred, obj)) def _parse_container(self, context, term, obj): if LANG in term.container: obj_nodes = [] for lang, values in obj.items(): if not isinstance(values, list): values = [values] if lang in context.get_keys(NONE): obj_nodes += values else: for v in values: obj_nodes.append((v, lang)) return obj_nodes v11 = context.version >= 1.1 if v11 and GRAPH in term.container and ID in term.container: return [ dict({GRAPH: o}) if k in context.get_keys(NONE) else dict({ID: k, GRAPH: o}) if isinstance(o, dict) else o for k, o in obj.items() ] elif v11 and GRAPH in term.container and INDEX in term.container: return [dict({GRAPH: o}) for k, o in obj.items()] elif v11 and GRAPH in term.container: return [dict({GRAPH: obj})] elif v11 and ID in term.container: return [ dict({ID: k}, **o) if isinstance(o, dict) and k not in context.get_keys(NONE) else o for k, o in obj.items() ] elif 
v11 and TYPE in term.container: return [ self._add_type( context, {ID: context.expand(o) if term.type == VOCAB else o} if isinstance(o, str) else o, k, ) if isinstance(o, (dict, str)) and k not in context.get_keys(NONE) else o for k, o in obj.items() ] elif INDEX in term.container: obj_nodes = [] for key, nodes in obj.items(): if not isinstance(nodes, list): nodes = [nodes] for node in nodes: if v11 and term.index and key not in context.get_keys(NONE): if not isinstance(node, dict): node = {ID: node} values = node.get(term.index, []) if not isinstance(values, list): values = [values] values.append(key) node[term.index] = values obj_nodes.append(node) return obj_nodes return [obj] @staticmethod def _add_type(context, o, k): otype = context.get_type(o) or [] if otype and not isinstance(otype, list): otype = [otype] otype.append(k) o[TYPE] = otype return o def _to_object(self, dataset, graph, context, term, node, inlist=False): if isinstance(node, tuple): value, lang = node if value is None: return if lang and " " in lang: return return Literal(value, lang=lang) if isinstance(node, dict): node_list = context.get_list(node) if node_list is not None: if inlist and not self.allow_lists_of_lists: return listref = self._add_list(dataset, graph, context, term, node_list) if listref: return listref else: # expand compacted value if term and term.type: if term.type == JSON: node = self._to_typed_json_value(node) elif node is None: return elif term.type == ID and isinstance(node, str): node = {ID: context.resolve(node)} elif term.type == VOCAB and isinstance(node, str): node = {ID: context.expand(node) or context.resolve_iri(node)} else: node = {TYPE: term.type, VALUE: node} else: if node is None: return if isinstance(node, float): return Literal(node, datatype=XSD.double) if term and term.language is not UNDEF: lang = term.language else: lang = context.language return Literal(node, lang=lang) lang = context.get_language(node) datatype = not lang and context.get_type(node) or 
None value = context.get_value(node) if datatype in context.get_keys(JSON): node = self._to_typed_json_value(value) datatype = context.get_type(node) value = context.get_value(node) if lang or context.get_key(VALUE) in node or VALUE in node: if value is None: return None if lang: if " " in lang: return return Literal(value, lang=lang) elif datatype: return Literal(value, datatype=context.expand(datatype)) else: return Literal(value) else: return self._add_to_graph(dataset, graph, context, node) def _to_rdf_id(self, context, id_val): bid = self._get_bnodeid(id_val) if bid: return BNode(bid) else: uri = context.resolve(id_val) if not self.generalized_rdf and ":" not in uri: return None return URIRef(uri) def _get_bnodeid(self, ref): if not ref.startswith("_:"): return bid = ref.split("_:", 1)[-1] return bid or None def _add_list(self, dataset, graph, context, term, node_list): if not isinstance(node_list, list): node_list = [node_list] first_subj = BNode() subj, rest = first_subj, None for node in node_list: if node is None: continue if rest: graph.add((subj, RDF.rest, rest)) subj = rest obj = self._to_object(dataset, graph, context, term, node, inlist=True) if obj is None: continue graph.add((subj, RDF.first, obj)) rest = BNode() if rest: graph.add((subj, RDF.rest, RDF.nil)) return first_subj else: return RDF.nil @staticmethod def _to_typed_json_value(value): return { TYPE: URIRef("%sJSON" % str(RDF)), VALUE: json.dumps( value, separators=(",", ":"), sort_keys=True, ensure_ascii=False ), } rdflib-6.1.1/rdflib/plugins/parsers/notation3.py000077500000000000000000001702171415774155300217020ustar00rootroot00000000000000#!/usr/bin/env python """ notation3.py - Standalone Notation3 Parser Derived from CWM, the Closed World Machine Authors of the original suite: * Dan Connolly <@@> * Tim Berners-Lee <@@> * Yosi Scharf <@@> * Joseph M. Reagle Jr. * Rich Salz http://www.w3.org/2000/10/swap/notation3.py Copyright 2000-2007, World Wide Web Consortium. Copyright 2001, MIT. 
Copyright 2001, Zolera Systems Inc. License: W3C Software License http://www.w3.org/Consortium/Legal/copyright-software Modified by Sean B. Palmer Copyright 2007, Sean B. Palmer. Modified to work with rdflib by Gunnar Aastrand Grimnes Copyright 2010, Gunnar A. Grimnes """ import sys import os import re import codecs from decimal import Decimal from uuid import uuid4 from rdflib.exceptions import ParserError from rdflib.term import URIRef, BNode, Literal, Variable, _XSD_PFX, _unique_id from rdflib.graph import QuotedGraph, ConjunctiveGraph, Graph from rdflib.compat import long_type __all__ = [ "BadSyntax", "N3Parser", "TurtleParser", "splitFragP", "join", "base", "runNamespace", "uniqueURI", "hexify", ] from rdflib.parser import Parser def splitFragP(uriref, punct=0): """split a URI reference before the fragment Punctuation is kept. e.g. >>> splitFragP("abc#def") ('abc', '#def') >>> splitFragP("abcdef") ('abcdef', '') """ i = uriref.rfind("#") if i >= 0: return uriref[:i], uriref[i:] else: return uriref, "" def join(here, there): """join an absolute URI and URI reference (non-ascii characters are supported/doctested; haven't checked the details of the IRI spec though) ``here`` is assumed to be absolute. ``there`` is URI reference. >>> join('http://example/x/y/z', '../abc') 'http://example/x/abc' Raise ValueError if there uses relative path syntax but here has no hierarchical path. >>> join('mid:foo@example', '../foo') # doctest: +NORMALIZE_WHITESPACE Traceback (most recent call last): raise ValueError(here) ValueError: Base has no slash after colon - with relative '../foo'. >>> join('http://example/x/y/z', '') 'http://example/x/y/z' >>> join('mid:foo@example', '#foo') 'mid:foo@example#foo' We grok IRIs >>> len(u'Andr\\xe9') 5 >>> join('http://example.org/', u'#Andr\\xe9') u'http://example.org/#Andr\\xe9' """ # assert(here.find("#") < 0), \ # "Base may not contain hash: '%s'" % here # why must caller splitFrag? 
slashl = there.find("/") colonl = there.find(":") # join(base, 'foo:/') -- absolute if colonl >= 0 and (slashl < 0 or colonl < slashl): return there bcolonl = here.find(":") assert bcolonl >= 0, ( "Base uri '%s' is not absolute" % here ) # else it's not absolute path, frag = splitFragP(there) if not path: return here + frag # join('mid:foo@example', '../foo') bzzt if here[bcolonl + 1] != "/": raise ValueError( "Base <%s> has no slash after " "colon - with relative '%s'." % (here, there) ) if here[bcolonl + 1 : bcolonl + 3] == "//": bpath = here.find("/", bcolonl + 3) else: bpath = bcolonl + 1 # join('http://xyz', 'foo') if bpath < 0: bpath = len(here) here = here + "/" # join('http://xyz/', '//abc') => 'http://abc' if there[:2] == "//": return here[: bcolonl + 1] + there # join('http://xyz/', '/abc') => 'http://xyz/abc' if there[:1] == "/": return here[:bpath] + there slashr = here.rfind("/") while 1: if path[:2] == "./": path = path[2:] if path == ".": path = "" elif path[:3] == "../" or path == "..": path = path[3:] i = here.rfind("/", bpath, slashr) if i >= 0: here = here[: i + 1] slashr = i else: break return here[: slashr + 1] + path + frag def base(): """The base URI for this process - the Web equiv of cwd Relative or absolute unix-standard filenames parsed relative to this yield the URI of the file. 
If we had a reliable way of getting a computer name, we should put it in the hostname just to prevent ambiguity """ # return "file://" + hostname + os.getcwd() + "/" return "file://" + _fixslash(os.getcwd()) + "/" def _fixslash(s): """Fix windowslike filename to unixlike - (#ifdef WINDOWS)""" s = s.replace("\\", "/") if s[0] != "/" and s[1] == ":": s = s[2:] # @@@ Hack when drive letter present return s CONTEXT = 0 PRED = 1 SUBJ = 2 OBJ = 3 PARTS = PRED, SUBJ, OBJ ALL4 = CONTEXT, PRED, SUBJ, OBJ SYMBOL = 0 FORMULA = 1 LITERAL = 2 LITERAL_DT = 21 LITERAL_LANG = 22 ANONYMOUS = 3 XMLLITERAL = 25 Logic_NS = "http://www.w3.org/2000/10/swap/log#" NODE_MERGE_URI = Logic_NS + "is" # Pseudo-property indicating node merging forSomeSym = Logic_NS + "forSome" forAllSym = Logic_NS + "forAll" RDF_type_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" RDF_NS_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" OWL_NS = "http://www.w3.org/2002/07/owl#" DAML_sameAs_URI = OWL_NS + "sameAs" parsesTo_URI = Logic_NS + "parsesTo" RDF_spec = "http://www.w3.org/TR/REC-rdf-syntax/" List_NS = RDF_NS_URI # From 20030808 _Old_Logic_NS = "http://www.w3.org/2000/10/swap/log.n3#" N3_first = (SYMBOL, List_NS + "first") N3_rest = (SYMBOL, List_NS + "rest") N3_li = (SYMBOL, List_NS + "li") N3_nil = (SYMBOL, List_NS + "nil") N3_List = (SYMBOL, List_NS + "List") N3_Empty = (SYMBOL, List_NS + "Empty") runNamespaceValue = None def runNamespace(): """Returns a URI suitable as a namespace for run-local objects""" # @@@ include hostname (privacy?) (hash it?) 
global runNamespaceValue if runNamespaceValue is None: runNamespaceValue = join(base(), _unique_id()) + "#" return runNamespaceValue nextu = 0 def uniqueURI(): """A unique URI""" global nextu nextu += 1 return runNamespace() + "u_" + str(nextu) tracking = False chatty_flag = 50 # from why import BecauseOfData, becauseSubexpression def BecauseOfData(*args, **kargs): # print args, kargs pass def becauseSubexpression(*args, **kargs): # print args, kargs pass N3_forSome_URI = forSomeSym N3_forAll_URI = forAllSym # Magic resources we know about ADDED_HASH = "#" # Stop where we use this in case we want to remove it! # This is the hash on namespace URIs RDF_type = (SYMBOL, RDF_type_URI) DAML_sameAs = (SYMBOL, DAML_sameAs_URI) LOG_implies_URI = "http://www.w3.org/2000/10/swap/log#implies" BOOLEAN_DATATYPE = _XSD_PFX + "boolean" DECIMAL_DATATYPE = _XSD_PFX + "decimal" DOUBLE_DATATYPE = _XSD_PFX + "double" FLOAT_DATATYPE = _XSD_PFX + "float" INTEGER_DATATYPE = _XSD_PFX + "integer" option_noregen = 0 # If set, do not regenerate genids on output # @@ I18n - the notname chars need extending for well known unicode non-text # characters. The XML spec switched to assuming unknown things were name # characters. 
# _namechars = string.lowercase + string.uppercase + string.digits + '_-' _notQNameChars = set("\t\r\n !\"#$&'()*,+/;<=>?@[\\]^`{|}~") # else valid qname :-/ _notKeywordsChars = _notQNameChars | {"."} _notNameChars = _notQNameChars | {":"} # Assume anything else valid name :-/ _rdfns = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" hexChars = set("ABCDEFabcdef0123456789") escapeChars = set("(_~.-!$&'()*+,;=/?#@%)") # valid for \ escapes in localnames numberChars = set("0123456789-") numberCharsPlus = numberChars | {"+", "."} def unicodeExpand(m): try: return chr(int(m.group(1), 16)) except: raise Exception("Invalid unicode code point: " + m.group(1)) unicodeEscape4 = re.compile(r"\\u([0-9a-fA-F]{4})") unicodeEscape8 = re.compile(r"\\U([0-9a-fA-F]{8})") N3CommentCharacter = "#" # For unix script # ! compatibility # Parse string to sink # # Regular expressions: eol = re.compile(r"[ \t]*(#[^\n]*)?\r?\n") # end of line, poss. w/comment eof = re.compile(r"[ \t]*(#[^\n]*)?$") # end of file, poss. 
w/comment ws = re.compile(r"[ \t]*") # Whitespace not including NL signed_integer = re.compile(r"[-+]?[0-9]+") # integer integer_syntax = re.compile(r"[-+]?[0-9]+") decimal_syntax = re.compile(r"[-+]?[0-9]*\.[0-9]+") exponent_syntax = re.compile( r"[-+]?(?:[0-9]+\.[0-9]*|\.[0-9]+|[0-9]+)(?:e|E)[-+]?[0-9]+" ) digitstring = re.compile(r"[0-9]+") # Unsigned integer interesting = re.compile(r"""[\\\r\n\"\']""") langcode = re.compile(r"[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*") class SinkParser: def __init__( self, store, openFormula=None, thisDoc="", baseURI=None, genPrefix="", why=None, turtle=False, ): """note: namespace names should *not* end in # ; the # will get added during qname processing""" self._bindings = {} if thisDoc != "": assert ":" in thisDoc, "Document URI not absolute: <%s>" % thisDoc self._bindings[""] = thisDoc + "#" # default self._store = store if genPrefix: store.setGenPrefix(genPrefix) # pass it on self._thisDoc = thisDoc self.lines = 0 # for error handling self.startOfLine = 0 # For calculating character number self._genPrefix = genPrefix self.keywords = ["a", "this", "bind", "has", "is", "of", "true", "false"] self.keywordsSet = 0 # Then only can others be considered qnames self._anonymousNodes = {} # Dict of anon nodes already declared ln: Term self._variables = {} self._parentVariables = {} self._reason = why # Why the parser was asked to parse this self.turtle = turtle # raise exception when encountering N3 extensions # Turtle allows single or double quotes around strings, whereas N3 # only allows double quotes. 
self.string_delimiters = ('"', "'") if turtle else ('"',) self._reason2 = None # Why these triples # was: diag.tracking if tracking: self._reason2 = BecauseOfData( store.newSymbol(thisDoc), because=self._reason ) if baseURI: self._baseURI = baseURI else: if thisDoc: self._baseURI = thisDoc else: self._baseURI = None assert not self._baseURI or ":" in self._baseURI if not self._genPrefix: if self._thisDoc: self._genPrefix = self._thisDoc + "#_g" else: self._genPrefix = uniqueURI() if openFormula is None and not turtle: if self._thisDoc: self._formula = store.newFormula(thisDoc + "#_formula") else: self._formula = store.newFormula() else: self._formula = openFormula self._context = self._formula self._parentContext = None def here(self, i): """String generated from position in file This is for repeatability when referring people to bnodes in a document. This has diagnostic uses less formally, as it should point one to which bnode the arbitrary identifier actually is. It gives the line and character number of the '[' charcacter or path character which introduced the blank node. The first blank node is boringly _L1C1. It used to be used only for tracking, but for tests in general it makes the canonical ordering of bnodes repeatable.""" return "%s_L%iC%i" % (self._genPrefix, self.lines, i - self.startOfLine + 1) def formula(self): return self._formula def loadStream(self, stream): return self.loadBuf(stream.read()) # Not ideal def loadBuf(self, buf): """Parses a buffer and returns its top level formula""" self.startDoc() self.feed(buf) return self.endDoc() # self._formula def feed(self, octets): """Feed an octet stream to the parser if BadSyntax is raised, the string passed in the exception object is the remainder after any statements have been parsed. 
So if there is more data to feed to the parser, it should be straightforward to recover.""" if not isinstance(octets, str): s = octets.decode("utf-8") # NB already decoded, so \ufeff if len(s) > 0 and s[0] == codecs.BOM_UTF8.decode("utf-8"): s = s[1:] else: s = octets i = 0 while i >= 0: j = self.skipSpace(s, i) if j < 0: return i = self.directiveOrStatement(s, j) if i < 0: # print("# next char: %s" % s[j]) self.BadSyntax(s, j, "expected directive or statement") def directiveOrStatement(self, argstr, h): i = self.skipSpace(argstr, h) if i < 0: return i # EOF if self.turtle: j = self.sparqlDirective(argstr, i) if j >= 0: return j j = self.directive(argstr, i) if j >= 0: return self.checkDot(argstr, j) j = self.statement(argstr, i) if j >= 0: return self.checkDot(argstr, j) return j # @@I18N # _namechars = string.lowercase + string.uppercase + string.digits + '_-' def tok(self, tok, argstr, i, colon=False): """Check for keyword. Space must have been stripped on entry and we must not be at end of file. if colon, then keyword followed by colon is ok (@prefix: is ok, rdf:type shortcut a must be followed by ws) """ assert tok[0] not in _notNameChars # not for punctuation if argstr[i] == "@": i += 1 else: if tok not in self.keywords: return -1 # No, this has neither keywords declaration nor "@" i_plus_len_tok = i + len(tok) if ( argstr[i:i_plus_len_tok] == tok and (argstr[i_plus_len_tok] in _notKeywordsChars) or (colon and argstr[i_plus_len_tok] == ":") ): return i_plus_len_tok else: return -1 def sparqlTok(self, tok, argstr, i): """Check for SPARQL keyword. Space must have been stripped on entry and we must not be at end of file. 
Case insensitive and not preceded by @ """ assert tok[0] not in _notNameChars # not for punctuation len_tok = len(tok) if argstr[i : i + len_tok].lower() == tok.lower() and ( argstr[i + len_tok] in _notQNameChars ): i += len_tok return i else: return -1 def directive(self, argstr, i): j = self.skipSpace(argstr, i) if j < 0: return j # eof res = [] j = self.tok("bind", argstr, i) # implied "#". Obsolete. if j > 0: self.BadSyntax(argstr, i, "keyword bind is obsolete: use @prefix") j = self.tok("keywords", argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'keywords' when in Turtle mode.") i = self.commaSeparatedList(argstr, j, res, self.bareWord) if i < 0: self.BadSyntax( argstr, i, "'@keywords' needs comma separated list of words" ) self.setKeywords(res[:]) return i j = self.tok("forAll", argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'forAll' when in Turtle mode.") i = self.commaSeparatedList(argstr, j, res, self.uri_ref2) if i < 0: self.BadSyntax(argstr, i, "Bad variable list after @forAll") for x in res: # self._context.declareUniversal(x) if x not in self._variables or x in self._parentVariables: self._variables[x] = self._context.newUniversal(x) return i j = self.tok("forSome", argstr, i) if j > 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'forSome' when in Turtle mode.") i = self.commaSeparatedList(argstr, j, res, self.uri_ref2) if i < 0: self.BadSyntax(argstr, i, "Bad variable list after @forSome") for x in res: self._context.declareExistential(x) return i j = self.tok("prefix", argstr, i, colon=True) # no implied "#" if j >= 0: t = [] i = self.qname(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected qname after @prefix") j = self.uri_ref2(argstr, i, t) if j < 0: self.BadSyntax(argstr, i, "expected after @prefix _qname_") ns = self.uriOf(t[1]) if self._baseURI: ns = join(self._baseURI, ns) elif ":" not in ns: self.BadSyntax( argstr, j, f"With no base URI, cannot use relative URI in @prefix <{ns}>", ) 
assert ":" in ns # must be absolute self._bindings[t[0][0]] = ns self.bind(t[0][0], hexify(ns)) return j j = self.tok("base", argstr, i) # Added 2007/7/7 if j >= 0: t = [] i = self.uri_ref2(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected after @base ") ns = self.uriOf(t[0]) if self._baseURI: ns = join(self._baseURI, ns) else: self.BadSyntax( argstr, j, "With no previous base URI, cannot use " + "relative URI in @base <" + ns + ">", ) assert ":" in ns # must be absolute self._baseURI = ns return i return -1 # Not a directive, could be something else. def sparqlDirective(self, argstr, i): """ turtle and trig support BASE/PREFIX without @ and without terminating . """ j = self.skipSpace(argstr, i) if j < 0: return j # eof j = self.sparqlTok("PREFIX", argstr, i) if j >= 0: t = [] i = self.qname(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected qname after @prefix") j = self.uri_ref2(argstr, i, t) if j < 0: self.BadSyntax(argstr, i, "expected after @prefix _qname_") ns = self.uriOf(t[1]) if self._baseURI: ns = join(self._baseURI, ns) elif ":" not in ns: self.BadSyntax( argstr, j, "With no base URI, cannot use " + "relative URI in @prefix <" + ns + ">", ) assert ":" in ns # must be absolute self._bindings[t[0][0]] = ns self.bind(t[0][0], hexify(ns)) return j j = self.sparqlTok("BASE", argstr, i) if j >= 0: t = [] i = self.uri_ref2(argstr, j, t) if i < 0: self.BadSyntax(argstr, j, "expected after @base ") ns = self.uriOf(t[0]) if self._baseURI: ns = join(self._baseURI, ns) else: self.BadSyntax( argstr, j, "With no previous base URI, cannot use " + "relative URI in @base <" + ns + ">", ) assert ":" in ns # must be absolute self._baseURI = ns return i return -1 # Not a directive, could be something else. 
def bind(self, qn, uri): assert isinstance(uri, bytes), "Any unicode must be %x-encoded already" if qn == "": self._store.setDefaultNamespace(uri) else: self._store.bind(qn, uri) def setKeywords(self, k): """Takes a list of strings""" if k is None: self.keywordsSet = 0 else: self.keywords = k self.keywordsSet = 1 def startDoc(self): # was: self._store.startDoc() self._store.startDoc(self._formula) def endDoc(self): """Signal end of document and stop parsing. returns formula""" self._store.endDoc(self._formula) # don't canonicalize yet return self._formula def makeStatement(self, quadruple): # $$$$$$$$$$$$$$$$$$$$$ # print "# Parser output: ", `quadruple` self._store.makeStatement(quadruple, why=self._reason2) def statement(self, argstr, i): r = [] i = self.object(argstr, i, r) # Allow literal for subject - extends RDF if i < 0: return i j = self.property_list(argstr, i, r[0]) if j < 0: self.BadSyntax(argstr, i, "expected propertylist") return j def subject(self, argstr, i, res): return self.item(argstr, i, res) def verb(self, argstr, i, res): """has _prop_ is _prop_ of a = _prop_ >- prop -> <- prop -< _operator_""" j = self.skipSpace(argstr, i) if j < 0: return j # eof r = [] j = self.tok("has", argstr, i) if j >= 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'has' keyword in Turtle mode") i = self.prop(argstr, j, r) if i < 0: self.BadSyntax(argstr, j, "expected property after 'has'") res.append(("->", r[0])) return i j = self.tok("is", argstr, i) if j >= 0: if self.turtle: self.BadSyntax(argstr, i, "Found 'is' keyword in Turtle mode") i = self.prop(argstr, j, r) if i < 0: self.BadSyntax(argstr, j, "expected after 'is'") j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax( argstr, i, "End of file found, expected property after 'is'" ) i = j j = self.tok("of", argstr, i) if j < 0: self.BadSyntax(argstr, i, "expected 'of' after 'is' ") res.append(("<-", r[0])) return j j = self.tok("a", argstr, i) if j >= 0: res.append(("->", RDF_type)) return j if argstr[i 
: i + 2] == "<=": if self.turtle: self.BadSyntax(argstr, i, "Found '<=' in Turtle mode. ") res.append(("<-", self._store.newSymbol(Logic_NS + "implies"))) return i + 2 if argstr[i] == "=": if self.turtle: self.BadSyntax(argstr, i, "Found '=' in Turtle mode") if argstr[i + 1] == ">": res.append(("->", self._store.newSymbol(Logic_NS + "implies"))) return i + 2 res.append(("->", DAML_sameAs)) return i + 1 if argstr[i : i + 2] == ":=": if self.turtle: self.BadSyntax(argstr, i, "Found ':=' in Turtle mode") # patch file relates two formulae, uses this @@ really? res.append(("->", Logic_NS + "becomes")) return i + 2 j = self.prop(argstr, i, r) if j >= 0: res.append(("->", r[0])) return j if argstr[i : i + 2] == ">-" or argstr[i : i + 2] == "<-": self.BadSyntax(argstr, j, ">- ... -> syntax is obsolete.") return -1 def prop(self, argstr, i, res): return self.item(argstr, i, res) def item(self, argstr, i, res): return self.path(argstr, i, res) def blankNode(self, uri=None): return self._store.newBlankNode(self._context, uri, why=self._reason2) def path(self, argstr, i, res): """Parse the path production.""" j = self.nodeOrLiteral(argstr, i, res) if j < 0: return j # nope while argstr[j] in {"!", "^"}: # no spaces, must follow exactly (?) ch = argstr[j] subj = res.pop() obj = self.blankNode(uri=self.here(j)) j = self.node(argstr, j + 1, res) if j < 0: self.BadSyntax(argstr, j, "EOF found in middle of path syntax") pred = res.pop() if ch == "^": # Reverse traverse self.makeStatement((self._context, pred, obj, subj)) else: self.makeStatement((self._context, pred, subj, obj)) res.append(obj) return j def anonymousNode(self, ln): """Remember or generate a term for one of these _: anonymous nodes""" term = self._anonymousNodes.get(ln, None) if term is not None: return term term = self._store.newBlankNode(self._context, why=self._reason2) self._anonymousNodes[ln] = term return term def node(self, argstr, i, res, subjectAlready=None): """Parse the production. 
Space is now skipped once at the beginning instead of in multiple calls to self.skipSpace(). """ subj = subjectAlready j = self.skipSpace(argstr, i) if j < 0: return j # eof i = j ch = argstr[i] # Quick 1-character checks first: if ch == "[": bnodeID = self.here(i) j = self.skipSpace(argstr, i + 1) if j < 0: self.BadSyntax(argstr, i, "EOF after '['") # Hack for "is" binding name to anon node if argstr[j] == "=": if self.turtle: self.BadSyntax( argstr, j, "Found '[=' or '[ =' when in turtle mode." ) i = j + 1 objs = [] j = self.objectList(argstr, i, objs) if j >= 0: subj = objs[0] if len(objs) > 1: for obj in objs: self.makeStatement((self._context, DAML_sameAs, subj, obj)) j = self.skipSpace(argstr, j) if j < 0: self.BadSyntax( argstr, i, "EOF when objectList expected after [ = " ) if argstr[j] == ";": j += 1 else: self.BadSyntax(argstr, i, "objectList expected after [= ") if subj is None: subj = self.blankNode(uri=bnodeID) i = self.property_list(argstr, j, subj) if i < 0: self.BadSyntax(argstr, j, "property_list expected") j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax( argstr, i, "EOF when ']' expected after [ " ) if argstr[j] != "]": self.BadSyntax(argstr, j, "']' expected") res.append(subj) return j + 1 if not self.turtle and ch == "{": # if self.turtle: # self.BadSyntax(argstr, i, # "found '{' while in Turtle mode, Formulas not supported!") ch2 = argstr[i + 1] if ch2 == "$": # a set i += 1 j = i + 1 List = [] first_run = True while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "needed '$}', found end.") if argstr[i : i + 2] == "$}": j = i + 2 break if not first_run: if argstr[i] == ",": i += 1 else: self.BadSyntax(argstr, i, "expected: ','") else: first_run = False item = [] j = self.item(argstr, i, item) # @@@@@ should be path, was object if j < 0: self.BadSyntax(argstr, i, "expected item in set or '$}'") List.append(self._store.intern(item[0])) res.append(self._store.newSet(List, self._context)) return j else: # parse a formula 
j = i + 1 oldParentContext = self._parentContext self._parentContext = self._context parentAnonymousNodes = self._anonymousNodes grandParentVariables = self._parentVariables self._parentVariables = self._variables self._anonymousNodes = {} self._variables = self._variables.copy() reason2 = self._reason2 self._reason2 = becauseSubexpression if subj is None: subj = self._store.newFormula() self._context = subj while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "needed '}', found end.") if argstr[i] == "}": j = i + 1 break j = self.directiveOrStatement(argstr, i) if j < 0: self.BadSyntax(argstr, i, "expected statement or '}'") self._anonymousNodes = parentAnonymousNodes self._variables = self._parentVariables self._parentVariables = grandParentVariables self._context = self._parentContext self._reason2 = reason2 self._parentContext = oldParentContext res.append(subj.close()) # No use until closed return j if ch == "(": thing_type = self._store.newList ch2 = argstr[i + 1] if ch2 == "$": thing_type = self._store.newSet i += 1 j = i + 1 List = [] while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "needed ')', found end.") if argstr[i] == ")": j = i + 1 break item = [] j = self.item(argstr, i, item) # @@@@@ should be path, was object if j < 0: self.BadSyntax(argstr, i, "expected item in list or ')'") List.append(self._store.intern(item[0])) res.append(thing_type(List, self._context)) return j j = self.tok("this", argstr, i) # This context if j >= 0: self.BadSyntax( argstr, i, "Keyword 'this' was ancient N3. Now use " + "@forSome and @forAll keywords.", ) # booleans j = self.tok("true", argstr, i) if j >= 0: res.append(True) return j j = self.tok("false", argstr, i) if j >= 0: res.append(False) return j if subj is None: # If this can be a named node, then check for a name. 
j = self.uri_ref2(argstr, i, res) if j >= 0: return j return -1 def property_list(self, argstr, i, subj): """Parse property list Leaves the terminating punctuation in the buffer """ while 1: while 1: # skip repeat ; j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax( argstr, i, "EOF found when expected verb in property list" ) if argstr[j] != ";": break i = j + 1 if argstr[j : j + 2] == ":-": if self.turtle: self.BadSyntax(argstr, j, "Found in ':-' in Turtle mode") i = j + 2 res = [] j = self.node(argstr, i, res, subj) if j < 0: self.BadSyntax(argstr, i, "bad {} or () or [] node after :- ") i = j continue i = j v = [] j = self.verb(argstr, i, v) if j <= 0: return i # void but valid objs = [] i = self.objectList(argstr, j, objs) if i < 0: self.BadSyntax(argstr, j, "objectList expected") for obj in objs: dira, sym = v[0] if dira == "->": self.makeStatement((self._context, sym, subj, obj)) else: self.makeStatement((self._context, sym, obj, subj)) j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, j, "EOF found in list of objects") if argstr[i] != ";": return i i += 1 # skip semicolon and continue def commaSeparatedList(self, argstr, j, res, what): """return value: -1 bad syntax; >1 new position in argstr res has things found appended """ i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "EOF found expecting comma sep list") if argstr[i] == ".": return j # empty list is OK i = what(argstr, i, res) if i < 0: return -1 while 1: j = self.skipSpace(argstr, i) if j < 0: return j # eof ch = argstr[j] if ch != ",": if ch != ".": return -1 return j # Found but not swallowed "." i = what(argstr, j + 1, res) if i < 0: self.BadSyntax(argstr, i, "bad list content") def objectList(self, argstr, i, res): i = self.object(argstr, i, res) if i < 0: return -1 while 1: j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, j, "EOF found after object") if argstr[j] != ",": return j # Found something else! 
i = self.object(argstr, j + 1, res) if i < 0: return i def checkDot(self, argstr, i): j = self.skipSpace(argstr, i) if j < 0: return j # eof ch = argstr[j] if ch == ".": return j + 1 # skip if ch == "}": return j # don't skip it if ch == "]": return j self.BadSyntax(argstr, j, "expected '.' or '}' or ']' at end of statement") def uri_ref2(self, argstr, i, res): """Generate uri from n3 representation. Note that the RDF convention of directly concatenating NS and local name is now used though I prefer inserting a '#' to make the namesapces look more like what XML folks expect. """ qn = [] j = self.qname(argstr, i, qn) if j >= 0: pfx, ln = qn[0] if pfx is None: assert 0, "not used?" ns = self._baseURI + ADDED_HASH # type: ignore[unreachable] else: try: ns = self._bindings[pfx] except KeyError: if pfx == "_": # Magic prefix 2001/05/30, can be changed res.append(self.anonymousNode(ln)) return j if not self.turtle and pfx == "": ns = join(self._baseURI or "", "#") else: self.BadSyntax(argstr, i, 'Prefix "%s:" not bound' % (pfx)) symb = self._store.newSymbol(ns + ln) res.append(self._variables.get(symb, symb)) return j i = self.skipSpace(argstr, i) if i < 0: return -1 if argstr[i] == "?": v = [] j = self.variable(argstr, i, v) if j > 0: # Forget variables as a class, only in context. res.append(v[0]) return j return -1 elif argstr[i] == "<": st = i + 1 i = argstr.find(">", st) if i >= 0: uref = argstr[st:i] # the join should dealt with "": # expand unicode escapes uref = unicodeEscape8.sub(unicodeExpand, uref) uref = unicodeEscape4.sub(unicodeExpand, uref) if self._baseURI: uref = join(self._baseURI, uref) # was: uripath.join else: assert ( ":" in uref ), "With no base URI, cannot deal with relative URIs" if argstr[i - 1] == "#" and not uref[-1:] == "#": uref += "#" # She meant it! Weirdness in urlparse? 
symb = self._store.newSymbol(uref) res.append(self._variables.get(symb, symb)) return i + 1 self.BadSyntax(argstr, j, "unterminated URI reference") elif self.keywordsSet: v = [] j = self.bareWord(argstr, i, v) if j < 0: return -1 # Forget variables as a class, only in context. if v[0] in self.keywords: self.BadSyntax(argstr, i, 'Keyword "%s" not allowed here.' % v[0]) res.append(self._store.newSymbol(self._bindings[""] + v[0])) return j else: return -1 def skipSpace(self, argstr, i): """Skip white space, newlines and comments. return -1 if EOF, else position of first non-ws character""" # Most common case is a non-commented line starting with few spaces and tabs. try: while True: ch = argstr[i] if ch in {" ", "\t"}: i += 1 continue elif ch not in {"#", "\r", "\n"}: return i break except IndexError: return -1 while 1: m = eol.match(argstr, i) if m is None: break self.lines += 1 self.startOfLine = i = m.end() # Point to first character unmatched m = ws.match(argstr, i) if m is not None: i = m.end() m = eof.match(argstr, i) return i if m is None else -1 def variable(self, argstr, i, res): """?abc -> variable(:abc)""" j = self.skipSpace(argstr, i) if j < 0: return -1 if argstr[j] != "?": return -1 j += 1 i = j if argstr[j] in numberChars: self.BadSyntax(argstr, j, "Variable name can't start with '%s'" % argstr[j]) len_argstr = len(argstr) while i < len_argstr and argstr[i] not in _notKeywordsChars: i += 1 if self._parentContext is None: varURI = self._store.newSymbol(self._baseURI + "#" + argstr[j:i]) if varURI not in self._variables: self._variables[varURI] = self._context.newUniversal( varURI, why=self._reason2 ) res.append(self._variables[varURI]) return i # @@ was: # self.BadSyntax(argstr, j, # "Can't use ?xxx syntax for variable in outermost level: %s" # % argstr[j-1:i]) varURI = self._store.newSymbol(self._baseURI + "#" + argstr[j:i]) if varURI not in self._parentVariables: self._parentVariables[varURI] = self._parentContext.newUniversal( varURI, 
why=self._reason2 ) res.append(self._parentVariables[varURI]) return i def bareWord(self, argstr, i, res): """abc -> :abc""" j = self.skipSpace(argstr, i) if j < 0: return -1 if argstr[j] in numberChars or argstr[j] in _notKeywordsChars: return -1 i = j len_argstr = len(argstr) while i < len_argstr and argstr[i] not in _notKeywordsChars: i += 1 res.append(argstr[j:i]) return i def qname(self, argstr, i, res): """ xyz:def -> ('xyz', 'def') If not in keywords and keywordsSet: def -> ('', 'def') :def -> ('', 'def') """ i = self.skipSpace(argstr, i) if i < 0: return -1 c = argstr[i] if c in numberCharsPlus: return -1 len_argstr = len(argstr) if c not in _notNameChars: j = i i += 1 try: while argstr[i] not in _notNameChars: i += 1 except IndexError: pass # Very rare. if argstr[i - 1] == ".": # qname cannot end with "." i -= 1 if i == j: return -1 ln = argstr[j:i] else: # First character is non-alpha ln = "" # Was: None - TBL (why? useful?) if i < len_argstr and argstr[i] == ":": pfx = ln # bnodes names have different rules if pfx == "_": allowedChars = _notNameChars else: allowedChars = _notQNameChars i += 1 lastslash = False start = i ln = "" while i < len_argstr: c = argstr[i] if c == "\\" and not lastslash: # Very rare. lastslash = True if start < i: ln += argstr[start:i] start = i + 1 elif c not in allowedChars or lastslash: # Most common case is "a-zA-Z" if lastslash: if c not in escapeChars: raise BadSyntax( self._thisDoc, self.line, argstr, i, "illegal escape " + c, ) elif c == "%": # Very rare. if ( argstr[i + 1] not in hexChars or argstr[i + 2] not in hexChars ): raise BadSyntax( self._thisDoc, self.line, argstr, i, "illegal hex escape " + c, ) lastslash = False else: break i += 1 if lastslash: raise BadSyntax( self._thisDoc, self.line, argstr, i, "qname cannot end with \\" ) if argstr[i - 1] == ".": # localname cannot end in . 
if len(ln) == 0 and start == i: return -1 i -= 1 if start < i: ln += argstr[start:i] res.append((pfx, ln)) return i else: # delimiter was not ":" if ln and self.keywordsSet and ln not in self.keywords: res.append(("", ln)) return i return -1 def object(self, argstr, i, res): j = self.subject(argstr, i, res) if j >= 0: return j else: j = self.skipSpace(argstr, i) if j < 0: return -1 else: i = j ch = argstr[i] if ch in self.string_delimiters: ch_three = ch * 3 if argstr[i : i + 3] == ch_three: delim = ch_three i += 3 else: delim = ch i += 1 j, s = self.strconst(argstr, i, delim) res.append(self._store.newLiteral(s)) return j else: return -1 def nodeOrLiteral(self, argstr, i, res): j = self.node(argstr, i, res) startline = self.lines # Remember where for error messages if j >= 0: return j else: j = self.skipSpace(argstr, i) if j < 0: return -1 else: i = j ch = argstr[i] if ch in numberCharsPlus: m = exponent_syntax.match(argstr, i) if m: j = m.end() res.append(float(argstr[i:j])) return j m = decimal_syntax.match(argstr, i) if m: j = m.end() res.append(Decimal(argstr[i:j])) return j m = integer_syntax.match(argstr, i) if m: j = m.end() res.append(long_type(argstr[i:j])) return j # return -1 ## or fall through? ch_three = ch * 3 if ch in self.string_delimiters: if argstr[i : i + 3] == ch_three: delim = ch_three i += 3 else: delim = ch i += 1 dt = None j, s = self.strconst(argstr, i, delim) lang = None if argstr[j] == "@": # Language? 
m = langcode.match(argstr, j + 1) if m is None: raise BadSyntax( self._thisDoc, startline, argstr, i, "Bad language code syntax on string " + "literal, after @", ) i = m.end() lang = argstr[j + 1 : i] j = i if argstr[j : j + 2] == "^^": res2 = [] j = self.uri_ref2(argstr, j + 2, res2) # Read datatype URI dt = res2[0] res.append(self._store.newLiteral(s, dt, lang)) return j else: return -1 def uriOf(self, sym): if isinstance(sym, tuple): return sym[1] # old system for --pipe # return sym.uriref() # cwm api return sym def strconst(self, argstr, i, delim): """parse an N3 string constant delimited by delim. return index, val """ delim1 = delim[0] delim2, delim3, delim4, delim5 = delim1 * 2, delim1 * 3, delim1 * 4, delim1 * 5 j = i ustr = "" # Empty unicode string startline = self.lines # Remember where for error messages len_argstr = len(argstr) while j < len_argstr: if argstr[j] == delim1: if delim == delim1: # done when delim is " or ' i = j + 1 return i, ustr if ( delim == delim3 ): # done when delim is """ or ''' and, respectively ... if argstr[j : j + 5] == delim5: # ... we have "" or '' before i = j + 5 ustr += delim2 return i, ustr if argstr[j : j + 4] == delim4: # ... we have " or ' before i = j + 4 ustr += delim1 return i, ustr if argstr[j : j + 3] == delim3: # current " or ' is part of delim i = j + 3 return i, ustr # we are inside of the string and current char is " or ' j += 1 ustr += delim1 continue m = interesting.search(argstr, j) # was argstr[j:]. # Note for pos param to work, MUST be compiled ... re bug? 
assert m, "Quote expected in string at ^ in %s^%s" % ( argstr[j - 20 : j], argstr[j : j + 20], ) # at least need a quote i = m.start() try: ustr += argstr[j:i] except UnicodeError: err = "" for c in argstr[j:i]: err = err + (" %02x" % ord(c)) streason = sys.exc_info()[1].__str__() raise BadSyntax( self._thisDoc, startline, argstr, j, "Unicode error appending characters" + " %s to string, because\n\t%s" % (err, streason), ) # print "@@@ i = ",i, " j=",j, "m.end=", m.end() ch = argstr[i] if ch == delim1: j = i continue elif ch in {'"', "'"} and ch != delim1: ustr += ch j = i + 1 continue elif ch in {"\r", "\n"}: if delim == delim1: raise BadSyntax( self._thisDoc, startline, argstr, i, "newline found in string literal", ) self.lines += 1 ustr += ch j = i + 1 self.startOfLine = j elif ch == "\\": j = i + 1 ch = argstr[j] # Will be empty if string ends if not ch: raise BadSyntax( self._thisDoc, startline, argstr, i, "unterminated string literal (2)", ) k = "abfrtvn\\\"'".find(ch) if k >= 0: uch = "\a\b\f\r\t\v\n\\\"'"[k] ustr += uch j += 1 elif ch == "u": j, ch = self.uEscape(argstr, j + 1, startline) ustr += ch elif ch == "U": j, ch = self.UEscape(argstr, j + 1, startline) ustr += ch else: self.BadSyntax(argstr, i, "bad escape") self.BadSyntax(argstr, i, "unterminated string literal") def _unicodeEscape(self, argstr, i, startline, reg, n, prefix): if len(argstr) < i + n: raise BadSyntax( self._thisDoc, startline, argstr, i, "unterminated string literal(3)" ) try: return i + n, reg.sub(unicodeExpand, "\\" + prefix + argstr[i : i + n]) except: raise BadSyntax( self._thisDoc, startline, argstr, i, "bad string literal hex escape: " + argstr[i : i + n], ) def uEscape(self, argstr, i, startline): return self._unicodeEscape(argstr, i, startline, unicodeEscape4, 4, "u") def UEscape(self, argstr, i, startline): return self._unicodeEscape(argstr, i, startline, unicodeEscape8, 8, "U") def BadSyntax(self, argstr, i, msg): raise BadSyntax(self._thisDoc, self.lines, argstr, i, msg) 
# If we are going to do operators then they should generate # [ is operator:plus of ( \1 \2 ) ] class BadSyntax(SyntaxError): def __init__(self, uri, lines, argstr, i, why): self._str = argstr.encode("utf-8") # Better go back to strings for errors self._i = i self._why = why self.lines = lines self._uri = uri def __str__(self): argstr = self._str i = self._i st = 0 if i > 60: pre = "..." st = i - 60 else: pre = "" if len(argstr) - i > 60: post = "..." else: post = "" return 'at line %i of <%s>:\nBad syntax (%s) at ^ in:\n"%s%s^%s%s"' % ( self.lines + 1, self._uri, self._why, pre, argstr[st:i], argstr[i : i + 60], post, ) @property def message(self): return str(self) ############################################################################### class Formula(object): number = 0 def __init__(self, parent): self.uuid = uuid4().hex self.counter = 0 Formula.number += 1 self.number = Formula.number self.existentials = {} self.universals = {} self.quotedgraph = QuotedGraph(store=parent.store, identifier=self.id()) def __str__(self): return "_:Formula%s" % self.number def id(self): return BNode("_:Formula%s" % self.number) def newBlankNode(self, uri=None, why=None): if uri is None: self.counter += 1 bn = BNode("f%sb%s" % (self.uuid, self.counter)) else: bn = BNode(uri.split("#").pop().replace("_", "b")) return bn def newUniversal(self, uri, why=None): return Variable(uri.split("#").pop()) def declareExistential(self, x): self.existentials[x] = self.newBlankNode() def close(self): return self.quotedgraph r_hibyte = re.compile(r"([\x80-\xff])") class RDFSink(object): def __init__(self, graph): self.rootFormula = None self.uuid = uuid4().hex self.counter = 0 self.graph = graph def newFormula(self): fa = getattr(self.graph.store, "formula_aware", False) if not fa: raise ParserError( "Cannot create formula parser with non-formula-aware store." 
) f = Formula(self.graph) return f def newGraph(self, identifier): return Graph(self.graph.store, identifier) def newSymbol(self, *args): return URIRef(args[0]) def newBlankNode(self, arg=None, uri=None, why=None): if isinstance(arg, Formula): return arg.newBlankNode(uri) elif isinstance(arg, Graph) or arg is None: self.counter += 1 bn = BNode("n%sb%s" % (self.uuid, self.counter)) else: bn = BNode(str(arg[0]).split("#").pop().replace("_", "b")) return bn def newLiteral(self, s, dt, lang): if dt: return Literal(s, datatype=dt) else: return Literal(s, lang=lang) def newList(self, n, f): nil = self.newSymbol("http://www.w3.org/1999/02/22-rdf-syntax-ns#nil") if not n: return nil first = self.newSymbol("http://www.w3.org/1999/02/22-rdf-syntax-ns#first") rest = self.newSymbol("http://www.w3.org/1999/02/22-rdf-syntax-ns#rest") af = a = self.newBlankNode(f) for ne in n[:-1]: self.makeStatement((f, first, a, ne)) an = self.newBlankNode(f) self.makeStatement((f, rest, a, an)) a = an self.makeStatement((f, first, a, n[-1])) self.makeStatement((f, rest, a, nil)) return af def newSet(self, *args): return set(args) def setDefaultNamespace(self, *args): return ":".join(repr(n) for n in args) def makeStatement(self, quadruple, why=None): f, p, s, o = quadruple if hasattr(p, "formula"): raise ParserError("Formula used as predicate") s = self.normalise(f, s) p = self.normalise(f, p) o = self.normalise(f, o) if f == self.rootFormula: # print s, p, o, '.' 
self.graph.add((s, p, o)) elif isinstance(f, Formula): f.quotedgraph.add((s, p, o)) else: f.add((s, p, o)) # return str(quadruple) def normalise(self, f, n): if isinstance(n, tuple): return URIRef(str(n[1])) if isinstance(n, bool): s = Literal(str(n).lower(), datatype=BOOLEAN_DATATYPE) return s if isinstance(n, int) or isinstance(n, long_type): s = Literal(str(n), datatype=INTEGER_DATATYPE) return s if isinstance(n, Decimal): value = str(n) if value == "-0": value = "0" s = Literal(value, datatype=DECIMAL_DATATYPE) return s if isinstance(n, float): s = Literal(str(n), datatype=DOUBLE_DATATYPE) return s if isinstance(f, Formula): if n in f.existentials: return f.existentials[n] # if isinstance(n, Var): # if f.universals.has_key(n): # return f.universals[n] # f.universals[n] = f.newBlankNode() # return f.universals[n] return n def intern(self, something): return something def bind(self, pfx, uri): pass # print pfx, ':', uri def startDoc(self, formula): self.rootFormula = formula def endDoc(self, formula): pass ################################################### # # Utilities # def hexify(ustr): """Use URL encoding to return an ASCII string corresponding to the given UTF8 string >>> hexify("http://example/a b") b'http://example/a%20b' """ # s1=ustr.encode('utf-8') s = "" for ch in ustr: # .encode('utf-8'): if ord(ch) > 126 or ord(ch) < 33: ch = "%%%02X" % ord(ch) else: ch = "%c" % ord(ch) s = s + ch return s.encode("latin-1") class TurtleParser(Parser): """ An RDFLib parser for Turtle See http://www.w3.org/TR/turtle/ """ def __init__(self): pass def parse(self, source, graph, encoding="utf-8", turtle=True): if encoding not in [None, "utf-8"]: raise ParserError( "N3/Turtle files are always utf-8 encoded, I was passed: %s" % encoding ) sink = RDFSink(graph) baseURI = graph.absolutize(source.getPublicId() or source.getSystemId() or "") p = SinkParser(sink, baseURI=baseURI, turtle=turtle) # N3 parser prefers str stream stream = source.getCharacterStream() if not stream: 
stream = source.getByteStream() p.loadStream(stream) for prefix, namespace in p._bindings.items(): graph.bind(prefix, namespace) class N3Parser(TurtleParser): """ An RDFLib parser for Notation3 See http://www.w3.org/DesignIssues/Notation3.html """ def __init__(self): pass def parse(self, source, graph, encoding="utf-8"): # we're currently being handed a Graph, not a ConjunctiveGraph # context-aware is this implied by formula_aware ca = getattr(graph.store, "context_aware", False) fa = getattr(graph.store, "formula_aware", False) if not ca: raise ParserError("Cannot parse N3 into non-context-aware store.") elif not fa: raise ParserError("Cannot parse N3 into non-formula-aware store.") conj_graph = ConjunctiveGraph(store=graph.store) conj_graph.default_context = graph # TODO: CG __init__ should have a # default_context arg # TODO: update N3Processor so that it can use conj_graph as the sink conj_graph.namespace_manager = graph.namespace_manager TurtleParser.parse(self, source, conj_graph, encoding, turtle=False) def _test(): # pragma: no cover import doctest doctest.testmod() def main(): # pragma: no cover g = ConjunctiveGraph() sink = RDFSink(g) base_uri = "file://" + os.path.join(os.getcwd(), sys.argv[1]) p = SinkParser(sink, baseURI=base_uri) p._bindings[""] = p._baseURI + "#" p.startDoc() f = open(sys.argv[1], "rb") rdbytes = f.read() f.close() p.feed(rdbytes) p.endDoc() for t in g.quads((None, None, None)): print(t) if __name__ == "__main__": main() rdflib-6.1.1/rdflib/plugins/parsers/nquads.py000066400000000000000000000070511415774155300212470ustar00rootroot00000000000000""" This is a rdflib plugin for parsing NQuad files into Conjunctive graphs that can be used and queried. The store that backs the graph *must* be able to handle contexts. 
>>> from rdflib import ConjunctiveGraph, URIRef, Namespace >>> g = ConjunctiveGraph() >>> data = open("test/nquads.rdflib/example.nquads", "rb") >>> g.parse(data, format="nquads") # doctest:+ELLIPSIS )> >>> assert len(g.store) == 449 >>> # There should be 16 separate contexts >>> assert len([x for x in g.store.contexts()]) == 16 >>> # is the name of entity E10009 "Arco Publications"? >>> # (in graph http://bibliographica.org/entity/E10009) >>> # Looking for: >>> # >>> # >>> # "Arco Publications" >>> # >>> s = URIRef("http://bibliographica.org/entity/E10009") >>> FOAF = Namespace("http://xmlns.com/foaf/0.1/") >>> assert(g.value(s, FOAF.name).eq("Arco Publications")) """ from codecs import getreader from rdflib import ConjunctiveGraph # Build up from the NTriples parser: from rdflib.plugins.parsers.ntriples import W3CNTriplesParser from rdflib.plugins.parsers.ntriples import ParseError from rdflib.plugins.parsers.ntriples import r_tail from rdflib.plugins.parsers.ntriples import r_wspace __all__ = ["NQuadsParser"] class NQuadsParser(W3CNTriplesParser): def parse(self, inputsource, sink, bnode_context=None, **kwargs): """ Parse inputsource as an N-Quads file. :type inputsource: `rdflib.parser.InputSource` :param inputsource: the source of N-Quads-formatted data :type sink: `rdflib.graph.Graph` :param sink: where to send parsed triples :type bnode_context: `dict`, optional :param bnode_context: a dict mapping blank node identifiers to `~rdflib.term.BNode` instances. See `.NTriplesParser.parse` """ assert sink.store.context_aware, ( "NQuadsParser must be given" " a context aware store." 
) self.sink = ConjunctiveGraph(store=sink.store, identifier=sink.identifier) source = inputsource.getCharacterStream() if not source: source = inputsource.getByteStream() source = getreader("utf-8")(source) if not hasattr(source, "read"): raise ParseError("Item to parse must be a file-like object.") self.file = source self.buffer = "" while True: self.line = __line = self.readline() if self.line is None: break try: self.parseline(bnode_context) except ParseError as msg: raise ParseError("Invalid line (%s):\n%r" % (msg, __line)) return self.sink def parseline(self, bnode_context=None): self.eat(r_wspace) if (not self.line) or self.line.startswith(("#")): return # The line is empty or a comment subject = self.subject(bnode_context) self.eat(r_wspace) predicate = self.predicate() self.eat(r_wspace) obj = self.object(bnode_context) self.eat(r_wspace) context = self.uriref() or self.nodeid(bnode_context) or self.sink.identifier self.eat(r_tail) if self.line: raise ParseError("Trailing garbage") # Must have a context aware store - add on a normal Graph # discards anything where the ctx != graph.identifier self.sink.get_context(context).add((subject, predicate, obj)) rdflib-6.1.1/rdflib/plugins/parsers/ntriples.py000066400000000000000000000244731415774155300216230ustar00rootroot00000000000000#!/usr/bin/env python3 __doc__ = """\ N-Triples Parser License: GPL 2, W3C, BSD, or MIT Author: Sean B. Palmer, inamidst.com """ import re import codecs from rdflib.term import URIRef as URI from rdflib.term import BNode as bNode from rdflib.term import Literal from rdflib.compat import decodeUnicodeEscape from rdflib.exceptions import ParserError as ParseError from rdflib.parser import Parser from io import StringIO, TextIOBase, BytesIO __all__ = ["unquote", "uriquote", "W3CNTriplesParser", "NTGraphSink", "NTParser"] uriref = r'<([^:]+:[^\s"<>]*)>' literal = r'"([^"\\]*(?:\\.[^"\\]*)*)"' litinfo = r"(?:@([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)|\^\^" + uriref + r")?" 
r_line = re.compile(r"([^\r\n]*)(?:\r\n|\r|\n)") r_wspace = re.compile(r"[ \t]*") r_wspaces = re.compile(r"[ \t]+") r_tail = re.compile(r"[ \t]*\.[ \t]*(#.*)?") r_uriref = re.compile(uriref) r_nodeid = re.compile(r"_:([A-Za-z0-9_:]([-A-Za-z0-9_:\.]*[-A-Za-z0-9_:])?)") r_literal = re.compile(literal + litinfo) bufsiz = 2048 validate = False class DummySink(object): def __init__(self): self.length = 0 def triple(self, s, p, o): self.length += 1 print(s, p, o) quot = {"t": "\t", "n": "\n", "r": "\r", '"': '"', "\\": "\\"} r_safe = re.compile(r"([\x20\x21\x23-\x5B\x5D-\x7E]+)") r_quot = re.compile(r'\\(t|n|r|"|\\)') r_uniquot = re.compile(r"\\u([0-9A-F]{4})|\\U([0-9A-F]{8})") def unquote(s): """Unquote an N-Triples string.""" if not validate: if isinstance(s, str): # nquads s = decodeUnicodeEscape(s) else: s = s.decode("unicode-escape") return s else: result = [] while s: m = r_safe.match(s) if m: s = s[m.end() :] result.append(m.group(1)) continue m = r_quot.match(s) if m: s = s[2:] result.append(quot[m.group(1)]) continue m = r_uniquot.match(s) if m: s = s[m.end() :] u, U = m.groups() codepoint = int(u or U, 16) if codepoint > 0x10FFFF: raise ParseError("Disallowed codepoint: %08X" % codepoint) result.append(chr(codepoint)) elif s.startswith("\\"): raise ParseError("Illegal escape at: %s..." % s[:10]) else: raise ParseError("Illegal literal character: %r" % s[0]) return "".join(result) r_hibyte = re.compile(r"([\x80-\xFF])") def uriquote(uri): if not validate: return uri else: return r_hibyte.sub(lambda m: "%%%02X" % ord(m.group(1)), uri) class W3CNTriplesParser(object): """An N-Triples Parser. This is a legacy-style Triples parser for NTriples provided by W3C Usage:: p = NTriplesParser(sink=MySink()) sink = p.parse(f) # file; use parsestring for a string To define a context in which blank node identifiers refer to the same blank node across instances of NTriplesParser, pass the same dict as `bnode_context` to each instance. 
By default, a new blank node context is created for each instance of `NTriplesParser`. """ __slots__ = ("_bnode_ids", "sink", "buffer", "file", "line") def __init__(self, sink=None, bnode_context=None): if bnode_context is not None: self._bnode_ids = bnode_context else: self._bnode_ids = {} if sink is not None: self.sink = sink else: self.sink = DummySink() self.buffer = None self.file = None self.line = "" def parse(self, f, bnode_context=None): """ Parse f as an N-Triples file. :type f: :term:`file object` :param f: the N-Triples source :type bnode_context: `dict`, optional :param bnode_context: a dict mapping blank node identifiers (e.g., ``a`` in ``_:a``) to `~rdflib.term.BNode` instances. An empty dict can be passed in to define a distinct context for a given call to `parse`. """ if not hasattr(f, "read"): raise ParseError("Item to parse must be a file-like object.") if not hasattr(f, "encoding") and not hasattr(f, "charbuffer"): # someone still using a bytestream here? f = codecs.getreader("utf-8")(f) self.file = f self.buffer = "" while True: self.line = self.readline() if self.line is None: break try: self.parseline(bnode_context=bnode_context) except ParseError: raise ParseError("Invalid line: {}".format(self.line)) return self.sink def parsestring(self, s, **kwargs): """Parse s as an N-Triples string.""" if not isinstance(s, (str, bytes, bytearray)): raise ParseError("Item to parse must be a string instance.") if isinstance(s, (bytes, bytearray)): f = codecs.getreader("utf-8")(BytesIO(s)) else: f = StringIO(s) self.parse(f, **kwargs) def readline(self): """Read an N-Triples line from buffered input.""" # N-Triples lines end in either CRLF, CR, or LF # Therefore, we can't just use f.readline() if not self.buffer: buffer = self.file.read(bufsiz) if not buffer: return None self.buffer = buffer while True: m = r_line.match(self.buffer) if m: # the more likely prospect self.buffer = self.buffer[m.end() :] return m.group(1) else: buffer = self.file.read(bufsiz) 
if not buffer and not self.buffer.isspace(): # Last line does not need to be terminated with a newline buffer += "\n" elif not buffer: return None self.buffer += buffer def parseline(self, bnode_context=None): self.eat(r_wspace) if (not self.line) or self.line.startswith("#"): return # The line is empty or a comment subject = self.subject(bnode_context) self.eat(r_wspaces) predicate = self.predicate() self.eat(r_wspaces) object_ = self.object(bnode_context) self.eat(r_tail) if self.line: raise ParseError("Trailing garbage: {}".format(self.line)) self.sink.triple(subject, predicate, object_) def peek(self, token): return self.line.startswith(token) def eat(self, pattern): m = pattern.match(self.line) if not m: # @@ Why can't we get the original pattern? # print(dir(pattern)) # print repr(self.line), type(self.line) raise ParseError("Failed to eat %s at %s" % (pattern.pattern, self.line)) self.line = self.line[m.end() :] return m def subject(self, bnode_context=None): # @@ Consider using dictionary cases subj = self.uriref() or self.nodeid(bnode_context) if not subj: raise ParseError("Subject must be uriref or nodeID") return subj def predicate(self): pred = self.uriref() if not pred: raise ParseError("Predicate must be uriref") return pred def object(self, bnode_context=None): objt = self.uriref() or self.nodeid(bnode_context) or self.literal() if objt is False: raise ParseError("Unrecognised object type") return objt def uriref(self): if self.peek("<"): uri = self.eat(r_uriref).group(1) uri = unquote(uri) uri = uriquote(uri) return URI(uri) return False def nodeid(self, bnode_context=None): if self.peek("_"): # Fix for https://github.com/RDFLib/rdflib/issues/204 if bnode_context is None: bnode_context = self._bnode_ids bnode_id = self.eat(r_nodeid).group(1) new_id = bnode_context.get(bnode_id, None) if new_id is not None: # Re-map to id specific to this doc return bNode(new_id) else: # Replace with freshly-generated document-specific BNode id bnode = bNode() # 
Store the mapping bnode_context[bnode_id] = bnode return bnode return False def literal(self): if self.peek('"'): lit, lang, dtype = self.eat(r_literal).groups() if lang: lang = lang else: lang = None if dtype: dtype = unquote(dtype) dtype = uriquote(dtype) dtype = URI(dtype) else: dtype = None if lang and dtype: raise ParseError("Can't have both a language and a datatype") lit = unquote(lit) return Literal(lit, lang, dtype) return False class NTGraphSink(object): __slots__ = ("g",) def __init__(self, graph): self.g = graph def triple(self, s, p, o): self.g.add((s, p, o)) class NTParser(Parser): """parser for the ntriples format, often stored with the .nt extension See http://www.w3.org/TR/rdf-testcases/#ntriples""" __slots__ = () @classmethod def parse(cls, source, sink, **kwargs): """ Parse the NT format :type source: `rdflib.parser.InputSource` :param source: the source of NT-formatted data :type sink: `rdflib.graph.Graph` :param sink: where to send parsed triples :param kwargs: Additional arguments to pass to `.NTriplesParser.parse` """ f = source.getCharacterStream() if not f: b = source.getByteStream() # TextIOBase includes: StringIO and TextIOWrapper if isinstance(b, TextIOBase): # f is not really a ByteStream, but a CharacterStream f = b else: # since N-Triples 1.1 files can and should be utf-8 encoded f = codecs.getreader("utf-8")(b) parser = W3CNTriplesParser(NTGraphSink(sink)) parser.parse(f, **kwargs) f.close() rdflib-6.1.1/rdflib/plugins/parsers/rdfxml.py000066400000000000000000000514241415774155300212530ustar00rootroot00000000000000""" An RDF/XML parser for RDFLib """ from xml.sax import make_parser, handler, xmlreader from xml.sax.handler import ErrorHandler from xml.sax.saxutils import quoteattr, escape from urllib.parse import urldefrag, urljoin from rdflib.namespace import is_ncname from rdflib.namespace import RDF from rdflib.plugins.parsers.RDFVOC import RDFVOC from rdflib.term import URIRef from rdflib.term import BNode from rdflib.term import 
Literal from rdflib.exceptions import ParserError, Error from rdflib.parser import Parser __all__ = ["create_parser", "BagID", "ElementHandler", "RDFXMLHandler", "RDFXMLParser"] RDFNS = RDFVOC # http://www.w3.org/TR/rdf-syntax-grammar/#eventterm-attribute-URI # A mapping from unqualified terms to their qualified version. UNQUALIFIED = { "about": RDFVOC.about, "ID": RDFVOC.ID, "type": RDFVOC.type, "resource": RDFVOC.resource, "parseType": RDFVOC.parseType, } # http://www.w3.org/TR/rdf-syntax-grammar/#coreSyntaxTerms CORE_SYNTAX_TERMS = [ RDFVOC.RDF, RDFVOC.ID, RDFVOC.about, RDFVOC.parseType, RDFVOC.resource, RDFVOC.nodeID, RDFVOC.datatype, ] # http://www.w3.org/TR/rdf-syntax-grammar/#syntaxTerms SYNTAX_TERMS = CORE_SYNTAX_TERMS + [RDFVOC.Description, RDFVOC.li] # http://www.w3.org/TR/rdf-syntax-grammar/#oldTerms OLD_TERMS = [ URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#aboutEach"), URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#aboutEachPrefix"), URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#bagID"), ] NODE_ELEMENT_EXCEPTIONS = ( CORE_SYNTAX_TERMS + [ RDFVOC.li, ] + OLD_TERMS ) NODE_ELEMENT_ATTRIBUTES = [RDFVOC.ID, RDFVOC.nodeID, RDFVOC.about] PROPERTY_ELEMENT_EXCEPTIONS = ( CORE_SYNTAX_TERMS + [ RDFVOC.Description, ] + OLD_TERMS ) PROPERTY_ATTRIBUTE_EXCEPTIONS = ( CORE_SYNTAX_TERMS + [RDFVOC.Description, RDFVOC.li] + OLD_TERMS ) PROPERTY_ELEMENT_ATTRIBUTES = [RDFVOC.ID, RDFVOC.resource, RDFVOC.nodeID] XMLNS = "http://www.w3.org/XML/1998/namespace" BASE = (XMLNS, "base") LANG = (XMLNS, "lang") class BagID(URIRef): __slots__ = ["li"] def __init__(self, val): super(URIRef, self).__init__(val) self.li = 0 def next_li(self): self.li += 1 return RDFNS["_%s" % self.li] class ElementHandler(object): __slots__ = [ "start", "char", "end", "li", "id", "base", "subject", "predicate", "object", "list", "language", "datatype", "declared", "data", ] def __init__(self): self.start = None self.char = None self.end = None self.li = 0 self.id = None self.base = None 
self.subject = None self.object = None self.list = None self.language = None self.datatype = None self.declared = None self.data = None def next_li(self): self.li += 1 return RDFVOC["_%s" % self.li] class RDFXMLHandler(handler.ContentHandler): def __init__(self, store): self.store = store self.preserve_bnode_ids = False self.reset() def reset(self): document_element = ElementHandler() document_element.start = self.document_element_start document_element.end = lambda name, qname: None self.stack = [ None, document_element, ] self.ids = {} # remember IDs we have already seen self.bnode = {} self._ns_contexts = [{}] # contains uri -> prefix dicts self._current_context = self._ns_contexts[-1] # ContentHandler methods def setDocumentLocator(self, locator): self.locator = locator def startDocument(self): pass def startPrefixMapping(self, prefix, namespace): self._ns_contexts.append(self._current_context.copy()) self._current_context[namespace] = prefix self.store.bind(prefix, namespace or "", override=False) def endPrefixMapping(self, prefix): self._current_context = self._ns_contexts[-1] del self._ns_contexts[-1] def startElementNS(self, name, qname, attrs): stack = self.stack stack.append(ElementHandler()) current = self.current parent = self.parent base = attrs.get(BASE, None) if base is not None: base, frag = urldefrag(base) if parent and parent.base: base = urljoin(parent.base, base) else: systemId = self.locator.getPublicId() or self.locator.getSystemId() if systemId: base = urljoin(systemId, base) else: if parent: base = parent.base if base is None: systemId = self.locator.getPublicId() or self.locator.getSystemId() if systemId: base, frag = urldefrag(systemId) current.base = base language = attrs.get(LANG, None) if language is None: if parent: language = parent.language current.language = language current.start(name, qname, attrs) def endElementNS(self, name, qname): self.current.end(name, qname) self.stack.pop() def characters(self, content): char = 
self.current.char if char: char(content) def ignorableWhitespace(self, content): pass def processingInstruction(self, target, data): pass def add_reified(self, sid, spo): s, p, o = spo self.store.add((sid, RDF.type, RDF.Statement)) self.store.add((sid, RDF.subject, s)) self.store.add((sid, RDF.predicate, p)) self.store.add((sid, RDF.object, o)) def error(self, message): locator = self.locator info = "%s:%s:%s: " % ( locator.getSystemId(), locator.getLineNumber(), locator.getColumnNumber(), ) raise ParserError(info + message) def get_current(self): return self.stack[-2] # Create a read only property called current so that self.current # give the current element handler. current = property(get_current) def get_next(self): return self.stack[-1] # Create a read only property that gives the element handler to be # used for the next element. next = property(get_next) def get_parent(self): return self.stack[-3] # Create a read only property that gives the current parent # element handler parent = property(get_parent) def absolutize(self, uri): result = urljoin(self.current.base, uri, allow_fragments=1) if uri and uri[-1] == "#" and result[-1] != "#": result = "%s#" % result return URIRef(result) def convert(self, name, qname, attrs): if name[0] is None: name = URIRef(name[1]) else: name = URIRef("".join(name)) atts = {} for (n, v) in attrs.items(): if n[0] is None: att = n[1] else: att = "".join(n) if att.startswith(XMLNS) or att[0:3].lower() == "xml": pass elif att in UNQUALIFIED: # if not RDFNS[att] in atts: atts[RDFNS[att]] = v # type: ignore[misc] else: atts[URIRef(att)] = v return name, atts def document_element_start(self, name, qname, attrs): if name[0] and URIRef("".join(name)) == RDFVOC.RDF: # Cheap hack so 2to3 doesn't turn it into __next__ next = getattr(self, "next") next.start = self.node_element_start next.end = self.node_element_end else: self.node_element_start(name, qname, attrs) # self.current.end = self.node_element_end # TODO... 
set end to something that sets start such that # another element will cause error def node_element_start(self, name, qname, attrs): name, atts = self.convert(name, qname, attrs) current = self.current absolutize = self.absolutize # Cheap hack so 2to3 doesn't turn it into __next__ next = getattr(self, "next") next.start = self.property_element_start next.end = self.property_element_end if name in NODE_ELEMENT_EXCEPTIONS: self.error("Invalid node element URI: %s" % name) if RDFVOC.ID in atts: if RDFVOC.about in atts or RDFVOC.nodeID in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") id = atts[RDFVOC.ID] if not is_ncname(id): self.error("rdf:ID value is not a valid NCName: %s" % id) subject = absolutize("#%s" % id) if subject in self.ids: self.error("two elements cannot use the same ID: '%s'" % subject) self.ids[subject] = 1 # IDs can only appear once within a document elif RDFVOC.nodeID in atts: if RDFVOC.ID in atts or RDFVOC.about in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") nodeID = atts[RDFVOC.nodeID] if not is_ncname(nodeID): self.error("rdf:nodeID value is not a valid NCName: %s" % nodeID) if self.preserve_bnode_ids is False: if nodeID in self.bnode: subject = self.bnode[nodeID] else: subject = BNode() self.bnode[nodeID] = subject else: subject = BNode(nodeID) elif RDFVOC.about in atts: if RDFVOC.ID in atts or RDFVOC.nodeID in atts: self.error("Can have at most one of rdf:ID, rdf:about, and rdf:nodeID") subject = absolutize(atts[RDFVOC.about]) else: subject = BNode() if name != RDFVOC.Description: # S1 self.store.add((subject, RDF.type, absolutize(name))) language = current.language for att in atts: if not att.startswith(str(RDFNS)): predicate = absolutize(att) try: object = Literal(atts[att], language) except Error as e: self.error(e.msg) elif att == RDF.type: # S2 predicate = RDF.type object = absolutize(atts[RDF.type]) elif att in NODE_ELEMENT_ATTRIBUTES: continue elif att in 
PROPERTY_ATTRIBUTE_EXCEPTIONS: # S3 self.error("Invalid property attribute URI: %s" % att) continue # for when error does not throw an exception else: predicate = absolutize(att) try: object = Literal(atts[att], language) except Error as e: self.error(e.msg) self.store.add((subject, predicate, object)) current.subject = subject def node_element_end(self, name, qname): # repeat node-elements are only allowed # at at top-level if self.parent.object and self.current != self.stack[2]: self.error( "Repeat node-elements inside property elements: %s" % "".join(name) ) self.parent.object = self.current.subject def property_element_start(self, name, qname, attrs): name, atts = self.convert(name, qname, attrs) current = self.current absolutize = self.absolutize # Cheap hack so 2to3 doesn't turn it into __next__ next = getattr(self, "next") object = None current.data = None current.list = None if not name.startswith(str(RDFNS)): current.predicate = absolutize(name) elif name == RDFVOC.li: current.predicate = current.next_li() elif name in PROPERTY_ELEMENT_EXCEPTIONS: self.error("Invalid property element URI: %s" % name) else: current.predicate = absolutize(name) id = atts.get(RDFVOC.ID, None) if id is not None: if not is_ncname(id): self.error("rdf:ID value is not a value NCName: %s" % id) current.id = absolutize("#%s" % id) else: current.id = None resource = atts.get(RDFVOC.resource, None) nodeID = atts.get(RDFVOC.nodeID, None) parse_type = atts.get(RDFVOC.parseType, None) if resource is not None and nodeID is not None: self.error("Property element cannot have both rdf:nodeID and rdf:resource") if resource is not None: object = absolutize(resource) next.start = self.node_element_start next.end = self.node_element_end elif nodeID is not None: if not is_ncname(nodeID): self.error("rdf:nodeID value is not a valid NCName: %s" % nodeID) if self.preserve_bnode_ids is False: if nodeID in self.bnode: object = self.bnode[nodeID] else: subject = BNode() self.bnode[nodeID] = subject 
object = subject else: object = subject = BNode(nodeID) next.start = self.node_element_start next.end = self.node_element_end else: if parse_type is not None: for att in atts: if att != RDFVOC.parseType and att != RDFVOC.ID: self.error("Property attr '%s' now allowed here" % att) if parse_type == "Resource": current.subject = object = BNode() current.char = self.property_element_char next.start = self.property_element_start next.end = self.property_element_end elif parse_type == "Collection": current.char = None object = current.list = RDF.nil # BNode() # self.parent.subject next.start = self.node_element_start next.end = self.list_node_element_end else: # if parse_type=="Literal": # All other values are treated as Literal # See: http://www.w3.org/TR/rdf-syntax-grammar/ # parseTypeOtherPropertyElt object = Literal("", datatype=RDFVOC.XMLLiteral) current.char = self.literal_element_char current.declared = {XMLNS: "xml"} next.start = self.literal_element_start next.char = self.literal_element_char next.end = self.literal_element_end current.object = object return else: object = None current.char = self.property_element_char next.start = self.node_element_start next.end = self.node_element_end datatype = current.datatype = atts.get(RDFVOC.datatype, None) language = current.language if datatype is not None: # TODO: check that there are no atts other than datatype and id datatype = absolutize(datatype) else: for att in atts: if not att.startswith(str(RDFNS)): predicate = absolutize(att) elif att in PROPERTY_ELEMENT_ATTRIBUTES: continue elif att in PROPERTY_ATTRIBUTE_EXCEPTIONS: self.error("""Invalid property attribute URI: %s""" % att) else: predicate = absolutize(att) if att == RDF.type: o = URIRef(atts[att]) else: if datatype is not None: language = None # type: ignore[unreachable] o = Literal(atts[att], language, datatype) if object is None: object = BNode() self.store.add((object, predicate, o)) if object is None: current.data = "" current.object = None else: 
current.data = None current.object = object def property_element_char(self, data): current = self.current if current.data is not None: current.data += data def property_element_end(self, name, qname): current = self.current if current.data is not None and current.object is None: literalLang = current.language if current.datatype is not None: literalLang = None current.object = Literal(current.data, literalLang, current.datatype) current.data = None if self.next.end == self.list_node_element_end: if current.object != RDF.nil: self.store.add((current.list, RDF.rest, RDF.nil)) if current.object is not None: self.store.add((self.parent.subject, current.predicate, current.object)) if current.id is not None: self.add_reified( current.id, (self.parent.subject, current.predicate, current.object) ) current.subject = None def list_node_element_end(self, name, qname): current = self.current if self.parent.list == RDF.nil: list = BNode() # Removed between 20030123 and 20030905 # self.store.add((list, RDF.type, LIST)) self.parent.list = list self.store.add((self.parent.list, RDF.first, current.subject)) self.parent.object = list self.parent.char = None else: list = BNode() # Removed between 20030123 and 20030905 # self.store.add((list, RDF.type, LIST)) self.store.add((self.parent.list, RDF.rest, list)) self.store.add((list, RDF.first, current.subject)) self.parent.list = list def literal_element_start(self, name, qname, attrs): current = self.current self.next.start = self.literal_element_start self.next.char = self.literal_element_char self.next.end = self.literal_element_end current.declared = self.parent.declared.copy() if name[0]: prefix = self._current_context[name[0]] if prefix: current.object = "<%s:%s" % (prefix, name[1]) else: current.object = "<%s" % name[1] if not name[0] in current.declared: current.declared[name[0]] = prefix if prefix: current.object += ' xmlns:%s="%s"' % (prefix, name[0]) else: current.object += ' xmlns="%s"' % name[0] else: current.object = "<%s" 
% name[1] for (name, value) in attrs.items(): if name[0]: if not name[0] in current.declared: current.declared[name[0]] = self._current_context[name[0]] name = current.declared[name[0]] + ":" + name[1] else: name = name[1] current.object += " %s=%s" % (name, quoteattr(value)) current.object += ">" def literal_element_char(self, data): self.current.object += escape(data) def literal_element_end(self, name, qname): if name[0]: prefix = self._current_context[name[0]] if prefix: end = "" % (prefix, name[1]) else: end = "" % name[1] else: end = "" % name[1] self.parent.object += self.current.object + end def create_parser(target, store) -> xmlreader.XMLReader: parser = make_parser() try: # Workaround for bug in expatreader.py. Needed when # expatreader is trying to guess a prefix. parser.start_namespace_decl("xml", "http://www.w3.org/XML/1998/namespace") # type: ignore[attr-defined] except AttributeError: pass # Not present in Jython (at least) parser.setFeature(handler.feature_namespaces, 1) rdfxml = RDFXMLHandler(store) rdfxml.setDocumentLocator(target) # rdfxml.setDocumentLocator(_Locator(self.url, self.parser)) parser.setContentHandler(rdfxml) parser.setErrorHandler(ErrorHandler()) return parser class RDFXMLParser(Parser): def __init__(self): pass def parse(self, source, sink, **args): self._parser = create_parser(source, sink) content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get("preserve_bnode_ids", None) if preserve_bnode_ids is not None: content_handler.preserve_bnode_ids = preserve_bnode_ids # # We're only using it once now # content_handler.reset() # self._parser.reset() self._parser.parse(source) rdflib-6.1.1/rdflib/plugins/parsers/trig.py000066400000000000000000000107051415774155300207210ustar00rootroot00000000000000from rdflib import ConjunctiveGraph from rdflib.parser import Parser from .notation3 import SinkParser, RDFSink def becauseSubGraph(*args, **kwargs): pass class TrigSinkParser(SinkParser): def 
directiveOrStatement(self, argstr, h): # import pdb; pdb.set_trace() i = self.skipSpace(argstr, h) if i < 0: return i # EOF j = self.graph(argstr, i) if j >= 0: return j j = self.sparqlDirective(argstr, i) if j >= 0: return j j = self.directive(argstr, i) if j >= 0: return self.checkDot(argstr, j) j = self.statement(argstr, i) if j >= 0: return self.checkDot(argstr, j) return j def labelOrSubject(self, argstr, i, res): j = self.skipSpace(argstr, i) if j < 0: return j # eof i = j j = self.uri_ref2(argstr, i, res) if j >= 0: return j if argstr[i] == "[": j = self.skipSpace(argstr, i + 1) if j < 0: self.BadSyntax(argstr, i, "Expected ] got EOF") if argstr[j] == "]": res.append(self.blankNode()) return j + 1 return -1 def graph(self, argstr, i): """ Parse trig graph, i.e. = { .. triples .. } return -1 if it doesn't look like a graph-decl raise Exception if it looks like a graph, but isn't. """ # import pdb; pdb.set_trace() j = self.sparqlTok("GRAPH", argstr, i) # optional GRAPH keyword if j >= 0: i = j r = [] j = self.labelOrSubject(argstr, i, r) if j >= 0: graph = r[0] i = j else: graph = self._store.graph.identifier # hack j = self.skipSpace(argstr, i) if j < 0: self.BadSyntax(argstr, i, "EOF found when expected graph") if argstr[j : j + 1] == "=": # optional = for legacy support i = self.skipSpace(argstr, j + 1) if i < 0: self.BadSyntax(argstr, i, "EOF found when expecting '{'") else: i = j if argstr[i : i + 1] != "{": return -1 # the node wasn't part of a graph j = i + 1 oldParentContext = self._parentContext self._parentContext = self._context reason2 = self._reason2 self._reason2 = becauseSubGraph self._context = self._store.newGraph(graph) while 1: i = self.skipSpace(argstr, j) if i < 0: self.BadSyntax(argstr, i, "needed '}', found end.") if argstr[i : i + 1] == "}": j = i + 1 break j = self.directiveOrStatement(argstr, i) if j < 0: self.BadSyntax(argstr, i, "expected statement or '}'") self._context = self._parentContext self._reason2 = reason2 
self._parentContext = oldParentContext # res.append(subj.close()) # No use until closed return j class TrigParser(Parser): """ An RDFLib parser for TriG """ def __init__(self): pass def parse(self, source, graph, encoding="utf-8"): if encoding not in [None, "utf-8"]: raise Exception( ("TriG files are always utf-8 encoded, ", "I was passed: %s") % encoding ) # we're currently being handed a Graph, not a ConjunctiveGraph assert graph.store.context_aware, "TriG Parser needs a context-aware store!" conj_graph = ConjunctiveGraph(store=graph.store, identifier=graph.identifier) conj_graph.default_context = graph # TODO: CG __init__ should have a # default_context arg # TODO: update N3Processor so that it can use conj_graph as the sink conj_graph.namespace_manager = graph.namespace_manager sink = RDFSink(conj_graph) baseURI = conj_graph.absolutize( source.getPublicId() or source.getSystemId() or "" ) p = TrigSinkParser(sink, baseURI=baseURI, turtle=True) stream = source.getCharacterStream() # try to get str stream first if not stream: # fallback to get the bytes stream stream = source.getByteStream() p.loadStream(stream) for prefix, namespace in p._bindings.items(): conj_graph.bind(prefix, namespace) # return ??? rdflib-6.1.1/rdflib/plugins/parsers/trix.py000066400000000000000000000206051415774155300207420ustar00rootroot00000000000000""" A TriX parser for RDFLib """ from rdflib.namespace import Namespace from rdflib.term import URIRef from rdflib.term import BNode from rdflib.term import Literal from rdflib.graph import Graph from rdflib.exceptions import ParserError from rdflib.parser import Parser from xml.sax.saxutils import handler from xml.sax import make_parser from xml.sax.handler import ErrorHandler __all__ = ["create_parser", "TriXHandler", "TriXParser"] TRIXNS = Namespace("http://www.w3.org/2004/03/trix/trix-1/") XMLNS = Namespace("http://www.w3.org/XML/1998/namespace") class TriXHandler(handler.ContentHandler): """An Sax Handler for TriX. 
See http://sw.nokia.com/trix/""" def __init__(self, store): self.store = store self.preserve_bnode_ids = False self.reset() def reset(self): self.bnode = {} self.graph = None self.triple = None self.state = 0 self.lang = None self.datatype = None # ContentHandler methods def setDocumentLocator(self, locator): self.locator = locator def startDocument(self): pass def startPrefixMapping(self, prefix, namespace): pass def endPrefixMapping(self, prefix): pass def startElementNS(self, name, qname, attrs): if name[0] != str(TRIXNS): self.error( "Only elements in the TriX namespace are allowed. %s!=%s" % (name[0], TRIXNS) ) if name[1] == "TriX": if self.state == 0: self.state = 1 else: self.error("Unexpected TriX element") elif name[1] == "graph": if self.state == 1: self.state = 2 else: self.error("Unexpected graph element") elif name[1] == "uri": if self.state == 2: # the context uri self.state = 3 elif self.state == 4: # part of a triple pass else: self.error("Unexpected uri element") elif name[1] == "triple": if self.state == 2: if self.graph is None: # anonymous graph, create one with random bnode id self.graph = Graph(store=self.store) # start of a triple self.triple = [] self.state = 4 else: self.error("Unexpected triple element") elif name[1] == "typedLiteral": if self.state == 4: # part of triple self.lang = None self.datatype = None try: self.lang = attrs.getValue((str(XMLNS), "lang")) except: # language not required - ignore pass try: self.datatype = attrs.getValueByQName("datatype") except KeyError: self.error("No required attribute 'datatype'") else: self.error("Unexpected typedLiteral element") elif name[1] == "plainLiteral": if self.state == 4: # part of triple self.lang = None self.datatype = None try: self.lang = attrs.getValue((str(XMLNS), "lang")) except: # language not required - ignore pass else: self.error("Unexpected plainLiteral element") elif name[1] == "id": if self.state == 2: # the context uri self.state = 3 elif self.state == 4: # part of 
triple pass else: self.error("Unexpected id element") else: self.error("Unknown element %s in TriX namespace" % name[1]) self.chars = "" def endElementNS(self, name, qname): if name[0] != str(TRIXNS): self.error( "Only elements in the TriX namespace are allowed. %s!=%s" % (name[0], TRIXNS) ) if name[1] == "uri": if self.state == 3: self.graph = Graph( store=self.store, identifier=URIRef(self.chars.strip()) ) self.state = 2 elif self.state == 4: self.triple += [URIRef(self.chars.strip())] else: self.error( "Illegal internal self.state - This should never " + "happen if the SAX parser ensures XML syntax correctness" ) elif name[1] == "id": if self.state == 3: self.graph = Graph( self.store, identifier=self.get_bnode(self.chars.strip()) ) self.state = 2 elif self.state == 4: self.triple += [self.get_bnode(self.chars.strip())] else: self.error( "Illegal internal self.state - This should never " + "happen if the SAX parser ensures XML syntax correctness" ) elif name[1] == "plainLiteral" or name[1] == "typedLiteral": if self.state == 4: self.triple += [ Literal(self.chars, lang=self.lang, datatype=self.datatype) ] else: self.error( "This should never happen if the SAX parser " + "ensures XML syntax correctness" ) elif name[1] == "triple": if self.state == 4: if len(self.triple) != 3: self.error( "Triple has wrong length, got %d elements: %s" % (len(self.triple), self.triple) ) self.graph.add(self.triple) # self.store.store.add(self.triple,context=self.graph) # self.store.addN([self.triple+[self.graph]]) self.state = 2 else: self.error( "This should never happen if the SAX parser " + "ensures XML syntax correctness" ) elif name[1] == "graph": self.graph = None self.state = 1 elif name[1] == "TriX": self.state = 0 else: self.error("Unexpected close element") def get_bnode(self, label): if self.preserve_bnode_ids: bn = BNode(label) else: if label in self.bnode: bn = self.bnode[label] else: bn = BNode(label) self.bnode[label] = bn return bn def characters(self, content): 
self.chars += content def ignorableWhitespace(self, content): pass def processingInstruction(self, target, data): pass def error(self, message): locator = self.locator info = "%s:%s:%s: " % ( locator.getSystemId(), locator.getLineNumber(), locator.getColumnNumber(), ) raise ParserError(info + message) def create_parser(store): parser = make_parser() try: # Workaround for bug in expatreader.py. Needed when # expatreader is trying to guess a prefix. parser.start_namespace_decl("xml", "http://www.w3.org/XML/1998/namespace") except AttributeError: pass # Not present in Jython (at least) parser.setFeature(handler.feature_namespaces, 1) trix = TriXHandler(store) parser.setContentHandler(trix) parser.setErrorHandler(ErrorHandler()) return parser class TriXParser(Parser): """A parser for TriX. See http://sw.nokia.com/trix/""" def __init__(self): pass def parse(self, source, sink, **args): assert ( sink.store.context_aware ), "TriXParser must be given a context aware store." self._parser = create_parser(sink.store) content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get("preserve_bnode_ids", None) if preserve_bnode_ids is not None: content_handler.preserve_bnode_ids = preserve_bnode_ids # We're only using it once now # content_handler.reset() # self._parser.reset() self._parser.parse(source) rdflib-6.1.1/rdflib/plugins/serializers/000077500000000000000000000000001415774155300202545ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/serializers/__init__.py000066400000000000000000000000001415774155300223530ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/serializers/hext.py000066400000000000000000000105401415774155300215760ustar00rootroot00000000000000""" HextuplesSerializer RDF graph serializer for RDFLib. See for details about the format. 
""" from typing import IO, Optional, Union from rdflib.graph import Graph, ConjunctiveGraph from rdflib.term import Literal, URIRef, Node, BNode from rdflib.serializer import Serializer from rdflib.namespace import RDF, XSD import warnings __all__ = ["HextuplesSerializer"] class HextuplesSerializer(Serializer): """ Serializes RDF graphs to NTriples format. """ def __init__(self, store: Union[Graph, ConjunctiveGraph]): self.default_context: Optional[Node] if isinstance(store, ConjunctiveGraph): self.contexts = list(store.contexts()) if store.default_context: self.default_context = store.default_context self.contexts.append(store.default_context) else: self.default_context = None else: self.contexts = [store] self.default_context = None Serializer.__init__(self, store) def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = "utf-8", **kwargs, ): if base is not None: warnings.warn( "base has no meaning for Hextuples serialization. " "I will ignore this value" ) if encoding not in [None, "utf-8"]: warnings.warn( f"Hextuples files are always utf-8 encoded. " f"I was passed: {encoding}, " "but I'm still going to use utf-8 anyway!" 
) if self.store.formula_aware is True: raise Exception( "Hextuple serialization can't (yet) handle formula-aware stores" ) for context in self.contexts: for triple in context: hl = self._hex_line(triple, context) if hl is not None: stream.write(hl.encode()) def _hex_line(self, triple, context): if isinstance( triple[0], (URIRef, BNode) ): # exclude QuotedGraph and other objects # value value = ( triple[2] if isinstance(triple[2], Literal) else self._iri_or_bn(triple[2]) ) # datatype if isinstance(triple[2], URIRef): # datatype = "http://www.w3.org/1999/02/22-rdf-syntax-ns#namedNode" datatype = "globalId" elif isinstance(triple[2], BNode): # datatype = "http://www.w3.org/1999/02/22-rdf-syntax-ns#blankNode" datatype = "localId" elif isinstance(triple[2], Literal): if triple[2].datatype is not None: datatype = f"{triple[2].datatype}" else: if triple[2].language is not None: # language datatype = RDF.langString else: datatype = XSD.string else: return None # can't handle non URI, BN or Literal Object (QuotedGraph) # language if isinstance(triple[2], Literal): if triple[2].language is not None: language = f"{triple[2].language}" else: language = "" else: language = "" return '["%s", "%s", "%s", "%s", "%s", "%s"]\n' % ( self._iri_or_bn(triple[0]), triple[1], value, datatype, language, self._context(context), ) else: # do not return anything for non-IRIs or BNs, e.g. 
QuotedGraph, Subjects return None def _iri_or_bn(self, i_): if isinstance(i_, URIRef): return f"{i_}" elif isinstance(i_, BNode): return f"{i_.n3()}" else: return None def _context(self, context): if self.default_context is None: return "" if context.identifier == "urn:x-rdflib:default": return "" elif context is not None and self.default_context is not None: if context.identifier == self.default_context.identifier: return "" return context.identifier rdflib-6.1.1/rdflib/plugins/serializers/jsonld.py000066400000000000000000000304371415774155300221260ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ This serialiser will output an RDF Graph as a JSON-LD formatted document. See: http://json-ld.org/ Example usage:: >>> from rdflib import Graph >>> testrdf = ''' ... @prefix dc: . ... ... dc:title "Someone's Homepage"@en . ... ''' >>> g = Graph().parse(data=testrdf, format='n3') >>> print(g.serialize(format='json-ld', indent=4)) [ { "@id": "http://example.org/about", "http://purl.org/dc/terms/title": [ { "@language": "en", "@value": "Someone's Homepage" } ] } ] """ # From: https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/serializer.py # NOTE: This code writes the entire JSON object into memory before serialising, # but we should consider streaming the output to deal with arbitrarily large # graphs. 
import warnings from rdflib.serializer import Serializer from rdflib.graph import Graph from rdflib.term import URIRef, Literal, BNode from rdflib.namespace import RDF, XSD from typing import IO, Optional from ..shared.jsonld.context import Context, UNDEF from ..shared.jsonld.util import json from ..shared.jsonld.keys import CONTEXT, GRAPH, ID, VOCAB, LIST, SET, LANG __all__ = ["JsonLDSerializer", "from_rdf"] PLAIN_LITERAL_TYPES = {XSD.boolean, XSD.integer, XSD.double, XSD.string} class JsonLDSerializer(Serializer): def __init__(self, store: Graph): super(JsonLDSerializer, self).__init__(store) def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, **kwargs, ): # TODO: docstring w. args and return value encoding = encoding or "utf-8" if encoding not in ("utf-8", "utf-16"): warnings.warn( "JSON should be encoded as unicode. " f"Given encoding was: {encoding}" ) context_data = kwargs.get("context") use_native_types = (kwargs.get("use_native_types", False),) use_rdf_type = kwargs.get("use_rdf_type", False) auto_compact = kwargs.get("auto_compact", False) indent = kwargs.get("indent", 2) separators = kwargs.get("separators", (",", ": ")) sort_keys = kwargs.get("sort_keys", True) ensure_ascii = kwargs.get("ensure_ascii", False) obj = from_rdf( self.store, context_data, base, use_native_types, use_rdf_type, auto_compact=auto_compact, ) data = json.dumps( obj, indent=indent, separators=separators, sort_keys=sort_keys, ensure_ascii=ensure_ascii, ) stream.write(data.encode(encoding, "replace")) def from_rdf( graph, context_data=None, base=None, use_native_types=False, use_rdf_type=False, auto_compact=False, startnode=None, index=False, ): # TODO: docstring w. 
args and return value # TODO: support for index and startnode if not context_data and auto_compact: context_data = dict( (pfx, str(ns)) for (pfx, ns) in graph.namespaces() if pfx and str(ns) != "http://www.w3.org/XML/1998/namespace" ) if isinstance(context_data, Context): context = context_data context_data = context.to_dict() else: context = Context(context_data, base=base) converter = Converter(context, use_native_types, use_rdf_type) result = converter.convert(graph) if converter.context.active: if isinstance(result, list): result = {context.get_key(GRAPH): result} result[CONTEXT] = context_data return result class Converter(object): def __init__(self, context, use_native_types, use_rdf_type): self.context = context self.use_native_types = context.active or use_native_types self.use_rdf_type = use_rdf_type def convert(self, graph): # TODO: bug in rdflib dataset parsing (nquads et al): # plain triples end up in separate unnamed graphs (rdflib issue #436) if graph.context_aware: default_graph = Graph() graphs = [default_graph] for g in graph.contexts(): if isinstance(g.identifier, URIRef): graphs.append(g) else: default_graph += g else: graphs = [graph] context = self.context objs = [] for g in graphs: obj = {} graphname = None if isinstance(g.identifier, URIRef): graphname = context.shrink_iri(g.identifier) obj[context.id_key] = graphname nodes = self.from_graph(g) if not graphname and len(nodes) == 1: obj.update(nodes[0]) else: if not nodes: continue obj[context.graph_key] = nodes if objs and objs[0].get(context.get_key(ID)) == graphname: objs[0].update(obj) else: objs.append(obj) if len(graphs) == 1 and len(objs) == 1 and not self.context.active: default = objs[0] items = default.get(context.graph_key) if len(default) == 1 and items: objs = items elif len(objs) == 1 and self.context.active: objs = objs[0] return objs def from_graph(self, graph): nodemap = {} for s in set(graph.subjects()): ## only iri:s and unreferenced (rest will be promoted to top if needed) 
if isinstance(s, URIRef) or ( isinstance(s, BNode) and not any(graph.subjects(None, s)) ): self.process_subject(graph, s, nodemap) return list(nodemap.values()) def process_subject(self, graph, s, nodemap): if isinstance(s, URIRef): node_id = self.context.shrink_iri(s) elif isinstance(s, BNode): node_id = s.n3() else: node_id = None # used_as_object = any(graph.subjects(None, s)) if node_id in nodemap: return None node = {} node[self.context.id_key] = node_id nodemap[node_id] = node for p, o in graph.predicate_objects(s): self.add_to_node(graph, s, p, o, node, nodemap) return node def add_to_node(self, graph, s, p, o, s_node, nodemap): context = self.context if isinstance(o, Literal): datatype = str(o.datatype) if o.datatype else None language = o.language term = context.find_term(str(p), datatype, language=language) else: containers = [LIST, None] if graph.value(o, RDF.first) else [None] for container in containers: for coercion in (ID, VOCAB, UNDEF): term = context.find_term(str(p), coercion, container) if term: break if term: break node = None use_set = not context.active if term: p_key = term.name if term.type: node = self.type_coerce(o, term.type) elif term.language and o.language == term.language: node = str(o) elif context.language and (term.language is None and o.language is None): node = str(o) if LIST in term.container: node = [ self.type_coerce(v, term.type) or self.to_raw_value(graph, s, v, nodemap) for v in self.to_collection(graph, o) ] elif LANG in term.container and language: value = s_node.setdefault(p_key, {}) values = value.get(language) node = str(o) if values or SET in term.container: if not isinstance(values, list): value[language] = values = [values] values.append(node) else: value[language] = node return elif SET in term.container: use_set = True else: p_key = context.to_symbol(p) # TODO: for coercing curies - quite clumsy; unify to_symbol and find_term? 
key_term = context.terms.get(p_key) if key_term and (key_term.type or key_term.container): p_key = p if not term and p == RDF.type and not self.use_rdf_type: if isinstance(o, URIRef): node = context.to_symbol(o) p_key = context.type_key if node is None: node = self.to_raw_value(graph, s, o, nodemap) value = s_node.get(p_key) if value: if not isinstance(value, list): value = [value] value.append(node) elif use_set: value = [node] else: value = node s_node[p_key] = value def type_coerce(self, o, coerce_type): if coerce_type == ID: if isinstance(o, URIRef): return self.context.shrink_iri(o) elif isinstance(o, BNode): return o.n3() else: return o elif coerce_type == VOCAB and isinstance(o, URIRef): return self.context.to_symbol(o) elif isinstance(o, Literal) and str(o.datatype) == coerce_type: return o else: return None def to_raw_value(self, graph, s, o, nodemap): context = self.context coll = self.to_collection(graph, o) if coll is not None: coll = [ self.to_raw_value(graph, s, lo, nodemap) for lo in self.to_collection(graph, o) ] return {context.list_key: coll} elif isinstance(o, BNode): embed = ( False # TODO: self.context.active or using startnode and only one ref ) onode = self.process_subject(graph, o, nodemap) if onode: if embed and not any(s2 for s2 in graph.subjects(None, o) if s2 != s): return onode else: nodemap[onode[context.id_key]] = onode return {context.id_key: o.n3()} elif isinstance(o, URIRef): # TODO: embed if o != startnode (else reverse) return {context.id_key: context.shrink_iri(o)} elif isinstance(o, Literal): # TODO: if compact native = self.use_native_types and o.datatype in PLAIN_LITERAL_TYPES if native: v = o.toPython() else: v = str(o) if o.datatype: if native: if self.context.active: return v else: return {context.value_key: v} return { context.type_key: context.to_symbol(o.datatype), context.value_key: v, } elif o.language and o.language != context.language: return {context.lang_key: o.language, context.value_key: v} elif not 
context.active or context.language and not o.language: return {context.value_key: v} else: return v def to_collection(self, graph, l_): if l_ != RDF.nil and not graph.value(l_, RDF.first): return None list_nodes = [] chain = set([l_]) while l_: if l_ == RDF.nil: return list_nodes if isinstance(l_, URIRef): return None first, rest = None, None for p, o in graph.predicate_objects(l_): if not first and p == RDF.first: first = o elif not rest and p == RDF.rest: rest = o elif p != RDF.type or o != RDF.List: return None list_nodes.append(first) l_ = rest if l_ in chain: return None chain.add(l_) rdflib-6.1.1/rdflib/plugins/serializers/longturtle.py000066400000000000000000000225721415774155300230350ustar00rootroot00000000000000""" LongTurtle RDF graph serializer for RDFLib. See for syntax specification. This variant, longturtle as opposed to just turtle, makes some small format changes to turtle - the original turtle serializer. It: * uses PREFIX instead of @prefix * uses BASE instead of @base * adds a new line at RDF.type, or 'a' * adds a newline and an indent for all triples with more than one object (object list) * adds a new line and ';' for the last triple in a set with '.' 
on the start of the next line * uses default encoding (encode()) is used instead of "latin-1" - Nicholas Car, 2021 """ from rdflib.term import BNode, Literal, URIRef from rdflib.exceptions import Error from .turtle import RecursiveSerializer from rdflib.namespace import RDF __all__ = ["LongTurtleSerializer"] SUBJECT = 0 VERB = 1 OBJECT = 2 _GEN_QNAME_FOR_DT = False _SPACIOUS_OUTPUT = False class LongTurtleSerializer(RecursiveSerializer): short_name = "longturtle" indentString = " " def __init__(self, store): self._ns_rewrite = {} super(LongTurtleSerializer, self).__init__(store) self.keywords = {RDF.type: "a"} self.reset() self.stream = None self._spacious = _SPACIOUS_OUTPUT def addNamespace(self, prefix, namespace): # Turtle does not support prefixes that start with _ # if they occur in the graph, rewrite to p_blah # this is more complicated since we need to make sure p_blah # does not already exist. And we register namespaces as we go, i.e. # we may first see a triple with prefix _9 - rewrite it to p_9 # and then later find a triple with a "real" p_9 prefix # so we need to keep track of ns rewrites we made so far. 
if (prefix > "" and prefix[0] == "_") or self.namespaces.get( prefix, namespace ) != namespace: if prefix not in self._ns_rewrite: p = "p" + prefix while p in self.namespaces: p = "p" + p self._ns_rewrite[prefix] = p prefix = self._ns_rewrite.get(prefix, prefix) super(LongTurtleSerializer, self).addNamespace(prefix, namespace) return prefix def reset(self): super(LongTurtleSerializer, self).reset() self._shortNames = {} self._started = False self._ns_rewrite = {} def serialize(self, stream, base=None, encoding=None, spacious=None, **args): self.reset() self.stream = stream # if base is given here, use, if not and a base is set for the graph use that if base is not None: self.base = base elif self.store.base is not None: self.base = self.store.base if spacious is not None: self._spacious = spacious self.preprocess() subjects_list = self.orderSubjects() self.startDocument() firstTime = True for subject in subjects_list: if self.isDone(subject): continue if firstTime: firstTime = False if self.statement(subject) and not firstTime: self.write("\n") self.endDocument() self.write("\n") self.base = None def preprocessTriple(self, triple): super(LongTurtleSerializer, self).preprocessTriple(triple) for i, node in enumerate(triple): if node in self.keywords: continue # Don't use generated prefixes for subjects and objects self.getQName(node, gen_prefix=(i == VERB)) if isinstance(node, Literal) and node.datatype: self.getQName(node.datatype, gen_prefix=_GEN_QNAME_FOR_DT) p = triple[1] if isinstance(p, BNode): # hmm - when is P ever a bnode? self._references[p] += 1 def getQName(self, uri, gen_prefix=True): if not isinstance(uri, URIRef): return None try: parts = self.store.compute_qname(uri, generate=gen_prefix) except: # is the uri a namespace in itself? pfx = self.store.store.prefix(uri) if pfx is not None: parts = (pfx, uri, "") else: # nothing worked return None prefix, namespace, local = parts # QName cannot end with . 
if local.endswith("."): return None prefix = self.addNamespace(prefix, namespace) return "%s:%s" % (prefix, local) def startDocument(self): self._started = True ns_list = sorted(self.namespaces.items()) if self.base: self.write(self.indent() + "BASE <%s>\n" % self.base) for prefix, uri in ns_list: self.write(self.indent() + "PREFIX %s: <%s>\n" % (prefix, uri)) if ns_list and self._spacious: self.write("\n") def endDocument(self): if self._spacious: self.write("\n") def statement(self, subject): self.subjectDone(subject) return self.s_squared(subject) or self.s_default(subject) def s_default(self, subject): self.write("\n" + self.indent()) self.path(subject, SUBJECT) self.write("\n" + self.indent()) self.predicateList(subject) self.write(" ;\n.") return True def s_squared(self, subject): if (self._references[subject] > 0) or not isinstance(subject, BNode): return False self.write("\n" + self.indent() + "[]") self.predicateList(subject) self.write(" ;\n.") return True def path(self, node, position, newline=False): if not ( self.p_squared(node, position, newline) or self.p_default(node, position, newline) ): raise Error("Cannot serialize node '%s'" % (node,)) def p_default(self, node, position, newline=False): if position != SUBJECT and not newline: self.write(" ") self.write(self.label(node, position)) return True def label(self, node, position): if node == RDF.nil: return "()" if position is VERB and node in self.keywords: return self.keywords[node] if isinstance(node, Literal): return node._literal_n3( use_plain=True, qname_callback=lambda dt: self.getQName(dt, _GEN_QNAME_FOR_DT), ) else: node = self.relativize(node) return self.getQName(node, position == VERB) or node.n3() def p_squared(self, node, position, newline=False): if ( not isinstance(node, BNode) or node in self._serialized or self._references[node] > 1 or position == SUBJECT ): return False if not newline: self.write(" ") if self.isValidList(node): # this is a list self.depth += 2 self.write("(\n") 
self.depth -= 1 self.doList(node) self.depth -= 1 self.write("\n" + self.indent(1) + ")") else: self.subjectDone(node) self.depth += 2 self.write("[\n") self.depth -= 1 self.predicateList(node, newline=False) self.depth -= 1 self.write("\n" + self.indent(1) + "]") return True def isValidList(self, l_): """ Checks if l is a valid RDF list, i.e. no nodes have other properties. """ try: if self.store.value(l_, RDF.first) is None: return False except: return False while l_: if l_ != RDF.nil and len(list(self.store.predicate_objects(l_))) != 2: return False l_ = self.store.value(l_, RDF.rest) return True def doList(self, l_): i = 0 while l_: item = self.store.value(l_, RDF.first) if item is not None: if i == 0: self.write(self.indent(1)) else: self.write("\n" + self.indent(1)) self.path(item, OBJECT, newline=True) self.subjectDone(l_) l_ = self.store.value(l_, RDF.rest) i += 1 def predicateList(self, subject, newline=False): properties = self.buildPredicateHash(subject) propList = self.sortProperties(properties) if len(propList) == 0: return self.write(self.indent(1)) self.verb(propList[0], newline=True) self.objectList(properties[propList[0]]) for predicate in propList[1:]: self.write(" ;\n" + self.indent(1)) self.verb(predicate, newline=True) self.objectList(properties[predicate]) def verb(self, node, newline=False): self.path(node, VERB, newline) def objectList(self, objects): count = len(objects) if count == 0: return depthmod = (count == 1) and 0 or 1 self.depth += depthmod first_nl = False if count > 1: self.write("\n" + self.indent(1)) first_nl = True self.path(objects[0], OBJECT, newline=first_nl) for obj in objects[1:]: self.write(" ,\n") self.write(self.indent(1)) self.path(obj, OBJECT, newline=True) self.depth -= depthmod rdflib-6.1.1/rdflib/plugins/serializers/n3.py000066400000000000000000000073671415774155300211630ustar00rootroot00000000000000""" Notation 3 (N3) RDF graph serializer for RDFLib. 
""" from rdflib.graph import Graph from rdflib.namespace import Namespace, OWL from rdflib.plugins.serializers.turtle import TurtleSerializer, SUBJECT, OBJECT __all__ = ["N3Serializer"] SWAP_LOG = Namespace("http://www.w3.org/2000/10/swap/log#") class N3Serializer(TurtleSerializer): short_name = "n3" def __init__(self, store: Graph, parent=None): super(N3Serializer, self).__init__(store) self.keywords.update({OWL.sameAs: "=", SWAP_LOG.implies: "=>"}) self.parent = parent def reset(self): super(N3Serializer, self).reset() self._stores = {} def subjectDone(self, subject): super(N3Serializer, self).subjectDone(subject) if self.parent: self.parent.subjectDone(subject) def isDone(self, subject): return super(N3Serializer, self).isDone(subject) and ( not self.parent or self.parent.isDone(subject) ) def startDocument(self): super(N3Serializer, self).startDocument() # if not isinstance(self.store, N3Store): # return # # all_list = [self.label(var) for var in # self.store.get_universals(recurse=False)] # all_list.sort() # some_list = [self.label(var) for var in # self.store.get_existentials(recurse=False)] # some_list.sort() # # for var in all_list: # self.write('\n'+self.indent()+'@forAll %s. '%var) # for var in some_list: # self.write('\n'+self.indent()+'@forSome %s. 
'%var) # # if (len(all_list) + len(some_list)) > 0: # self.write('\n') def endDocument(self): if not self.parent: super(N3Serializer, self).endDocument() def indent(self, modifier=0): indent = super(N3Serializer, self).indent(modifier) if self.parent is not None: indent += self.parent.indent() # modifier) return indent def preprocessTriple(self, triple): super(N3Serializer, self).preprocessTriple(triple) if isinstance(triple[0], Graph): for t in triple[0]: self.preprocessTriple(t) if isinstance(triple[2], Graph): for t in triple[2]: self.preprocessTriple(t) def getQName(self, uri, gen_prefix=True): qname = None if self.parent is not None: qname = self.parent.getQName(uri, gen_prefix) if qname is None: qname = super(N3Serializer, self).getQName(uri, gen_prefix) return qname def statement(self, subject): self.subjectDone(subject) properties = self.buildPredicateHash(subject) if len(properties) == 0: return False return self.s_clause(subject) or super(N3Serializer, self).statement(subject) def path(self, node, position, newline=False): if not self.p_clause(node, position): super(N3Serializer, self).path(node, position, newline) def s_clause(self, subject): if isinstance(subject, Graph): self.write("\n" + self.indent()) self.p_clause(subject, SUBJECT) self.predicateList(subject) self.write(" .") return True else: return False def p_clause(self, node, position): if isinstance(node, Graph): self.subjectDone(node) if position is OBJECT: self.write(" ") self.write("{") self.depth += 1 serializer = N3Serializer(node, parent=self) serializer.serialize(self.stream) self.depth -= 1 self.write(self.indent() + "}") return True else: return False rdflib-6.1.1/rdflib/plugins/serializers/nquads.py000066400000000000000000000033661415774155300221310ustar00rootroot00000000000000from typing import IO, Optional import warnings from rdflib.graph import ConjunctiveGraph, Graph from rdflib.term import Literal from rdflib.serializer import Serializer from rdflib.plugins.serializers.nt 
import _quoteLiteral __all__ = ["NQuadsSerializer"] class NQuadsSerializer(Serializer): def __init__(self, store: Graph): if not store.context_aware: raise Exception( "NQuads serialization only makes " "sense for context-aware stores!" ) super(NQuadsSerializer, self).__init__(store) self.store: ConjunctiveGraph def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, **args, ): if base is not None: warnings.warn("NQuadsSerializer does not support base.") if encoding is not None and encoding.lower() != self.encoding.lower(): warnings.warn( "NQuadsSerializer does not use custom encoding. " f"Given encoding was: {encoding}" ) encoding = self.encoding for context in self.store.contexts(): for triple in context: stream.write( _nq_row(triple, context.identifier).encode(encoding, "replace") ) stream.write("\n".encode("latin-1")) def _nq_row(triple, context): if isinstance(triple[2], Literal): return "%s %s %s %s .\n" % ( triple[0].n3(), triple[1].n3(), _quoteLiteral(triple[2]), context.n3(), ) else: return "%s %s %s %s .\n" % ( triple[0].n3(), triple[1].n3(), triple[2].n3(), context.n3(), ) rdflib-6.1.1/rdflib/plugins/serializers/nt.py000066400000000000000000000050711415774155300212520ustar00rootroot00000000000000""" N-Triples RDF graph serializer for RDFLib. See for details about the format. """ from typing import IO, Optional from rdflib.graph import Graph from rdflib.term import Literal from rdflib.serializer import Serializer import warnings import codecs __all__ = ["NTSerializer"] class NTSerializer(Serializer): """ Serializes RDF graphs to NTriples format. """ def __init__(self, store: Graph): Serializer.__init__(self, store) def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = "utf-8", **args, ): if base is not None: warnings.warn("NTSerializer does not support base.") if encoding != "utf-8": warnings.warn( "NTSerializer always uses UTF-8 encoding. 
" f"Given encoding was: {encoding}" ) for triple in self.store: stream.write(_nt_row(triple).encode()) stream.write("\n".encode()) class NT11Serializer(NTSerializer): """ Serializes RDF graphs to RDF 1.1 NTriples format. Exactly like nt - only utf8 encoded. """ def __init__(self, store: Graph): Serializer.__init__(self, store) # default to utf-8 def _nt_row(triple): if isinstance(triple[2], Literal): return "%s %s %s .\n" % ( triple[0].n3(), triple[1].n3(), _quoteLiteral(triple[2]), ) else: return "%s %s %s .\n" % (triple[0].n3(), triple[1].n3(), triple[2].n3()) def _quoteLiteral(l_): """ a simpler version of term.Literal.n3() """ encoded = _quote_encode(l_) if l_.language: if l_.datatype: raise Exception("Literal has datatype AND language!") return "%s@%s" % (encoded, l_.language) elif l_.datatype: return "%s^^<%s>" % (encoded, l_.datatype) else: return "%s" % encoded def _quote_encode(l_): return '"%s"' % l_.replace("\\", "\\\\").replace("\n", "\\n").replace( '"', '\\"' ).replace("\r", "\\r") def _nt_unicode_error_resolver(err): """ Do unicode char replaces as defined in https://www.w3.org/TR/2004/REC-rdf-testcases-20040210/#ntrip_strings """ def _replace_single(c): c = ord(c) fmt = "\\u%04X" if c <= 0xFFFF else "\\U%08X" return fmt % c string = err.object[err.start : err.end] return "".join(_replace_single(c) for c in string), err.end codecs.register_error("_rdflib_nt_escape", _nt_unicode_error_resolver) rdflib-6.1.1/rdflib/plugins/serializers/rdfxml.py000066400000000000000000000312031415774155300221210ustar00rootroot00000000000000from typing import IO, Dict, Optional, Set from rdflib.plugins.serializers.xmlwriter import XMLWriter from rdflib.namespace import Namespace, RDF, RDFS # , split_uri from rdflib.plugins.parsers.RDFVOC import RDFVOC from rdflib.graph import Graph from rdflib.term import Identifier, URIRef, Literal, BNode from rdflib.util import first, more_than from rdflib.collection import Collection from rdflib.serializer import Serializer from 
xml.sax.saxutils import quoteattr, escape import xml.dom.minidom from .xmlwriter import ESCAPE_ENTITIES __all__ = ["fix", "XMLSerializer", "PrettyXMLSerializer"] class XMLSerializer(Serializer): def __init__(self, store: Graph): super(XMLSerializer, self).__init__(store) def __bindings(self): store = self.store nm = store.namespace_manager bindings = {} for predicate in set(store.predicates()): prefix, namespace, name = nm.compute_qname_strict(predicate) bindings[prefix] = URIRef(namespace) RDFNS = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#") if "rdf" in bindings: assert bindings["rdf"] == RDFNS else: bindings["rdf"] = RDFNS for prefix, namespace in bindings.items(): yield prefix, namespace def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, **args, ): # if base is given here, use that, if not and a base is set for the graph use that if base is not None: self.base = base elif self.store.base is not None: self.base = self.store.base self.__stream = stream self.__serialized: Dict[Identifier, int] = {} encoding = self.encoding self.write = write = lambda uni: stream.write(uni.encode(encoding, "replace")) # startDocument write('\n' % self.encoding) # startRDF write("\n") # write out triples by subject for subject in self.store.subjects(): self.subject(subject, 1) # endRDF write("\n") # Set to None so that the memory can get garbage collected. 
# self.__serialized = None del self.__serialized def subject(self, subject, depth=1): if subject not in self.__serialized: self.__serialized[subject] = 1 if isinstance(subject, (BNode, URIRef)): write = self.write indent = " " * depth element_name = "rdf:Description" if isinstance(subject, BNode): write('%s<%s rdf:nodeID="%s"' % (indent, element_name, subject)) else: uri = quoteattr(self.relativize(subject)) write("%s<%s rdf:about=%s" % (indent, element_name, uri)) if (subject, None, None) in self.store: write(">\n") for predicate, object in self.store.predicate_objects(subject): self.predicate(predicate, object, depth + 1) write("%s\n" % (indent, element_name)) else: write("/>\n") def predicate(self, predicate, object, depth=1): write = self.write indent = " " * depth qname = self.store.namespace_manager.qname_strict(predicate) if isinstance(object, Literal): attributes = "" if object.language: attributes += ' xml:lang="%s"' % object.language if object.datatype: attributes += ' rdf:datatype="%s"' % object.datatype write( "%s<%s%s>%s\n" % (indent, qname, attributes, escape(object, ESCAPE_ENTITIES), qname) ) else: if isinstance(object, BNode): write('%s<%s rdf:nodeID="%s"/>\n' % (indent, qname, object)) else: write( "%s<%s rdf:resource=%s/>\n" % (indent, qname, quoteattr(self.relativize(object))) ) XMLLANG = "http://www.w3.org/XML/1998/namespacelang" XMLBASE = "http://www.w3.org/XML/1998/namespacebase" OWL_NS = Namespace("http://www.w3.org/2002/07/owl#") # TODO: def fix(val): "strip off _: from nodeIDs... 
as they are not valid NCNames" if val.startswith("_:"): return val[2:] else: return val class PrettyXMLSerializer(Serializer): def __init__(self, store: Graph, max_depth=3): super(PrettyXMLSerializer, self).__init__(store) self.forceRDFAbout: Set[URIRef] = set() def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, **args, ): self.__serialized: Dict[Identifier, int] = {} store = self.store # if base is given here, use that, if not and a base is set for the graph use that if base is not None: self.base = base elif store.base is not None: self.base = store.base self.max_depth = args.get("max_depth", 3) assert self.max_depth > 0, "max_depth must be greater than 0" self.nm = nm = store.namespace_manager self.writer = writer = XMLWriter(stream, nm, encoding) namespaces = {} possible = set(store.predicates()).union(store.objects(None, RDF.type)) for predicate in possible: prefix, namespace, local = nm.compute_qname_strict(predicate) namespaces[prefix] = namespace namespaces["rdf"] = "http://www.w3.org/1999/02/22-rdf-syntax-ns#" writer.push(RDFVOC.RDF) if "xml_base" in args: writer.attribute(XMLBASE, args["xml_base"]) elif self.base: writer.attribute(XMLBASE, self.base) writer.namespaces(namespaces.items()) subject: Identifier # Write out subjects that can not be inline for subject in store.subjects(): # type: ignore[assignment] if (None, None, subject) in store: if (subject, None, subject) in store: self.subject(subject, 1) else: self.subject(subject, 1) # write out anything that has not yet been reached # write out BNodes last (to ensure they can be inlined where possible) bnodes = set() for subject in store.subjects(): # type: ignore[assignment] if isinstance(subject, BNode): bnodes.add(subject) continue self.subject(subject, 1) # now serialize only those BNodes that have not been serialized yet for bnode in bnodes: if bnode not in self.__serialized: self.subject(subject, 1) writer.pop(RDFVOC.RDF) 
stream.write("\n".encode("latin-1")) # Set to None so that the memory can get garbage collected. self.__serialized = None # type: ignore[assignment] def subject(self, subject: Identifier, depth: int = 1): store = self.store writer = self.writer if subject in self.forceRDFAbout: writer.push(RDFVOC.Description) writer.attribute(RDFVOC.about, self.relativize(subject)) writer.pop(RDFVOC.Description) self.forceRDFAbout.remove(subject) # type: ignore[arg-type] elif subject not in self.__serialized: self.__serialized[subject] = 1 type = first(store.objects(subject, RDF.type)) try: self.nm.qname(type) except: type = None element = type or RDFVOC.Description writer.push(element) if isinstance(subject, BNode): def subj_as_obj_more_than(ceil): return True # more_than(store.triples((None, None, subject)), ceil) # here we only include BNode labels if they are referenced # more than once (this reduces the use of redundant BNode # identifiers) if subj_as_obj_more_than(1): writer.attribute(RDFVOC.nodeID, fix(subject)) else: writer.attribute(RDFVOC.about, self.relativize(subject)) if (subject, None, None) in store: for predicate, object in store.predicate_objects(subject): if not (predicate == RDF.type and object == type): self.predicate(predicate, object, depth + 1) writer.pop(element) elif subject in self.forceRDFAbout: # TODO FIXME?: this looks like a duplicate of first condition writer.push(RDFVOC.Description) writer.attribute(RDFVOC.about, self.relativize(subject)) writer.pop(RDFVOC.Description) self.forceRDFAbout.remove(subject) # type: ignore[arg-type] def predicate(self, predicate, object, depth=1): writer = self.writer store = self.store writer.push(predicate) if isinstance(object, Literal): if object.language: writer.attribute(XMLLANG, object.language) if object.datatype == RDF.XMLLiteral and isinstance( object.value, xml.dom.minidom.Document ): writer.attribute(RDFVOC.parseType, "Literal") writer.text("") writer.stream.write(object) else: if object.datatype: 
writer.attribute(RDFVOC.datatype, object.datatype) writer.text(object) elif object in self.__serialized or not (object, None, None) in store: if isinstance(object, BNode): if more_than(store.triples((None, None, object)), 0): writer.attribute(RDFVOC.nodeID, fix(object)) else: writer.attribute(RDFVOC.resource, self.relativize(object)) else: if first(store.objects(object, RDF.first)): # may not have type # RDF.List self.__serialized[object] = 1 # Warn that any assertions on object other than # RDF.first and RDF.rest are ignored... including RDF.List import warnings warnings.warn( "Assertions on %s other than RDF.first " % repr(object) + "and RDF.rest are ignored ... including RDF.List", UserWarning, stacklevel=2, ) writer.attribute(RDFVOC.parseType, "Collection") col = Collection(store, object) for item in col: if isinstance(item, URIRef): self.forceRDFAbout.add(item) self.subject(item) if not isinstance(item, URIRef): self.__serialized[item] = 1 else: if first( store.triples_choices( (object, RDF.type, [OWL_NS.Class, RDFS.Class]) ) ) and isinstance(object, URIRef): writer.attribute(RDFVOC.resource, self.relativize(object)) elif depth <= self.max_depth: self.subject(object, depth + 1) elif isinstance(object, BNode): if ( object not in self.__serialized and (object, None, None) in store and len(list(store.subjects(object=object))) == 1 ): # inline blank nodes if they haven't been serialized yet # and are only referenced once (regardless of depth) self.subject(object, depth + 1) else: writer.attribute(RDFVOC.nodeID, fix(object)) else: writer.attribute(RDFVOC.resource, self.relativize(object)) writer.pop(predicate) rdflib-6.1.1/rdflib/plugins/serializers/trig.py000066400000000000000000000066051415774155300216020ustar00rootroot00000000000000""" Trig RDF graph serializer for RDFLib. See for syntax specification. 
""" from collections import defaultdict from typing import IO, TYPE_CHECKING, Optional, Union from rdflib.graph import ConjunctiveGraph, Graph from rdflib.plugins.serializers.turtle import TurtleSerializer from rdflib.term import BNode, Node __all__ = ["TrigSerializer"] class TrigSerializer(TurtleSerializer): short_name = "trig" indentString = 4 * " " def __init__(self, store: Union[Graph, ConjunctiveGraph]): self.default_context: Optional[Node] if store.context_aware: if TYPE_CHECKING: assert isinstance(store, ConjunctiveGraph) self.contexts = list(store.contexts()) self.default_context = store.default_context.identifier if store.default_context: self.contexts.append(store.default_context) else: self.contexts = [store] self.default_context = None super(TrigSerializer, self).__init__(store) def preprocess(self): for context in self.contexts: self.store = context self.getQName(context.identifier) self._references = defaultdict(int) self._subjects = {} for triple in context: self.preprocessTriple(triple) self._contexts[context] = ( self.orderSubjects(), self._subjects, self._references, ) def reset(self): super(TrigSerializer, self).reset() self._contexts = {} def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, spacious: Optional[bool] = None, **args, ): self.reset() self.stream = stream # if base is given here, use that, if not and a base is set for the graph use that if base is not None: self.base = base elif self.store.base is not None: self.base = self.store.base if spacious is not None: self._spacious = spacious self.preprocess() self.startDocument() firstTime = True for store, (ordered_subjects, subjects, ref) in self._contexts.items(): if not ordered_subjects: continue self._references = ref self._serialized = {} self.store = store self._subjects = subjects if self.default_context and store.identifier == self.default_context: self.write(self.indent() + "\n{") else: if isinstance(store.identifier, BNode): iri = 
store.identifier.n3() else: iri = self.getQName(store.identifier) if iri is None: iri = store.identifier.n3() self.write(self.indent() + "\n%s {" % iri) self.depth += 1 for subject in ordered_subjects: if self.isDone(subject): continue if firstTime: firstTime = False if self.statement(subject) and not firstTime: self.write("\n") self.depth -= 1 self.write("}\n") self.endDocument() stream.write("\n".encode("latin-1")) rdflib-6.1.1/rdflib/plugins/serializers/trix.py000066400000000000000000000064241415774155300216220ustar00rootroot00000000000000from typing import IO, Optional from rdflib.serializer import Serializer from rdflib.plugins.serializers.xmlwriter import XMLWriter from rdflib.term import URIRef, Literal, BNode from rdflib.namespace import Namespace from rdflib.graph import Graph, ConjunctiveGraph __all__ = ["TriXSerializer"] # TODO: Move this somewhere central TRIXNS = Namespace("http://www.w3.org/2004/03/trix/trix-1/") XMLNS = Namespace("http://www.w3.org/XML/1998/namespace") class TriXSerializer(Serializer): def __init__(self, store: Graph): super(TriXSerializer, self).__init__(store) if not store.context_aware: raise Exception( "TriX serialization only makes sense for context-aware stores" ) def serialize( self, stream: IO[bytes], base: Optional[str] = None, encoding: Optional[str] = None, **args, ): nm = self.store.namespace_manager self.writer = XMLWriter(stream, nm, encoding, extra_ns={"": TRIXNS}) self.writer.push(TRIXNS["TriX"]) # if base is given here, use that, if not and a base is set for the graph use that if base is None and self.store.base is not None: base = self.store.base if base is not None: self.writer.attribute("http://www.w3.org/XML/1998/namespacebase", base) self.writer.namespaces() if isinstance(self.store, ConjunctiveGraph): for subgraph in self.store.contexts(): self._writeGraph(subgraph) elif isinstance(self.store, Graph): self._writeGraph(self.store) else: raise Exception("Unknown graph type: " + type(self.store)) self.writer.pop() 
stream.write("\n".encode("latin-1")) def _writeGraph(self, graph): self.writer.push(TRIXNS["graph"]) if graph.base: self.writer.attribute( "http://www.w3.org/XML/1998/namespacebase", graph.base ) if isinstance(graph.identifier, URIRef): self.writer.element(TRIXNS["uri"], content=str(graph.identifier)) for triple in graph.triples((None, None, None)): self._writeTriple(triple) self.writer.pop() def _writeTriple(self, triple): self.writer.push(TRIXNS["triple"]) for component in triple: if isinstance(component, URIRef): self.writer.element(TRIXNS["uri"], content=str(component)) elif isinstance(component, BNode): self.writer.element(TRIXNS["id"], content=str(component)) elif isinstance(component, Literal): if component.datatype: self.writer.element( TRIXNS["typedLiteral"], content=str(component), attributes={TRIXNS["datatype"]: str(component.datatype)}, ) elif component.language: self.writer.element( TRIXNS["plainLiteral"], content=str(component), attributes={XMLNS["lang"]: str(component.language)}, ) else: self.writer.element(TRIXNS["plainLiteral"], content=str(component)) self.writer.pop() rdflib-6.1.1/rdflib/plugins/serializers/turtle.py000066400000000000000000000326301415774155300221510ustar00rootroot00000000000000""" Turtle RDF graph serializer for RDFLib. See for syntax specification. """ from collections import defaultdict from functools import cmp_to_key from rdflib.term import BNode, Literal, URIRef from rdflib.exceptions import Error from rdflib.serializer import Serializer from rdflib.namespace import RDF, RDFS __all__ = ["RecursiveSerializer", "TurtleSerializer"] def _object_comparator(a, b): """ for nice clean output we sort the objects of triples, some of them are literals, these are sorted according to the sort order of the underlying python objects in py3 not all things are comparable. This falls back on comparing string representations when not. 
""" try: if a > b: return 1 if a < b: return -1 return 0 except TypeError: a = str(a) b = str(b) return (a > b) - (a < b) class RecursiveSerializer(Serializer): topClasses = [RDFS.Class] predicateOrder = [RDF.type, RDFS.label] maxDepth = 10 indentString = " " roundtrip_prefixes = () def __init__(self, store): super(RecursiveSerializer, self).__init__(store) self.stream = None self.reset() def addNamespace(self, prefix, uri): if prefix in self.namespaces and self.namespaces[prefix] != uri: raise Exception( "Trying to override namespace prefix %s => %s, but it's already bound to %s" % (prefix, uri, self.namespaces[prefix]) ) self.namespaces[prefix] = uri def checkSubject(self, subject): """Check to see if the subject should be serialized yet""" if ( (self.isDone(subject)) or (subject not in self._subjects) or ((subject in self._topLevels) and (self.depth > 1)) or (isinstance(subject, URIRef) and (self.depth >= self.maxDepth)) ): return False return True def isDone(self, subject): """Return true if subject is serialized""" return subject in self._serialized def orderSubjects(self): seen = {} subjects = [] for classURI in self.topClasses: members = list(self.store.subjects(RDF.type, classURI)) members.sort() subjects.extend(members) for member in members: self._topLevels[member] = True seen[member] = True recursable = [ (isinstance(subject, BNode), self._references[subject], subject) for subject in self._subjects if subject not in seen ] recursable.sort() subjects.extend([subject for (isbnode, refs, subject) in recursable]) return subjects def preprocess(self): for triple in self.store.triples((None, None, None)): self.preprocessTriple(triple) def preprocessTriple(self, spo): s, p, o = spo self._references[o] += 1 self._subjects[s] = True def reset(self): self.depth = 0 self.lists = {} self.namespaces = {} self._references = defaultdict(int) self._serialized = {} self._subjects = {} self._topLevels = {} if self.roundtrip_prefixes: if hasattr(self.roundtrip_prefixes, 
"__iter__"): for prefix, ns in self.store.namespaces(): if prefix in self.roundtrip_prefixes: self.addNamespace(prefix, ns) else: for prefix, ns in self.store.namespaces(): self.addNamespace(prefix, ns) def buildPredicateHash(self, subject): """ Build a hash key by predicate to a list of objects for the given subject """ properties = {} for s, p, o in self.store.triples((subject, None, None)): oList = properties.get(p, []) oList.append(o) properties[p] = oList return properties def sortProperties(self, properties): """Take a hash from predicate uris to lists of values. Sort the lists of values. Return a sorted list of properties.""" # Sort object lists for prop, objects in properties.items(): objects.sort(key=cmp_to_key(_object_comparator)) # Make sorted list of properties propList = [] seen = {} for prop in self.predicateOrder: if (prop in properties) and (prop not in seen): propList.append(prop) seen[prop] = True props = list(properties.keys()) props.sort() for prop in props: if prop not in seen: propList.append(prop) seen[prop] = True return propList def subjectDone(self, subject): """Mark a subject as done.""" self._serialized[subject] = True def indent(self, modifier=0): """Returns indent string multiplied by the depth""" return (self.depth + modifier) * self.indentString def write(self, text): """Write text in given encoding.""" self.stream.write(text.encode(self.encoding, "replace")) SUBJECT = 0 VERB = 1 OBJECT = 2 _GEN_QNAME_FOR_DT = False _SPACIOUS_OUTPUT = False class TurtleSerializer(RecursiveSerializer): short_name = "turtle" indentString = " " def __init__(self, store): self._ns_rewrite = {} super(TurtleSerializer, self).__init__(store) self.keywords = {RDF.type: "a"} self.reset() self.stream = None self._spacious = _SPACIOUS_OUTPUT def addNamespace(self, prefix, namespace): # Turtle does not support prefix that start with _ # if they occur in the graph, rewrite to p_blah # this is more complicated since we need to make sure p_blah # does not already 
exist. And we register namespaces as we go, i.e. # we may first see a triple with prefix _9 - rewrite it to p_9 # and then later find a triple with a "real" p_9 prefix # so we need to keep track of ns rewrites we made so far. if (prefix > "" and prefix[0] == "_") or self.namespaces.get( prefix, namespace ) != namespace: if prefix not in self._ns_rewrite: p = "p" + prefix while p in self.namespaces: p = "p" + p self._ns_rewrite[prefix] = p prefix = self._ns_rewrite.get(prefix, prefix) super(TurtleSerializer, self).addNamespace(prefix, namespace) return prefix def reset(self): super(TurtleSerializer, self).reset() self._shortNames = {} self._started = False self._ns_rewrite = {} def serialize(self, stream, base=None, encoding=None, spacious=None, **args): self.reset() self.stream = stream # if base is given here, use that, if not and a base is set for the graph use that if base is not None: self.base = base elif self.store.base is not None: self.base = self.store.base if spacious is not None: self._spacious = spacious self.preprocess() subjects_list = self.orderSubjects() self.startDocument() firstTime = True for subject in subjects_list: if self.isDone(subject): continue if firstTime: firstTime = False if self.statement(subject) and not firstTime: self.write("\n") self.endDocument() stream.write("\n".encode("latin-1")) self.base = None def preprocessTriple(self, triple): super(TurtleSerializer, self).preprocessTriple(triple) for i, node in enumerate(triple): if node in self.keywords: continue # Don't use generated prefixes for subjects and objects self.getQName(node, gen_prefix=(i == VERB)) if isinstance(node, Literal) and node.datatype: self.getQName(node.datatype, gen_prefix=_GEN_QNAME_FOR_DT) p = triple[1] if isinstance(p, BNode): # hmm - when is P ever a bnode? 
self._references[p] += 1 def getQName(self, uri, gen_prefix=True): if not isinstance(uri, URIRef): return None parts = None try: parts = self.store.compute_qname(uri, generate=gen_prefix) except: # is the uri a namespace in itself? pfx = self.store.store.prefix(uri) if pfx is not None: parts = (pfx, uri, "") else: # nothing worked return None prefix, namespace, local = parts # QName cannot end with . if local.endswith("."): return None prefix = self.addNamespace(prefix, namespace) return "%s:%s" % (prefix, local) def startDocument(self): self._started = True ns_list = sorted(self.namespaces.items()) if self.base: self.write(self.indent() + "@base <%s> .\n" % self.base) for prefix, uri in ns_list: self.write(self.indent() + "@prefix %s: <%s> .\n" % (prefix, uri)) if ns_list and self._spacious: self.write("\n") def endDocument(self): if self._spacious: self.write("\n") def statement(self, subject): self.subjectDone(subject) return self.s_squared(subject) or self.s_default(subject) def s_default(self, subject): self.write("\n" + self.indent()) self.path(subject, SUBJECT) self.predicateList(subject) self.write(" .") return True def s_squared(self, subject): if (self._references[subject] > 0) or not isinstance(subject, BNode): return False self.write("\n" + self.indent() + "[]") self.predicateList(subject) self.write(" .") return True def path(self, node, position, newline=False): if not ( self.p_squared(node, position, newline) or self.p_default(node, position, newline) ): raise Error("Cannot serialize node '%s'" % (node,)) def p_default(self, node, position, newline=False): if position != SUBJECT and not newline: self.write(" ") self.write(self.label(node, position)) return True def label(self, node, position): if node == RDF.nil: return "()" if position is VERB and node in self.keywords: return self.keywords[node] if isinstance(node, Literal): return node._literal_n3( use_plain=True, qname_callback=lambda dt: self.getQName(dt, _GEN_QNAME_FOR_DT), ) else: node = 
self.relativize(node) return self.getQName(node, position == VERB) or node.n3() def p_squared(self, node, position, newline=False): if ( not isinstance(node, BNode) or node in self._serialized or self._references[node] > 1 or position == SUBJECT ): return False if not newline: self.write(" ") if self.isValidList(node): # this is a list self.write("(") self.depth += 1 # 2 self.doList(node) self.depth -= 1 # 2 self.write(" )") else: self.subjectDone(node) self.depth += 2 # self.write('[\n' + self.indent()) self.write("[") self.depth -= 1 # self.predicateList(node, newline=True) self.predicateList(node, newline=False) # self.write('\n' + self.indent() + ']') self.write(" ]") self.depth -= 1 return True def isValidList(self, l_): """ Checks if l is a valid RDF list, i.e. no nodes have other properties. """ try: if self.store.value(l_, RDF.first) is None: return False except: return False while l_: if l_ != RDF.nil and len(list(self.store.predicate_objects(l_))) != 2: return False l_ = self.store.value(l_, RDF.rest) return True def doList(self, l_): while l_: item = self.store.value(l_, RDF.first) if item is not None: self.path(item, OBJECT) self.subjectDone(l_) l_ = self.store.value(l_, RDF.rest) def predicateList(self, subject, newline=False): properties = self.buildPredicateHash(subject) propList = self.sortProperties(properties) if len(propList) == 0: return self.verb(propList[0], newline=newline) self.objectList(properties[propList[0]]) for predicate in propList[1:]: self.write(" ;\n" + self.indent(1)) self.verb(predicate, newline=True) self.objectList(properties[predicate]) def verb(self, node, newline=False): self.path(node, VERB, newline) def objectList(self, objects): count = len(objects) if count == 0: return depthmod = (count == 1) and 0 or 1 self.depth += depthmod self.path(objects[0], OBJECT) for obj in objects[1:]: self.write(",\n" + self.indent(1)) self.path(obj, OBJECT, newline=True) self.depth -= depthmod 
rdflib-6.1.1/rdflib/plugins/serializers/xmlwriter.py000066400000000000000000000064451415774155300226740ustar00rootroot00000000000000import codecs from xml.sax.saxutils import quoteattr, escape __all__ = ["XMLWriter"] ESCAPE_ENTITIES = {"\r": " "} class XMLWriter(object): def __init__(self, stream, namespace_manager, encoding=None, decl=1, extra_ns=None): encoding = encoding or "utf-8" encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) self.stream = stream = stream_writer(stream) if decl: stream.write('' % encoding) self.element_stack = [] self.nm = namespace_manager self.extra_ns = extra_ns or {} self.closed = True def __get_indent(self): return " " * len(self.element_stack) indent = property(__get_indent) def __close_start_tag(self): if not self.closed: # TODO: self.closed = True self.stream.write(">") def push(self, uri): self.__close_start_tag() write = self.stream.write write("\n") write(self.indent) write("<%s" % self.qname(uri)) self.element_stack.append(uri) self.closed = False self.parent = False def pop(self, uri=None): top = self.element_stack.pop() if uri: assert uri == top write = self.stream.write if not self.closed: self.closed = True write("/>") else: if self.parent: write("\n") write(self.indent) write("" % self.qname(top)) self.parent = True def element(self, uri, content, attributes={}): """Utility method for adding a complete simple element""" self.push(uri) for k, v in attributes.items(): self.attribute(k, v) self.text(content) self.pop() def namespaces(self, namespaces=None): if not namespaces: namespaces = self.nm.namespaces() write = self.stream.write write("\n") for prefix, namespace in namespaces: if prefix: write(' xmlns:%s="%s"\n' % (prefix, namespace)) # Allow user-provided namespace bindings to prevail elif prefix not in self.extra_ns: write(' xmlns="%s"\n' % namespace) for prefix, namespace in self.extra_ns.items(): if prefix: write(' xmlns:%s="%s"\n' % (prefix, namespace)) else: write(' xmlns="%s"\n' % namespace) 
def attribute(self, uri, value): write = self.stream.write write(" %s=%s" % (self.qname(uri), quoteattr(value))) def text(self, text): self.__close_start_tag() if "<" in text and ">" in text and "]]>" not in text: self.stream.write("") else: self.stream.write(escape(text, ESCAPE_ENTITIES)) def qname(self, uri): """Compute qname for a uri using our extra namespaces, or the given namespace manager""" for pre, ns in self.extra_ns.items(): if uri.startswith(ns): if pre != "": return ":".join(pre, uri[len(ns) :]) else: return uri[len(ns) :] return self.nm.qname_strict(uri) rdflib-6.1.1/rdflib/plugins/shared/000077500000000000000000000000001415774155300171665ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/shared/__init__.py000066400000000000000000000000001415774155300212650ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/shared/jsonld/000077500000000000000000000000001415774155300204575ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/shared/jsonld/__init__.py000066400000000000000000000000001415774155300225560ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/shared/jsonld/context.py000066400000000000000000000376221415774155300225270ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Implementation of the JSON-LD Context structure. 
See: http://json-ld.org/ """ # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/context.py from collections import namedtuple from rdflib.namespace import RDF from .keys import ( BASE, CONTAINER, CONTEXT, GRAPH, ID, IMPORT, INCLUDED, INDEX, JSON, LANG, LIST, NEST, NONE, PREFIX, PROPAGATE, PROTECTED, REV, SET, TYPE, VALUE, VERSION, VOCAB, ) from .errors import ( INVALID_REMOTE_CONTEXT, RECURSIVE_CONTEXT_INCLUSION, INVALID_CONTEXT_ENTRY, ) from .util import source_to_json, urljoin, urlsplit, split_iri, norm_url NODE_KEYS = {GRAPH, ID, INCLUDED, JSON, LIST, NEST, NONE, REV, SET, TYPE, VALUE, LANG} class Defined(int): pass UNDEF = Defined(0) # From URI_GEN_DELIMS = (":", "/", "?", "#", "[", "]", "@") class Context(object): def __init__(self, source=None, base=None, version=None): self.version = version or 1.0 self.language = None self.vocab = None self.base = base self.doc_base = base self.terms = {} # _alias maps NODE_KEY to list of aliases self._alias = {} self._lookup = {} self._prefixes = {} self.active = False self.parent = None self.propagate = True self._context_cache = {} if source: self.load(source) @property def base(self): return self._base @base.setter def base(self, base): if base: hash_index = base.find("#") if hash_index > -1: base = base[0:hash_index] self._base = ( self.resolve_iri(base) if (hasattr(self, "_base") and base is not None) else base ) self._basedomain = "%s://%s" % urlsplit(base)[0:2] if base else None def subcontext(self, source, propagate=True): # IMPROVE: to optimize, implement SubContext with parent fallback support parent = self.parent if self.propagate is False else self return parent._subcontext(source, propagate) def _subcontext(self, source, propagate): ctx = Context(version=self.version) ctx.propagate = propagate ctx.parent = self ctx.language = self.language ctx.vocab = self.vocab ctx.base = self.base ctx.doc_base = self.doc_base ctx._alias = {k: l[:] for k, l in self._alias.items()} ctx.terms = 
self.terms.copy() ctx._lookup = self._lookup.copy() ctx._prefixes = self._prefixes.copy() ctx._context_cache = self._context_cache ctx.load(source) return ctx def _clear(self): self.language = None self.vocab = None self.terms = {} self._alias = {} self._lookup = {} self._prefixes = {} self.active = False self.propagate = True def get_context_for_term(self, term): if term and term.context is not UNDEF: return self._subcontext(term.context, propagate=True) return self def get_context_for_type(self, node): if self.version >= 1.1: rtype = self.get_type(node) if isinstance(node, dict) else None if not isinstance(rtype, list): rtype = [rtype] if rtype else [] for rt in rtype: typeterm = self.terms.get(rt) if typeterm: break else: typeterm = None if typeterm and typeterm.context: subcontext = self.subcontext(typeterm.context, propagate=False) if subcontext: return subcontext return self.parent if self.propagate is False else self def get_id(self, obj): return self._get(obj, ID) def get_type(self, obj): return self._get(obj, TYPE) def get_language(self, obj): return self._get(obj, LANG) def get_value(self, obj): return self._get(obj, VALUE) def get_graph(self, obj): return self._get(obj, GRAPH) def get_list(self, obj): return self._get(obj, LIST) def get_set(self, obj): return self._get(obj, SET) def get_rev(self, obj): return self._get(obj, REV) def _get(self, obj, key): for alias in self._alias.get(key, []): if alias in obj: return obj.get(alias) return obj.get(key) def get_key(self, key): for alias in self.get_keys(key): return alias def get_keys(self, key): if key in self._alias: for alias in self._alias[key]: yield alias yield key lang_key = property(lambda self: self.get_key(LANG)) id_key = property(lambda self: self.get_key(ID)) type_key = property(lambda self: self.get_key(TYPE)) value_key = property(lambda self: self.get_key(VALUE)) list_key = property(lambda self: self.get_key(LIST)) rev_key = property(lambda self: self.get_key(REV)) graph_key = property(lambda 
self: self.get_key(GRAPH)) def add_term( self, name, idref, coercion=UNDEF, container=UNDEF, index=None, language=UNDEF, reverse=False, context=UNDEF, prefix=None, protected=False, ): if self.version < 1.1 or prefix is None: prefix = isinstance(idref, str) and idref.endswith(URI_GEN_DELIMS) if not self._accept_term(name): return if self.version >= 1.1: existing = self.terms.get(name) if existing and existing.protected: return if isinstance(container, (list, set, tuple)): container = set(container) else: container = set([container]) term = Term( idref, name, coercion, container, index, language, reverse, context, prefix, protected, ) self.terms[name] = term for container_key in (LIST, LANG, SET): # , INDEX, ID, GRAPH): if container_key in container: break else: container_key = UNDEF self._lookup[(idref, coercion or language, container_key, reverse)] = term if term.prefix is True: self._prefixes[idref] = name def find_term( self, idref, coercion=None, container=UNDEF, language=None, reverse=False ): lu = self._lookup if coercion is None: coercion = language if coercion is not UNDEF and container: found = lu.get((idref, coercion, container, reverse)) if found: return found if coercion is not UNDEF: found = lu.get((idref, coercion, UNDEF, reverse)) if found: return found if container: found = lu.get((idref, coercion, container, reverse)) if found: return found elif language: found = lu.get((idref, UNDEF, LANG, reverse)) if found: return found else: found = lu.get((idref, coercion or UNDEF, SET, reverse)) if found: return found return lu.get((idref, UNDEF, UNDEF, reverse)) def resolve(self, curie_or_iri): iri = self.expand(curie_or_iri, False) if self.isblank(iri): return iri if " " in iri: return "" return self.resolve_iri(iri) def resolve_iri(self, iri): return norm_url(self._base, iri) def isblank(self, ref): return ref.startswith("_:") def expand(self, term_curie_or_iri, use_vocab=True): if not isinstance(term_curie_or_iri, str): return term_curie_or_iri if not 
self._accept_term(term_curie_or_iri): return "" if use_vocab: term = self.terms.get(term_curie_or_iri) if term: return term.id is_term, pfx, local = self._prep_expand(term_curie_or_iri) if pfx == "_": return term_curie_or_iri if pfx is not None: ns = self.terms.get(pfx) if ns and ns.prefix and ns.id: return ns.id + local elif is_term and use_vocab: if self.vocab: return self.vocab + term_curie_or_iri return None return self.resolve_iri(term_curie_or_iri) def shrink_iri(self, iri): ns, name = split_iri(str(iri)) pfx = self._prefixes.get(ns) if pfx: return ":".join((pfx, name)) elif self._base: if str(iri) == self._base: return "" elif iri.startswith(self._basedomain): return iri[len(self._basedomain) :] return iri def to_symbol(self, iri): iri = str(iri) term = self.find_term(iri) if term: return term.name ns, name = split_iri(iri) if ns == self.vocab: return name pfx = self._prefixes.get(ns) if pfx: return ":".join((pfx, name)) return iri def load(self, source, base=None, referenced_contexts=None): self.active = True sources = [] source = source if isinstance(source, list) else [source] referenced_contexts = referenced_contexts or set() self._prep_sources(base, source, sources, referenced_contexts) for source_url, source in sources: if source is None: self._clear() else: self._read_source(source, source_url, referenced_contexts) def _accept_term(self, key): if self.version < 1.1: return True if key and len(key) > 1 and key[0] == "@" and key[1].isalnum(): return key in NODE_KEYS else: return True def _prep_sources( self, base, inputs, sources, referenced_contexts, in_source_url=None ): for source in inputs: source_url = in_source_url if isinstance(source, str): source_url = source source_doc_base = base or self.doc_base new_ctx = self._fetch_context( source, source_doc_base, referenced_contexts ) if new_ctx is None: continue else: if base: base = urljoin(source_doc_base, source_url) source = new_ctx if isinstance(source, dict): if CONTEXT in source: source = 
source[CONTEXT] source = source if isinstance(source, list) else [source] if isinstance(source, list): self._prep_sources( base, source, sources, referenced_contexts, source_url ) else: sources.append((source_url, source)) def _fetch_context(self, source, base, referenced_contexts): source_url = urljoin(base, source) if source_url in referenced_contexts: raise RECURSIVE_CONTEXT_INCLUSION referenced_contexts.add(source_url) if source_url in self._context_cache: return self._context_cache[source_url] source = source_to_json(source_url) if source and CONTEXT not in source: raise INVALID_REMOTE_CONTEXT self._context_cache[source_url] = source return source def _read_source(self, source, source_url=None, referenced_contexts=None): imports = source.get(IMPORT) if imports: if not isinstance(imports, str): raise INVALID_CONTEXT_ENTRY imported = self._fetch_context( imports, self.base, referenced_contexts or set() ) if not isinstance(imported, dict): raise INVALID_CONTEXT_ENTRY imported = imported[CONTEXT] imported.update(source) source = imported self.vocab = source.get(VOCAB, self.vocab) self.version = source.get(VERSION, self.version) protected = source.get(PROTECTED, False) for key, value in source.items(): if key in {VOCAB, VERSION, IMPORT, PROTECTED}: continue elif key == PROPAGATE and isinstance(value, bool): self.propagate = value elif key == LANG: self.language = value elif key == BASE: if not source_url and not imports: self.base = value else: self._read_term(source, key, value, protected) def _read_term(self, source, name, dfn, protected=False): idref = None if isinstance(dfn, dict): # term = self._create_term(source, key, value) rev = dfn.get(REV) protected = dfn.get(PROTECTED, protected) coercion = dfn.get(TYPE, UNDEF) if coercion and coercion not in (ID, TYPE, VOCAB): coercion = self._rec_expand(source, coercion) idref = rev or dfn.get(ID, UNDEF) if idref == TYPE: idref = str(RDF.type) coercion = VOCAB elif idref is not UNDEF: idref = self._rec_expand(source, 
idref) elif ":" in name: idref = self._rec_expand(source, name) elif self.vocab: idref = self.vocab + name context = dfn.get(CONTEXT, UNDEF) self.add_term( name, idref, coercion, dfn.get(CONTAINER, UNDEF), dfn.get(INDEX, UNDEF), dfn.get(LANG, UNDEF), bool(rev), context, dfn.get(PREFIX), protected=protected, ) else: if isinstance(dfn, str): if not self._accept_term(dfn): return idref = self._rec_expand(source, dfn) self.add_term(name, idref, protected=protected) if idref in NODE_KEYS: self._alias.setdefault(idref, []).append(name) def _rec_expand(self, source, expr, prev=None): if expr == prev or expr in NODE_KEYS: return expr is_term, pfx, nxt = self._prep_expand(expr) if pfx: iri = self._get_source_id(source, pfx) if iri is None: if pfx + ":" == self.vocab: return expr else: term = self.terms.get(pfx) if term: iri = term.id if iri is None: nxt = expr else: nxt = iri + nxt else: nxt = self._get_source_id(source, nxt) or nxt if ":" not in nxt and self.vocab: return self.vocab + nxt return self._rec_expand(source, nxt, expr) def _prep_expand(self, expr): if ":" not in expr: return True, None, expr pfx, local = expr.split(":", 1) if not local.startswith("//"): return False, pfx, local else: return False, None, expr def _get_source_id(self, source, key): # .. 
from source dict or if already defined term = source.get(key) if term is None: dfn = self.terms.get(key) if dfn: term = dfn.id elif isinstance(term, dict): term = term.get(ID) return term Term = namedtuple( "Term", "id, name, type, container, index, language, reverse, context," "prefix, protected", ) Term.__new__.__defaults__ = (UNDEF, UNDEF, UNDEF, UNDEF, False, UNDEF, False, False) # type: ignore[attr-defined] rdflib-6.1.1/rdflib/plugins/shared/jsonld/errors.py000066400000000000000000000006751415774155300223550ustar00rootroot00000000000000# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/errors.py class JSONLDException(ValueError): pass # http://www.w3.org/TR/json-ld-api/#idl-def-JsonLdErrorCode.{code-message} RECURSIVE_CONTEXT_INCLUSION = JSONLDException("recursive context inclusion") INVALID_REMOTE_CONTEXT = JSONLDException("invalid remote context") INVALID_CONTEXT_ENTRY = JSONLDException("invalid context entry") rdflib-6.1.1/rdflib/plugins/shared/jsonld/keys.py000066400000000000000000000010561415774155300220060ustar00rootroot00000000000000# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/keys.py BASE = "@base" CONTAINER = "@container" CONTEXT = "@context" # DIRECTION = u'@direction' GRAPH = "@graph" ID = "@id" IMPORT = "@import" INCLUDED = "@included" INDEX = "@index" JSON = "@json" LANG = LANGUAGE = "@language" LIST = "@list" NEST = "@nest" NONE = "@none" PREFIX = "@prefix" PROPAGATE = "@propagate" PROTECTED = "@protected" REV = REVERSE = "@reverse" SET = "@set" TYPE = "@type" VALUE = "@value" VERSION = "@version" VOCAB = "@vocab" rdflib-6.1.1/rdflib/plugins/shared/jsonld/util.py000066400000000000000000000056031415774155300220120ustar00rootroot00000000000000# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/util.py import typing as t if t.TYPE_CHECKING: import json else: try: import json assert 
json # workaround for pyflakes issue #13 except ImportError: import simplejson as json from posixpath import sep from posixpath import normpath from urllib.parse import urljoin, urlsplit, urlunsplit from rdflib.parser import create_input_source, PythonInputSource, StringInputSource from io import TextIOBase, TextIOWrapper def source_to_json(source): if isinstance(source, PythonInputSource): return source.data if isinstance(source, StringInputSource): return json.load(source.getCharacterStream()) # TODO: conneg for JSON (fix support in rdflib's URLInputSource!) source = create_input_source(source, format="json-ld") stream = source.getByteStream() try: # Use character stream as-is, or interpret byte stream as UTF-8 if isinstance(stream, TextIOBase): use_stream = stream else: use_stream = TextIOWrapper(stream, encoding='utf-8') return json.load(use_stream) finally: stream.close() VOCAB_DELIMS = ("#", "/", ":") def split_iri(iri): for delim in VOCAB_DELIMS: at = iri.rfind(delim) if at > -1: return iri[: at + 1], iri[at + 1 :] return iri, None def norm_url(base, url): """ >>> norm_url('http://example.org/', '/one') 'http://example.org/one' >>> norm_url('http://example.org/', '/one#') 'http://example.org/one#' >>> norm_url('http://example.org/one', 'two') 'http://example.org/two' >>> norm_url('http://example.org/one/', 'two') 'http://example.org/one/two' >>> norm_url('http://example.org/', 'http://example.net/one') 'http://example.net/one' >>> norm_url('http://example.org/', 'http://example.org//one') 'http://example.org//one' """ if "://" in url: return url parts = urlsplit(urljoin(base, url)) path = normpath(parts[2]) if sep != "/": path = "/".join(path.split(sep)) if parts[2].endswith("/") and not path.endswith("/"): path += "/" result = urlunsplit(parts[0:2] + (path,) + parts[3:]) if url.endswith("#") and not result.endswith("#"): result += "#" return result def context_from_urlinputsource(source): if source.content_type == "application/json": # response_info was 
added to InputSource in rdflib 4.2 try: links = source.response_info.getallmatchingheaders("Link") except AttributeError: return for link in links: if ' rel="http://www.w3.org/ns/json-ld#context"' in link: i, j = link.index("<"), link.index(">") if i > -1 and j > -1: return urljoin(source.url, link[i + 1 : j]) rdflib-6.1.1/rdflib/plugins/sparql/000077500000000000000000000000001415774155300172225ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/sparql/__init__.py000066400000000000000000000023241415774155300213340ustar00rootroot00000000000000""" SPARQL implementation for RDFLib .. versionadded:: 4.0 """ SPARQL_LOAD_GRAPHS = True """ If True, using FROM and FROM NAMED will load/parse more data """ SPARQL_DEFAULT_GRAPH_UNION = True """ If True - the default graph in the RDF Dataset is the union of all named graphs (like RDFLib's ConjunctiveGraph) """ CUSTOM_EVALS = {} """ Custom evaluation functions These must be functions taking (ctx, part) and raise NotImplementedError if they cannot handle a certain part """ PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval" import sys from . import parser from . import operators from . 
import parserutils from .processor import prepareQuery, processUpdate assert parser assert operators assert parserutils if sys.version_info < (3, 8): from importlib_metadata import entry_points else: from importlib.metadata import entry_points all_entry_points = entry_points() if isinstance(all_entry_points, dict): # Prior to Python 3.10, this returns a dict instead of the selection interface for ep in all_entry_points.get(PLUGIN_ENTRY_POINT, []): CUSTOM_EVALS[ep.name] = ep.load() else: for ep in all_entry_points.select(group=PLUGIN_ENTRY_POINT): CUSTOM_EVALS[ep.name] = ep.load() rdflib-6.1.1/rdflib/plugins/sparql/aggregates.py000066400000000000000000000165071415774155300217160ustar00rootroot00000000000000from rdflib import Literal, XSD from rdflib.plugins.sparql.evalutils import _eval, NotBoundError, _val from rdflib.plugins.sparql.operators import numeric from rdflib.plugins.sparql.datatypes import type_promotion from rdflib.plugins.sparql.sparql import SPARQLTypeError from decimal import Decimal """ Aggregation functions """ class Accumulator(object): """abstract base class for different aggregation functions""" def __init__(self, aggregation): self.var = aggregation.res self.expr = aggregation.vars if not aggregation.distinct: self.use_row = self.dont_care self.distinct = False else: self.distinct = aggregation.distinct self.seen = set() def dont_care(self, row): """skips distinct test""" return True def use_row(self, row): """tests distinct with set""" return _eval(self.expr, row) not in self.seen def set_value(self, bindings): """sets final value in bindings""" bindings[self.var] = self.get_value() class Counter(Accumulator): def __init__(self, aggregation): super(Counter, self).__init__(aggregation) self.value = 0 if self.expr == "*": # cannot eval "*" => always use the full row self.eval_row = self.eval_full_row def update(self, row, aggregator): try: val = self.eval_row(row) except NotBoundError: # skip UNDEF return self.value += 1 if self.distinct: 
self.seen.add(val) def get_value(self): return Literal(self.value) def eval_row(self, row): return _eval(self.expr, row) def eval_full_row(self, row): return row def use_row(self, row): return self.eval_row(row) not in self.seen def type_safe_numbers(*args): if any(isinstance(arg, float) for arg in args) and any( isinstance(arg, Decimal) for arg in args ): return map(float, args) return args class Sum(Accumulator): def __init__(self, aggregation): super(Sum, self).__init__(aggregation) self.value = 0 self.datatype = None def update(self, row, aggregator): try: value = _eval(self.expr, row) dt = self.datatype if dt is None: dt = value.datatype else: dt = type_promotion(dt, value.datatype) self.datatype = dt self.value = sum(type_safe_numbers(self.value, numeric(value))) if self.distinct: self.seen.add(value) except NotBoundError: # skip UNDEF pass def get_value(self): return Literal(self.value, datatype=self.datatype) class Average(Accumulator): def __init__(self, aggregation): super(Average, self).__init__(aggregation) self.counter = 0 self.sum = 0 self.datatype = None def update(self, row, aggregator): try: value = _eval(self.expr, row) dt = self.datatype self.sum = sum(type_safe_numbers(self.sum, numeric(value))) if dt is None: dt = value.datatype else: dt = type_promotion(dt, value.datatype) self.datatype = dt if self.distinct: self.seen.add(value) self.counter += 1 # skip UNDEF or BNode => SPARQLTypeError except NotBoundError: pass except SPARQLTypeError: pass def get_value(self): if self.counter == 0: return Literal(0) if self.datatype in (XSD.float, XSD.double): return Literal(self.sum / self.counter) else: return Literal(Decimal(self.sum) / Decimal(self.counter)) class Extremum(Accumulator): """abstract base class for Minimum and Maximum""" def __init__(self, aggregation): super(Extremum, self).__init__(aggregation) self.value = None # DISTINCT would not change the value for MIN or MAX self.use_row = self.dont_care def set_value(self, bindings): if 
self.value is not None: # simply do not set if self.value is still None bindings[self.var] = Literal(self.value) def update(self, row, aggregator): try: if self.value is None: self.value = _eval(self.expr, row) else: # self.compare is implemented by Minimum/Maximum self.value = self.compare(self.value, _eval(self.expr, row)) # skip UNDEF or BNode => SPARQLTypeError except NotBoundError: pass except SPARQLTypeError: pass class Minimum(Extremum): def compare(self, val1, val2): return min(val1, val2, key=_val) class Maximum(Extremum): def compare(self, val1, val2): return max(val1, val2, key=_val) class Sample(Accumulator): """takes the first eligible value""" def __init__(self, aggregation): super(Sample, self).__init__(aggregation) # DISTINCT would not change the value self.use_row = self.dont_care def update(self, row, aggregator): try: # set the value now aggregator.bindings[self.var] = _eval(self.expr, row) # and skip this accumulator for future rows del aggregator.accumulators[self.var] except NotBoundError: pass def get_value(self): # set None if no value was set return None class GroupConcat(Accumulator): def __init__(self, aggregation): super(GroupConcat, self).__init__(aggregation) # only GROUPCONCAT needs to have a list as accumulator self.value = [] self.separator = aggregation.separator or " " def update(self, row, aggregator): try: value = _eval(self.expr, row) self.value.append(value) if self.distinct: self.seen.add(value) # skip UNDEF except NotBoundError: pass def get_value(self): return Literal(self.separator.join(str(v) for v in self.value)) class Aggregator(object): """combines different Accumulator objects""" accumulator_classes = { "Aggregate_Count": Counter, "Aggregate_Sample": Sample, "Aggregate_Sum": Sum, "Aggregate_Avg": Average, "Aggregate_Min": Minimum, "Aggregate_Max": Maximum, "Aggregate_GroupConcat": GroupConcat, } def __init__(self, aggregations): self.bindings = {} self.accumulators = {} for a in aggregations: accumulator_class = 
self.accumulator_classes.get(a.name) if accumulator_class is None: raise Exception("Unknown aggregate function " + a.name) self.accumulators[a.res] = accumulator_class(a) def update(self, row): """update all own accumulators""" # SAMPLE accumulators may delete themselves # => iterate over list not generator for acc in list(self.accumulators.values()): if acc.use_row(row): acc.update(row, self) def get_bindings(self): """calculate and set last values""" for acc in self.accumulators.values(): acc.set_value(self.bindings) return self.bindings rdflib-6.1.1/rdflib/plugins/sparql/algebra.py000066400000000000000000001415341415774155300212010ustar00rootroot00000000000000""" Converting the 'parse-tree' output of pyparsing to a SPARQL Algebra expression http://www.w3.org/TR/sparql11-query/#sparqlQuery """ import functools import operator import collections from functools import reduce from rdflib import Literal, Variable, URIRef, BNode from rdflib.plugins.sparql.sparql import Prologue, Query from rdflib.plugins.sparql.parserutils import CompValue, Expr from rdflib.plugins.sparql.operators import ( and_, TrueFilter, simplify as simplifyFilters, ) from rdflib.paths import InvPath, AlternativePath, SequencePath, MulPath, NegatedPath from pyparsing import ParseResults # --------------------------- # Some convenience methods from rdflib.term import Identifier def OrderBy(p, expr): return CompValue("OrderBy", p=p, expr=expr) def ToMultiSet(p): return CompValue("ToMultiSet", p=p) def Union(p1, p2): return CompValue("Union", p1=p1, p2=p2) def Join(p1, p2): return CompValue("Join", p1=p1, p2=p2) def Minus(p1, p2): return CompValue("Minus", p1=p1, p2=p2) def Graph(term, graph): return CompValue("Graph", term=term, p=graph) def BGP(triples=None): return CompValue("BGP", triples=triples or []) def LeftJoin(p1, p2, expr): return CompValue("LeftJoin", p1=p1, p2=p2, expr=expr) def Filter(expr, p): return CompValue("Filter", expr=expr, p=p) def Extend(p, expr, var): return 
CompValue("Extend", p=p, expr=expr, var=var) def Values(res): return CompValue("values", res=res) def Project(p, PV): return CompValue("Project", p=p, PV=PV) def Group(p, expr=None): return CompValue("Group", p=p, expr=expr) def _knownTerms(triple, varsknown, varscount): return ( len( [ x for x in triple if x not in varsknown and isinstance(x, (Variable, BNode)) ] ), -sum(varscount.get(x, 0) for x in triple), not isinstance(triple[2], Literal), ) def reorderTriples(l_): """ Reorder triple patterns so that we execute the ones with most bindings first """ def _addvar(term, varsknown): if isinstance(term, (Variable, BNode)): varsknown.add(term) l_ = [(None, x) for x in l_] varsknown = set() varscount = collections.defaultdict(int) for t in l_: for c in t[1]: if isinstance(c, (Variable, BNode)): varscount[c] += 1 i = 0 # Done in steps, sort by number of bound terms # the top block of patterns with the most bound terms is kept # the rest is resorted based on the vars bound after the first # block is evaluated # we sort by decorate/undecorate, since we need the value of the sort keys while i < len(l_): l_[i:] = sorted((_knownTerms(x[1], varsknown, varscount), x[1]) for x in l_[i:]) t = l_[i][0][0] # top block has this many terms bound j = 0 while i + j < len(l_) and l_[i + j][0][0] == t: for c in l_[i + j][1]: _addvar(c, varsknown) j += 1 i += 1 return [x[1] for x in l_] def triples(l): l = reduce(lambda x, y: x + y, l) if (len(l) % 3) != 0: raise Exception("these aint triples") return reorderTriples((l[x], l[x + 1], l[x + 2]) for x in range(0, len(l), 3)) def translatePName(p, prologue): """ Expand prefixed/relative URIs """ if isinstance(p, CompValue): if p.name == "pname": return prologue.absolutize(p) if p.name == "literal": return Literal( p.string, lang=p.lang, datatype=prologue.absolutize(p.datatype) ) elif isinstance(p, URIRef): return prologue.absolutize(p) def translatePath(p): """ Translate PropertyPath expressions """ if isinstance(p, CompValue): if p.name == 
"PathAlternative": if len(p.part) == 1: return p.part[0] else: return AlternativePath(*p.part) elif p.name == "PathSequence": if len(p.part) == 1: return p.part[0] else: return SequencePath(*p.part) elif p.name == "PathElt": if not p.mod: return p.part else: if isinstance(p.part, list): if len(p.part) != 1: raise Exception("Denkfehler!") return MulPath(p.part[0], p.mod) else: return MulPath(p.part, p.mod) elif p.name == "PathEltOrInverse": if isinstance(p.part, list): if len(p.part) != 1: raise Exception("Denkfehler!") return InvPath(p.part[0]) else: return InvPath(p.part) elif p.name == "PathNegatedPropertySet": if isinstance(p.part, list): return NegatedPath(AlternativePath(*p.part)) else: return NegatedPath(p.part) def translateExists(e): """ Translate the graph pattern used by EXISTS and NOT EXISTS http://www.w3.org/TR/sparql11-query/#sparqlCollectFilters """ def _c(n): if isinstance(n, CompValue): if n.name in ("Builtin_EXISTS", "Builtin_NOTEXISTS"): n.graph = translateGroupGraphPattern(n.graph) if n.graph.name == "Filter": # filters inside (NOT) EXISTS can see vars bound outside n.graph.no_isolated_scope = True e = traverse(e, visitPost=_c) return e def collectAndRemoveFilters(parts): """ FILTER expressions apply to the whole group graph pattern in which they appear. 
http://www.w3.org/TR/sparql11-query/#sparqlCollectFilters """ filters = [] i = 0 while i < len(parts): p = parts[i] if p.name == "Filter": filters.append(translateExists(p.expr)) parts.pop(i) else: i += 1 if filters: return and_(*filters) return None def translateGroupOrUnionGraphPattern(graphPattern): A = None for g in graphPattern.graph: g = translateGroupGraphPattern(g) if not A: A = g else: A = Union(A, g) return A def translateGraphGraphPattern(graphPattern): return Graph(graphPattern.term, translateGroupGraphPattern(graphPattern.graph)) def translateInlineData(graphPattern): return ToMultiSet(translateValues(graphPattern)) def translateGroupGraphPattern(graphPattern): """ http://www.w3.org/TR/sparql11-query/#convertGraphPattern """ if graphPattern.name == "SubSelect": return ToMultiSet(translate(graphPattern)[0]) if not graphPattern.part: graphPattern.part = [] # empty { } filters = collectAndRemoveFilters(graphPattern.part) g = [] for p in graphPattern.part: if p.name == "TriplesBlock": # merge adjacent TripleBlocks if not (g and g[-1].name == "BGP"): g.append(BGP()) g[-1]["triples"] += triples(p.triples) else: g.append(p) G = BGP() for p in g: if p.name == "OptionalGraphPattern": A = translateGroupGraphPattern(p.graph) if A.name == "Filter": G = LeftJoin(G, A.p, A.expr) else: G = LeftJoin(G, A, TrueFilter) elif p.name == "MinusGraphPattern": G = Minus(p1=G, p2=translateGroupGraphPattern(p.graph)) elif p.name == "GroupOrUnionGraphPattern": G = Join(p1=G, p2=translateGroupOrUnionGraphPattern(p)) elif p.name == "GraphGraphPattern": G = Join(p1=G, p2=translateGraphGraphPattern(p)) elif p.name == "InlineData": G = Join(p1=G, p2=translateInlineData(p)) elif p.name == "ServiceGraphPattern": G = Join(p1=G, p2=p) elif p.name in ("BGP", "Extend"): G = Join(p1=G, p2=p) elif p.name == "Bind": G = Extend(G, p.expr, p.var) else: raise Exception( "Unknown part in GroupGraphPattern: %s - %s" % (type(p), p.name) ) if filters: G = Filter(expr=filters, p=G) return G class 
StopTraversal(Exception): def __init__(self, rv): self.rv = rv def _traverse(e, visitPre=lambda n: None, visitPost=lambda n: None): """ Traverse a parse-tree, visit each node if visit functions return a value, replace current node """ _e = visitPre(e) if _e is not None: return _e if e is None: return None if isinstance(e, (list, ParseResults)): return [_traverse(x, visitPre, visitPost) for x in e] elif isinstance(e, tuple): return tuple([_traverse(x, visitPre, visitPost) for x in e]) elif isinstance(e, CompValue): for k, val in e.items(): e[k] = _traverse(val, visitPre, visitPost) _e = visitPost(e) if _e is not None: return _e return e def _traverseAgg(e, visitor=lambda n, v: None): """ Traverse a parse-tree, visit each node if visit functions return a value, replace current node """ res = [] if isinstance(e, (list, ParseResults, tuple)): res = [_traverseAgg(x, visitor) for x in e] elif isinstance(e, CompValue): for k, val in e.items(): if val is not None: res.append(_traverseAgg(val, visitor)) return visitor(e, res) def traverse(tree, visitPre=lambda n: None, visitPost=lambda n: None, complete=None): """ Traverse tree, visit each node with visit function visit function may raise StopTraversal to stop traversal if complete!=None, it is returned on complete traversal, otherwise the transformed tree is returned """ try: r = _traverse(tree, visitPre, visitPost) if complete is not None: return complete return r except StopTraversal as st: return st.rv def _hasAggregate(x): """ Traverse parse(sub)Tree return true if any aggregates are used """ if isinstance(x, CompValue): if x.name.startswith("Aggregate_"): raise StopTraversal(True) def _aggs(e, A): """ Collect Aggregates in A replaces aggregates with variable references """ # TODO: nested Aggregates? 
if isinstance(e, CompValue) and e.name.startswith("Aggregate_"): A.append(e) aggvar = Variable("__agg_%d__" % len(A)) e["res"] = aggvar return aggvar def _findVars(x, res): """ Find all variables in a tree """ if isinstance(x, Variable): res.add(x) if isinstance(x, CompValue): if x.name == "Bind": res.add(x.var) return x # stop recursion and finding vars in the expr elif x.name == "SubSelect": if x.projection: res.update(v.var or v.evar for v in x.projection) return x def _addVars(x, children): """ find which variables may be bound by this part of the query """ if isinstance(x, Variable): return set([x]) elif isinstance(x, CompValue): if x.name == "RelationalExpression": x["_vars"] = set() elif x.name == "Extend": # vars only used in the expr for a bind should not be included x["_vars"] = reduce( operator.or_, [child for child, part in zip(children, x) if part != "expr"], set(), ) else: x["_vars"] = set(reduce(operator.or_, children, set())) if x.name == "SubSelect": if x.projection: s = set(v.var or v.evar for v in x.projection) else: s = set() return s return x["_vars"] return reduce(operator.or_, children, set()) def _sample(e, v=None): """ For each unaggregated variable V in expr Replace V with Sample(V) """ if isinstance(e, CompValue) and e.name.startswith("Aggregate_"): return e # do not replace vars in aggregates if isinstance(e, Variable) and v != e: return CompValue("Aggregate_Sample", vars=e) def _simplifyFilters(e): if isinstance(e, Expr): return simplifyFilters(e) def translateAggregates(q, M): E = [] A = [] # collect/replace aggs in : # select expr as ?var if q.projection: for v in q.projection: if v.evar: v.expr = traverse(v.expr, functools.partial(_sample, v=v.evar)) v.expr = traverse(v.expr, functools.partial(_aggs, A=A)) # having clause if traverse(q.having, _hasAggregate, complete=False): q.having = traverse(q.having, _sample) traverse(q.having, functools.partial(_aggs, A=A)) # order by if traverse(q.orderby, _hasAggregate, complete=False): 
q.orderby = traverse(q.orderby, _sample) traverse(q.orderby, functools.partial(_aggs, A=A)) # sample all other select vars # TODO: only allowed for vars in group-by? if q.projection: for v in q.projection: if v.var: rv = Variable("__agg_%d__" % (len(A) + 1)) A.append(CompValue("Aggregate_Sample", vars=v.var, res=rv)) E.append((rv, v.var)) return CompValue("AggregateJoin", A=A, p=M), E def translateValues(v): # if len(v.var)!=len(v.value): # raise Exception("Unmatched vars and values in ValueClause: "+str(v)) res = [] if not v.var: return res if not v.value: return res if not isinstance(v.value[0], list): for val in v.value: res.append({v.var[0]: val}) else: for vals in v.value: res.append(dict(zip(v.var, vals))) return Values(res) def translate(q): """ http://www.w3.org/TR/sparql11-query/#convertSolMod """ _traverse(q, _simplifyFilters) q.where = traverse(q.where, visitPost=translatePath) # TODO: Var scope test VS = set() traverse(q.where, functools.partial(_findVars, res=VS)) # all query types have a where part M = translateGroupGraphPattern(q.where) aggregate = False if q.groupby: conditions = [] # convert "GROUP BY (?expr as ?var)" to an Extend for c in q.groupby.condition: if isinstance(c, CompValue) and c.name == "GroupAs": M = Extend(M, c.expr, c.var) c = c.var conditions.append(c) M = Group(p=M, expr=conditions) aggregate = True elif ( traverse(q.having, _hasAggregate, complete=False) or traverse(q.orderby, _hasAggregate, complete=False) or any( traverse(x.expr, _hasAggregate, complete=False) for x in q.projection or [] if x.evar ) ): # if any aggregate is used, implicit group by M = Group(p=M) aggregate = True if aggregate: M, E = translateAggregates(q, M) else: E = [] # HAVING if q.having: M = Filter(expr=and_(*q.having.condition), p=M) # VALUES if q.valuesClause: M = Join(p1=M, p2=ToMultiSet(translateValues(q.valuesClause))) if not q.projection: # select * PV = list(VS) else: PV = list() for v in q.projection: if v.var: if v not in PV: PV.append(v.var) 
elif v.evar: if v not in PV: PV.append(v.evar) E.append((v.expr, v.evar)) else: raise Exception("I expected a var or evar here!") for e, v in E: M = Extend(M, e, v) # ORDER BY if q.orderby: M = OrderBy( M, [ CompValue("OrderCondition", expr=c.expr, order=c.order) for c in q.orderby.condition ], ) # PROJECT M = Project(M, PV) if q.modifier: if q.modifier == "DISTINCT": M = CompValue("Distinct", p=M) elif q.modifier == "REDUCED": M = CompValue("Reduced", p=M) if q.limitoffset: offset = 0 if q.limitoffset.offset is not None: offset = q.limitoffset.offset.toPython() if q.limitoffset.limit is not None: M = CompValue( "Slice", p=M, start=offset, length=q.limitoffset.limit.toPython() ) else: M = CompValue("Slice", p=M, start=offset) return M, PV def simplify(n): """Remove joins to empty BGPs""" if isinstance(n, CompValue): if n.name == "Join": if n.p1.name == "BGP" and len(n.p1.triples) == 0: return n.p2 if n.p2.name == "BGP" and len(n.p2.triples) == 0: return n.p1 elif n.name == "BGP": n["triples"] = reorderTriples(n.triples) return n def analyse(n, children): """ Some things can be lazily joined. 
This propegates whether they can up the tree and sets lazy flags for all joins """ if isinstance(n, CompValue): if n.name == "Join": n["lazy"] = all(children) return False elif n.name in ("Slice", "Distinct"): return False else: return all(children) else: return True def translatePrologue(p, base, initNs=None, prologue=None): if prologue is None: prologue = Prologue() prologue.base = "" if base: prologue.base = base if initNs: for k, v in initNs.items(): prologue.bind(k, v) for x in p: if x.name == "Base": prologue.base = x.iri elif x.name == "PrefixDecl": prologue.bind(x.prefix, prologue.absolutize(x.iri)) return prologue def translateQuads(quads): if quads.triples: alltriples = triples(quads.triples) else: alltriples = [] allquads = collections.defaultdict(list) if quads.quadsNotTriples: for q in quads.quadsNotTriples: if q.triples: allquads[q.term] += triples(q.triples) return alltriples, allquads def translateUpdate1(u, prologue): if u.name in ("Load", "Clear", "Drop", "Create"): pass # no translation needed elif u.name in ("Add", "Move", "Copy"): pass elif u.name in ("InsertData", "DeleteData", "DeleteWhere"): t, q = translateQuads(u.quads) u["quads"] = q u["triples"] = t if u.name in ("DeleteWhere", "DeleteData"): pass # TODO: check for bnodes in triples elif u.name == "Modify": if u.delete: u.delete["triples"], u.delete["quads"] = translateQuads(u.delete.quads) if u.insert: u.insert["triples"], u.insert["quads"] = translateQuads(u.insert.quads) u["where"] = translateGroupGraphPattern(u.where) else: raise Exception("Unknown type of update operation: %s" % u) u.prologue = prologue return u def translateUpdate(q, base=None, initNs=None): """ Returns a list of SPARQL Update Algebra expressions """ res = [] prologue = None if not q.request: return res for p, u in zip(q.prologue, q.request): prologue = translatePrologue(p, base, initNs, prologue) # absolutize/resolve prefixes u = traverse(u, visitPost=functools.partial(translatePName, prologue=prologue)) u = 
_traverse(u, _simplifyFilters) u = traverse(u, visitPost=translatePath) res.append(translateUpdate1(u, prologue)) return res def translateQuery(q, base=None, initNs=None): """ Translate a query-parsetree to a SPARQL Algebra Expression Return a rdflib.plugins.sparql.sparql.Query object """ # We get in: (prologue, query) prologue = translatePrologue(q[0], base, initNs) # absolutize/resolve prefixes q[1] = traverse( q[1], visitPost=functools.partial(translatePName, prologue=prologue) ) P, PV = translate(q[1]) datasetClause = q[1].datasetClause if q[1].name == "ConstructQuery": template = triples(q[1].template) if q[1].template else None res = CompValue(q[1].name, p=P, template=template, datasetClause=datasetClause) else: res = CompValue(q[1].name, p=P, datasetClause=datasetClause, PV=PV) res = traverse(res, visitPost=simplify) _traverseAgg(res, visitor=analyse) _traverseAgg(res, _addVars) return Query(prologue, res) class ExpressionNotCoveredException(Exception): pass def translateAlgebra(query_algebra: Query): """ :param query_algebra: An algebra returned by the function call algebra.translateQuery(parse_tree). :return: The query form generated from the SPARQL 1.1 algebra tree for select queries. 
""" import os def overwrite(text): file = open("query.txt", "w+") file.write(text) file.close() def replace( old, new, search_from_match: str = None, search_from_match_occurrence: int = None, count: int = 1, ): # Read in the file with open("query.txt", "r") as file: filedata = file.read() def find_nth(haystack, needle, n): start = haystack.lower().find(needle) while start >= 0 and n > 1: start = haystack.lower().find(needle, start + len(needle)) n -= 1 return start if search_from_match and search_from_match_occurrence: position = find_nth( filedata, search_from_match, search_from_match_occurrence ) filedata_pre = filedata[:position] filedata_post = filedata[position:].replace(old, new, count) filedata = filedata_pre + filedata_post else: filedata = filedata.replace(old, new, count) # Write the file out again with open("query.txt", "w") as file: file.write(filedata) def convert_node_arg(node_arg): if isinstance(node_arg, Identifier): return node_arg.n3() elif isinstance(node_arg, CompValue): return "{" + node_arg.name + "}" elif isinstance(node_arg, Expr): return "{" + node_arg.name + "}" elif isinstance(node_arg, str): return node_arg else: raise ExpressionNotCoveredException( "The expression {0} might not be covered yet.".format(node_arg) ) def sparql_query_text(node): """ https://www.w3.org/TR/sparql11-query/#sparqlSyntax :param node: :return: """ if isinstance(node, CompValue): # 18.2 Query Forms if node.name == "SelectQuery": overwrite("-*-SELECT-*- " + "{" + node.p.name + "}") # 18.2 Graph Patterns elif node.name == "BGP": # Identifiers or Paths # Negated path throws a type error. Probably n3() method of negated paths should be fixed triples = "".join( triple[0].n3() + " " + triple[1].n3() + " " + triple[2].n3() + "." 
for triple in node.triples ) replace("{BGP}", triples) # The dummy -*-SELECT-*- is placed during a SelectQuery or Multiset pattern in order to be able # to match extended variables in a specific Select-clause (see "Extend" below) replace("-*-SELECT-*-", "SELECT", count=-1) # If there is no "Group By" clause the placeholder will simply be deleted. Otherwise there will be # no matching {GroupBy} placeholder because it has already been replaced by "group by variables" replace("{GroupBy}", "", count=-1) replace("{Having}", "", count=-1) elif node.name == "Join": replace("{Join}", "{" + node.p1.name + "}{" + node.p2.name + "}") # elif node.name == "LeftJoin": replace( "{LeftJoin}", "{" + node.p1.name + "}OPTIONAL{{" + node.p2.name + "}}", ) elif node.name == "Filter": if isinstance(node.expr, CompValue): expr = node.expr.name else: raise ExpressionNotCoveredException( "This expression might not be covered yet." ) if node.p: # Filter with p=AggregateJoin = Having if node.p.name == "AggregateJoin": replace("{Filter}", "{" + node.p.name + "}") replace("{Having}", "HAVING({" + expr + "})") else: replace( "{Filter}", "FILTER({" + expr + "}) {" + node.p.name + "}" ) else: replace("{Filter}", "FILTER({" + expr + "})") elif node.name == "Union": replace( "{Union}", "{{" + node.p1.name + "}}UNION{{" + node.p2.name + "}}" ) elif node.name == "Graph": expr = "GRAPH " + node.term.n3() + " {{" + node.p.name + "}}" replace("{Graph}", expr) elif node.name == "Extend": query_string = open("query.txt", "r").read().lower() select_occurrences = query_string.count("-*-select-*-") replace( node.var.n3(), "(" + convert_node_arg(node.expr) + " as " + node.var.n3() + ")", search_from_match="-*-select-*-", search_from_match_occurrence=select_occurrences, ) replace("{Extend}", "{" + node.p.name + "}") elif node.name == "Minus": expr = "{" + node.p1.name + "}MINUS{{" + node.p2.name + "}}" replace("{Minus}", expr) elif node.name == "Group": group_by_vars = [] if node.expr: for var in node.expr: if 
isinstance(var, Identifier): group_by_vars.append(var.n3()) else: raise ExpressionNotCoveredException( "This expression might not be covered yet." ) replace("{Group}", "{" + node.p.name + "}") replace("{GroupBy}", "GROUP BY " + " ".join(group_by_vars) + " ") else: replace("{Group}", "{" + node.p.name + "}") elif node.name == "AggregateJoin": replace("{AggregateJoin}", "{" + node.p.name + "}") for agg_func in node.A: if isinstance(agg_func.res, Identifier): identifier = agg_func.res.n3() else: raise ExpressionNotCoveredException( "This expression might not be covered yet." ) agg_func_name = agg_func.name.split("_")[1] distinct = "" if agg_func.distinct: distinct = agg_func.distinct + " " if agg_func_name == "GroupConcat": replace( identifier, "GROUP_CONCAT" + "(" + distinct + agg_func.vars.n3() + ";SEPARATOR=" + agg_func.separator.n3() + ")", ) else: replace( identifier, agg_func_name.upper() + "(" + distinct + convert_node_arg(agg_func.vars) + ")", ) # For non-aggregated variables the aggregation function "sample" is automatically assigned. # However, we do not want to have "sample" wrapped around non-aggregated variables. That is # why we replace it. If "sample" is used on purpose it will not be replaced as the alias # must be different from the variable in this case. replace( "(SAMPLE({0}) as {0})".format(convert_node_arg(agg_func.vars)), convert_node_arg(agg_func.vars), ) elif node.name == "GroupGraphPatternSub": replace( "GroupGraphPatternSub", " ".join([convert_node_arg(pattern) for pattern in node.part]), ) elif node.name == "TriplesBlock": print("triplesblock") replace( "{TriplesBlock}", "".join( triple[0].n3() + " " + triple[1].n3() + " " + triple[2].n3() + "." for triple in node.triples ), ) # 18.2 Solution modifiers elif node.name == "ToList": raise ExpressionNotCoveredException( "This expression might not be covered yet." 
) elif node.name == "OrderBy": order_conditions = [] for c in node.expr: if isinstance(c.expr, Identifier): var = c.expr.n3() if c.order is not None: cond = var + "(" + c.order + ")" else: cond = var order_conditions.append(cond) else: raise ExpressionNotCoveredException( "This expression might not be covered yet." ) replace("{OrderBy}", "{" + node.p.name + "}") replace("{OrderConditions}", " ".join(order_conditions) + " ") elif node.name == "Project": project_variables = [] for var in node.PV: if isinstance(var, Identifier): project_variables.append(var.n3()) else: raise ExpressionNotCoveredException( "This expression might not be covered yet." ) order_by_pattern = "" if node.p.name == "OrderBy": order_by_pattern = "ORDER BY {OrderConditions}" replace( "{Project}", " ".join(project_variables) + "{{" + node.p.name + "}}" + "{GroupBy}" + order_by_pattern + "{Having}", ) elif node.name == "Distinct": replace("{Distinct}", "DISTINCT {" + node.p.name + "}") elif node.name == "Reduced": replace("{Reduced}", "REDUCED {" + node.p.name + "}") elif node.name == "Slice": slice = "OFFSET " + str(node.start) + " LIMIT " + str(node.length) replace("{Slice}", "{" + node.p.name + "}" + slice) elif node.name == "ToMultiSet": if node.p.name == "values": replace("{ToMultiSet}", "{{" + node.p.name + "}}") else: replace( "{ToMultiSet}", "{-*-SELECT-*- " + "{" + node.p.name + "}" + "}" ) # 18.2 Property Path # 17 Expressions and Testing Values # # 17.3 Operator Mapping elif node.name == "RelationalExpression": expr = convert_node_arg(node.expr) op = node.op if isinstance(list, type(node.other)): other = ( "(" + ", ".join(convert_node_arg(expr) for expr in node.other) + ")" ) else: other = convert_node_arg(node.other) condition = "{left} {operator} {right}".format( left=expr, operator=op, right=other ) replace("{RelationalExpression}", condition) elif node.name == "ConditionalAndExpression": inner_nodes = " && ".join( [convert_node_arg(expr) for expr in node.other] ) replace( 
"{ConditionalAndExpression}", convert_node_arg(node.expr) + " && " + inner_nodes, ) elif node.name == "ConditionalOrExpression": inner_nodes = " || ".join( [convert_node_arg(expr) for expr in node.other] ) replace( "{ConditionalOrExpression}", "(" + convert_node_arg(node.expr) + " || " + inner_nodes + ")", ) elif node.name == "MultiplicativeExpression": left_side = convert_node_arg(node.expr) multiplication = left_side for i, operator in enumerate(node.op): multiplication += ( operator + " " + convert_node_arg(node.other[i]) + " " ) replace("{MultiplicativeExpression}", multiplication) elif node.name == "AdditiveExpression": left_side = convert_node_arg(node.expr) addition = left_side for i, operator in enumerate(node.op): addition += operator + " " + convert_node_arg(node.other[i]) + " " replace("{AdditiveExpression}", addition) elif node.name == "UnaryNot": replace("{UnaryNot}", "!" + convert_node_arg(node.expr)) # # 17.4 Function Definitions # # # 17.4.1 Functional Forms elif node.name.endswith("BOUND"): bound_var = convert_node_arg(node.arg) replace("{Builtin_BOUND}", "bound(" + bound_var + ")") elif node.name.endswith("IF"): arg2 = convert_node_arg(node.arg2) arg3 = convert_node_arg(node.arg3) if_expression = ( "IF(" + "{" + node.arg1.name + "}, " + arg2 + ", " + arg3 + ")" ) replace("{Builtin_IF}", if_expression) elif node.name.endswith("COALESCE"): replace( "{Builtin_COALESCE}", "COALESCE(" + ", ".join(convert_node_arg(arg) for arg in node.arg) + ")", ) elif node.name.endswith("Builtin_EXISTS"): # The node's name which we get with node.graph.name returns "Join" instead of GroupGraphPatternSub # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rExistsFunc # ExistsFunc can only have a GroupGraphPattern as parameter. 
However, when we print the query algebra # we get a GroupGraphPatternSub replace("{Builtin_EXISTS}", "EXISTS " + "{{" + node.graph.name + "}}") traverse(node.graph, visitPre=sparql_query_text) return node.graph elif node.name.endswith("Builtin_NOTEXISTS"): # The node's name which we get with node.graph.name returns "Join" instead of GroupGraphPatternSub # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rNotExistsFunc # NotExistsFunc can only have a GroupGraphPattern as parameter. However, when we print the query algebra # we get a GroupGraphPatternSub print(node.graph.name) replace( "{Builtin_NOTEXISTS}", "NOT EXISTS " + "{{" + node.graph.name + "}}" ) traverse(node.graph, visitPre=sparql_query_text) return node.graph # # # # 17.4.1.5 logical-or: Covered in "RelationalExpression" # # # # 17.4.1.6 logical-and: Covered in "RelationalExpression" # # # # 17.4.1.7 RDFterm-equal: Covered in "RelationalExpression" elif node.name.endswith("sameTerm"): replace( "{Builtin_sameTerm}", "SAMETERM(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) # # # # IN: Covered in "RelationalExpression" # # # # NOT IN: Covered in "RelationalExpression" # # # 17.4.2 Functions on RDF Terms elif node.name.endswith("Builtin_isIRI"): replace("{Builtin_isIRI}", "isIRI(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_isBLANK"): replace( "{Builtin_isBLANK}", "isBLANK(" + convert_node_arg(node.arg) + ")" ) elif node.name.endswith("Builtin_isLITERAL"): replace( "{Builtin_isLITERAL}", "isLITERAL(" + convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_isNUMERIC"): replace( "{Builtin_isNUMERIC}", "isNUMERIC(" + convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_STR"): replace("{Builtin_STR}", "STR(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_LANG"): replace("{Builtin_LANG}", "LANG(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_DATATYPE"): replace( 
"{Builtin_DATATYPE}", "DATATYPE(" + convert_node_arg(node.arg) + ")" ) elif node.name.endswith("Builtin_IRI"): replace("{Builtin_IRI}", "IRI(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_BNODE"): replace("{Builtin_BNODE}", "BNODE(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("STRDT"): replace( "{Builtin_STRDT}", "STRDT(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRLANG"): replace( "{Builtin_STRLANG}", "STRLANG(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_UUID"): replace("{Builtin_UUID}", "UUID()") elif node.name.endswith("Builtin_STRUUID"): replace("{Builtin_STRUUID}", "STRUUID()") # # # 17.4.3 Functions on Strings elif node.name.endswith("Builtin_STRLEN"): replace( "{Builtin_STRLEN}", "STRLEN(" + convert_node_arg(node.arg) + ")" ) elif node.name.endswith("Builtin_SUBSTR"): args = [node.arg.n3(), node.start] if node.length: args.append(node.length) expr = "SUBSTR(" + ", ".join(args) + ")" replace("{Builtin_SUBSTR}", expr) elif node.name.endswith("Builtin_UCASE"): replace("{Builtin_UCASE}", "UCASE(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_LCASE"): replace("{Builtin_LCASE}", "LCASE(" + convert_node_arg(node.arg) + ")") elif node.name.endswith("Builtin_STRSTARTS"): replace( "{Builtin_STRSTARTS}", "STRSTARTS(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRENDS"): replace( "{Builtin_STRENDS}", "STRENDS(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_CONTAINS"): replace( "{Builtin_CONTAINS}", "CONTAINS(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_STRBEFORE"): replace( "{Builtin_STRBEFORE}", "STRBEFORE(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) 
elif node.name.endswith("Builtin_STRAFTER"): replace( "{Builtin_STRAFTER}", "STRAFTER(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("Builtin_ENCODE_FOR_URI"): replace( "{Builtin_ENCODE_FOR_URI}", "ENCODE_FOR_URI(" + convert_node_arg(node.arg) + ")", ) elif node.name.endswith("Builtin_CONCAT"): expr = "CONCAT({vars})".format( vars=", ".join(elem.n3() for elem in node.arg) ) replace("{Builtin_CONCAT}", expr) elif node.name.endswith("Builtin_LANGMATCHES"): replace( "{Builtin_LANGMATCHES}", "LANGMATCHES(" + convert_node_arg(node.arg1) + ", " + convert_node_arg(node.arg2) + ")", ) elif node.name.endswith("REGEX"): args = [convert_node_arg(node.text), convert_node_arg(node.pattern)] expr = "REGEX(" + ", ".join(args) + ")" replace("{Builtin_REGEX}", expr) elif node.name.endswith("REPLACE"): replace( "{Builtin_REPLACE}", "REPLACE(" + convert_node_arg(node.arg) + ", " + convert_node_arg(node.pattern) + ", " + convert_node_arg(node.replacement) + ")", ) # # # 17.4.4 Functions on Numerics elif node.name == "Builtin_ABS": replace("{Builtin_ABS}", "ABS(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_ROUND": replace("{Builtin_ROUND}", "ROUND(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_CEIL": replace("{Builtin_CEIL}", "CEIL(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_FLOOR": replace("{Builtin_FLOOR}", "FLOOR(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_RAND": replace("{Builtin_RAND}", "RAND()") # # # 17.4.5 Functions on Dates and Times elif node.name == "Builtin_NOW": replace("{Builtin_NOW}", "NOW()") elif node.name == "Builtin_YEAR": replace("{Builtin_YEAR}", "YEAR(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_MONTH": replace("{Builtin_MONTH}", "MONTH(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_DAY": replace("{Builtin_DAY}", "DAY(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_HOURS": 
replace("{Builtin_HOURS}", "HOURS(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_MINUTES": replace( "{Builtin_MINUTES}", "MINUTES(" + convert_node_arg(node.arg) + ")" ) elif node.name == "Builtin_SECONDS": replace( "{Builtin_SECONDS}", "SECONDS(" + convert_node_arg(node.arg) + ")" ) elif node.name == "Builtin_TIMEZONE": replace( "{Builtin_TIMEZONE}", "TIMEZONE(" + convert_node_arg(node.arg) + ")" ) elif node.name == "Builtin_TZ": replace("{Builtin_TZ}", "TZ(" + convert_node_arg(node.arg) + ")") # # # 17.4.6 Hash functions elif node.name == "Builtin_MD5": replace("{Builtin_MD5}", "MD5(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_SHA1": replace("{Builtin_SHA1}", "SHA1(" + convert_node_arg(node.arg) + ")") elif node.name == "Builtin_SHA256": replace( "{Builtin_SHA256}", "SHA256(" + convert_node_arg(node.arg) + ")" ) elif node.name == "Builtin_SHA384": replace( "{Builtin_SHA384}", "SHA384(" + convert_node_arg(node.arg) + ")" ) elif node.name == "Builtin_SHA512": replace( "{Builtin_SHA512}", "SHA512(" + convert_node_arg(node.arg) + ")" ) # Other elif node.name == "values": columns = [] for key in node.res[0].keys(): if isinstance(key, Identifier): columns.append(key.n3()) else: raise ExpressionNotCoveredException( "The expression {0} might not be covered yet.".format(key) ) values = "VALUES (" + " ".join(columns) + ")" rows = "" for elem in node.res: row = [] for term in elem.values(): if isinstance(term, Identifier): row.append( term.n3() ) # n3() is not part of Identifier class but every subclass has it elif isinstance(term, str): row.append(term) else: raise ExpressionNotCoveredException( "The expression {0} might not be covered yet.".format( term ) ) rows += "(" + " ".join(row) + ")" replace("values", values + "{" + rows + "}") elif node.name == "ServiceGraphPattern": replace( "{ServiceGraphPattern}", "SERVICE " + convert_node_arg(node.term) + "{" + node.graph.name + "}", ) traverse(node.graph, visitPre=sparql_query_text) return 
node.graph # else: # raise ExpressionNotCoveredException("The expression {0} might not be covered yet.".format(node.name)) traverse(query_algebra.algebra, visitPre=sparql_query_text) query_from_algebra = open("query.txt", "r").read() os.remove("query.txt") return query_from_algebra def pprintAlgebra(q): def pp(p, ind=" "): # if isinstance(p, list): # print "[ " # for x in p: pp(x,ind) # print "%s ]"%ind # return if not isinstance(p, CompValue): print(p) return print("%s(" % (p.name,)) for k in p: print( "%s%s =" % ( ind, k, ), end=" ", ) pp(p[k], ind + " ") print("%s)" % ind) try: pp(q.algebra) except AttributeError: # it's update, just a list for x in q: pp(x) if __name__ == "__main__": import sys from rdflib.plugins.sparql import parser import os.path if os.path.exists(sys.argv[1]): q = open(sys.argv[1]).read() else: q = sys.argv[1] pq = parser.parseQuery(q) print(pq) print("--------") tq = translateQuery(pq) pprintAlgebra(tq) rdflib-6.1.1/rdflib/plugins/sparql/datatypes.py000066400000000000000000000041031415774155300215700ustar00rootroot00000000000000""" Utility functions for supporting the XML Schema Datatypes hierarchy """ from rdflib import XSD XSD_DTs = set( ( XSD.integer, XSD.decimal, XSD.float, XSD.double, XSD.string, XSD.boolean, XSD.dateTime, XSD.nonPositiveInteger, XSD.negativeInteger, XSD.long, XSD.int, XSD.short, XSD.byte, XSD.nonNegativeInteger, XSD.unsignedLong, XSD.unsignedInt, XSD.unsignedShort, XSD.unsignedByte, XSD.positiveInteger, XSD.date, ) ) ### adding dateTime datatypes XSD_DateTime_DTs = set((XSD.dateTime, XSD.date, XSD.time)) XSD_Duration_DTs = set((XSD.duration, XSD.dayTimeDuration, XSD.yearMonthDuration)) _sub_types = { XSD.integer: [ XSD.nonPositiveInteger, XSD.negativeInteger, XSD.long, XSD.int, XSD.short, XSD.byte, XSD.nonNegativeInteger, XSD.positiveInteger, XSD.unsignedLong, XSD.unsignedInt, XSD.unsignedShort, XSD.unsignedByte, ], } _super_types = {} for superdt in XSD_DTs: for subdt in _sub_types.get(superdt, []): 
_super_types[subdt] = superdt # we only care about float, double, integer, decimal _typePromotionMap = { XSD.float: {XSD.integer: XSD.float, XSD.decimal: XSD.float, XSD.double: XSD.double}, XSD.double: { XSD.integer: XSD.double, XSD.float: XSD.double, XSD.decimal: XSD.double, }, XSD.decimal: { XSD.integer: XSD.decimal, XSD.float: XSD.float, XSD.double: XSD.double, }, XSD.integer: { XSD.decimal: XSD.decimal, XSD.float: XSD.float, XSD.double: XSD.double, }, } def type_promotion(t1, t2): if t2 is None: return t1 t1 = _super_types.get(t1, t1) t2 = _super_types.get(t2, t2) if t1 == t2: return t1 # matching super-types try: return _typePromotionMap[t1][t2] except KeyError: raise TypeError("Operators cannot combine datatypes %s and %s" % (t1, t2)) rdflib-6.1.1/rdflib/plugins/sparql/evaluate.py000066400000000000000000000376611415774155300214170ustar00rootroot00000000000000""" These method recursively evaluate the SPARQL Algebra evalQuery is the entry-point, it will setup context and return the SPARQLResult object evalPart is called on each level and will delegate to the right method A rdflib.plugins.sparql.sparql.QueryContext is passed along, keeping information needed for evaluation A list of dicts (solution mappings) is returned, apart from GroupBy which may also return a dict of list of dicts """ import collections import itertools import re from urllib.request import urlopen, Request from urllib.parse import urlencode import json as j from pyparsing import ParseException from rdflib import Variable, Graph, BNode, URIRef, Literal from rdflib.plugins.sparql import CUSTOM_EVALS from rdflib.plugins.sparql.parserutils import value from rdflib.plugins.sparql.sparql import ( QueryContext, AlreadyBound, FrozenBindings, Bindings, SPARQLError, ) from rdflib.plugins.sparql.evalutils import ( _filter, _eval, _join, _diff, _minus, _fillTemplate, _ebv, _val, ) from rdflib.plugins.sparql.aggregates import Aggregator from rdflib.plugins.sparql import parser def evalBGP(ctx, bgp): """ 
A basic graph pattern """ if not bgp: yield ctx.solution() return s, p, o = bgp[0] _s = ctx[s] _p = ctx[p] _o = ctx[o] for ss, sp, so in ctx.graph.triples((_s, _p, _o)): if None in (_s, _p, _o): c = ctx.push() else: c = ctx if _s is None: c[s] = ss try: if _p is None: c[p] = sp except AlreadyBound: continue try: if _o is None: c[o] = so except AlreadyBound: continue for x in evalBGP(c, bgp[1:]): yield x def evalExtend(ctx, extend): # TODO: Deal with dict returned from evalPart from GROUP BY for c in evalPart(ctx, extend.p): try: e = _eval(extend.expr, c.forget(ctx, _except=extend._vars)) if isinstance(e, SPARQLError): raise e yield c.merge({extend.var: e}) except SPARQLError: yield c def evalLazyJoin(ctx, join): """ A lazy join will push the variables bound in the first part to the second part, essentially doing the join implicitly hopefully evaluating much fewer triples """ for a in evalPart(ctx, join.p1): c = ctx.thaw(a) for b in evalPart(c, join.p2): yield b.merge(a) # merge, as some bindings may have been forgotten def evalJoin(ctx, join): # TODO: Deal with dict returned from evalPart from GROUP BY # only ever for join.p1 if join.lazy: return evalLazyJoin(ctx, join) else: a = evalPart(ctx, join.p1) b = set(evalPart(ctx, join.p2)) return _join(a, b) def evalUnion(ctx, union): branch1_branch2 = [] for x in evalPart(ctx, union.p1): branch1_branch2.append(x) for x in evalPart(ctx, union.p2): branch1_branch2.append(x) return branch1_branch2 def evalMinus(ctx, minus): a = evalPart(ctx, minus.p1) b = set(evalPart(ctx, minus.p2)) return _minus(a, b) def evalLeftJoin(ctx, join): # import pdb; pdb.set_trace() for a in evalPart(ctx, join.p1): ok = False c = ctx.thaw(a) for b in evalPart(c, join.p2): if _ebv(join.expr, b.forget(ctx)): ok = True yield b if not ok: # we've cheated, the ctx above may contain # vars bound outside our scope # before we yield a solution without the OPTIONAL part # check that we would have had no OPTIONAL matches # even without prior bindings... 
p1_vars = join.p1._vars if p1_vars is None or not any( _ebv(join.expr, b) for b in evalPart(ctx.thaw(a.remember(p1_vars)), join.p2) ): yield a def evalFilter(ctx, part): # TODO: Deal with dict returned from evalPart! for c in evalPart(ctx, part.p): if _ebv( part.expr, c.forget(ctx, _except=part._vars) if not part.no_isolated_scope else c, ): yield c def evalGraph(ctx, part): if ctx.dataset is None: raise Exception( "Non-conjunctive-graph doesn't know about " + "graphs. Try a query without GRAPH." ) ctx = ctx.clone() graph = ctx[part.term] prev_graph = ctx.graph if graph is None: for graph in ctx.dataset.contexts(): # in SPARQL the default graph is NOT a named graph if graph == ctx.dataset.default_context: continue c = ctx.pushGraph(graph) c = c.push() graphSolution = [{part.term: graph.identifier}] for x in _join(evalPart(c, part.p), graphSolution): x.ctx.graph = prev_graph yield x else: c = ctx.pushGraph(ctx.dataset.get_context(graph)) for x in evalPart(c, part.p): x.ctx.graph = prev_graph yield x def evalValues(ctx, part): for r in part.p.res: c = ctx.push() try: for k, v in r.items(): if v != "UNDEF": c[k] = v except AlreadyBound: continue yield c.solution() def evalMultiset(ctx, part): if part.p.name == "values": return evalValues(ctx, part) return evalPart(ctx, part.p) def evalPart(ctx, part): # try custom evaluation functions for name, c in CUSTOM_EVALS.items(): try: return c(ctx, part) except NotImplementedError: pass # the given custome-function did not handle this part if part.name == "BGP": # Reorder triples patterns by number of bound nodes in the current ctx # Do patterns with more bound nodes first triples = sorted( part.triples, key=lambda t: len([n for n in t if ctx[n] is None]) ) return evalBGP(ctx, triples) elif part.name == "Filter": return evalFilter(ctx, part) elif part.name == "Join": return evalJoin(ctx, part) elif part.name == "LeftJoin": return evalLeftJoin(ctx, part) elif part.name == "Graph": return evalGraph(ctx, part) elif part.name == 
"Union": return evalUnion(ctx, part) elif part.name == "ToMultiSet": return evalMultiset(ctx, part) elif part.name == "Extend": return evalExtend(ctx, part) elif part.name == "Minus": return evalMinus(ctx, part) elif part.name == "Project": return evalProject(ctx, part) elif part.name == "Slice": return evalSlice(ctx, part) elif part.name == "Distinct": return evalDistinct(ctx, part) elif part.name == "Reduced": return evalReduced(ctx, part) elif part.name == "OrderBy": return evalOrderBy(ctx, part) elif part.name == "Group": return evalGroup(ctx, part) elif part.name == "AggregateJoin": return evalAggregateJoin(ctx, part) elif part.name == "SelectQuery": return evalSelectQuery(ctx, part) elif part.name == "AskQuery": return evalAskQuery(ctx, part) elif part.name == "ConstructQuery": return evalConstructQuery(ctx, part) elif part.name == "ServiceGraphPattern": return evalServiceQuery(ctx, part) # raise Exception('ServiceGraphPattern not implemented') elif part.name == "DescribeQuery": raise Exception("DESCRIBE not implemented") else: raise Exception("I dont know: %s" % part.name) def evalServiceQuery(ctx, part): res = {} match = re.match( "^service <(.*)>[ \n]*{(.*)}[ \n]*$", part.get("service_string", ""), re.DOTALL | re.I, ) if match: service_url = match.group(1) service_query = _buildQueryStringForServiceCall(ctx, match) query_settings = {"query": service_query, "output": "json"} headers = { "accept": "application/sparql-results+json", "user-agent": "rdflibForAnUser", } # GET is easier to cache so prefer that if the query is not to long if len(service_query) < 600: response = urlopen( Request(service_url + "?" + urlencode(query_settings), headers=headers) ) else: response = urlopen( Request( service_url, data=urlencode(query_settings).encode(), headers=headers, ) ) if response.status == 200: json = j.loads(response.read()) variables = res["vars_"] = json["head"]["vars"] # or just return the bindings? 
res = json["results"]["bindings"] if len(res) > 0: for r in res: for bound in _yieldBindingsFromServiceCallResult(ctx, r, variables): yield bound else: raise Exception( "Service: %s responded with code: %s", service_url, response.status ) """ Build a query string to be used by the service call. It is supposed to pass in the existing bound solutions. Re-adds prefixes if added and sets the base. Wraps it in select if needed. """ def _buildQueryStringForServiceCall(ctx, match): service_query = match.group(2) try: parser.parseQuery(service_query) except ParseException: # This could be because we don't have a select around the service call. service_query = "SELECT REDUCED * WHERE {" + service_query + "}" for p in ctx.prologue.namespace_manager.store.namespaces(): service_query = "PREFIX " + p[0] + ":" + p[1].n3() + " " + service_query # re add the base if one was defined base = ctx.prologue.base if base is not None and len(base) > 0: service_query = "BASE <" + base + "> " + service_query sol = ctx.solution() if len(sol) > 0: variables = " ".join([v.n3() for v in sol]) variables_bound = " ".join([ctx.get(v).n3() for v in sol]) service_query = ( service_query + "VALUES (" + variables + ") {(" + variables_bound + ")}" ) return service_query def _yieldBindingsFromServiceCallResult(ctx, r, variables): res_dict = {} for var in variables: if var in r and r[var]: if r[var]["type"] == "uri": res_dict[Variable(var)] = URIRef(r[var]["value"]) elif r[var]["type"] == "bnode": res_dict[Variable(var)] = BNode(r[var]["value"]) elif r[var]["type"] == "literal" and "datatype" in r[var]: res_dict[Variable(var)] = Literal( r[var]["value"], datatype=r[var]["datatype"] ) elif r[var]["type"] == "literal" and "xml:lang" in r[var]: res_dict[Variable(var)] = Literal( r[var]["value"], lang=r[var]["xml:lang"] ) yield FrozenBindings(ctx, res_dict) def evalGroup(ctx, group): """ http://www.w3.org/TR/sparql11-query/#defn_algGroup """ # grouping should be implemented by evalAggregateJoin return 
evalPart(ctx, group.p) def evalAggregateJoin(ctx, agg): # import pdb ; pdb.set_trace() p = evalPart(ctx, agg.p) # p is always a Group, we always get a dict back group_expr = agg.p.expr res = collections.defaultdict(lambda: Aggregator(aggregations=agg.A)) if group_expr is None: # no grouping, just COUNT in SELECT clause # get 1 aggregator for counting aggregator = res[True] for row in p: aggregator.update(row) else: for row in p: # determine right group aggregator for row k = tuple(_eval(e, row, False) for e in group_expr) res[k].update(row) # all rows are done; yield aggregated values for aggregator in res.values(): yield FrozenBindings(ctx, aggregator.get_bindings()) # there were no matches if len(res) == 0: yield FrozenBindings(ctx) def evalOrderBy(ctx, part): res = evalPart(ctx, part.p) for e in reversed(part.expr): reverse = bool(e.order and e.order == "DESC") res = sorted( res, key=lambda x: _val(value(x, e.expr, variables=True)), reverse=reverse ) return res def evalSlice(ctx, slice): res = evalPart(ctx, slice.p) return itertools.islice( res, slice.start, slice.start + slice.length if slice.length is not None else None, ) def evalReduced(ctx, part): """apply REDUCED to result REDUCED is not as strict as DISTINCT, but if the incoming rows were sorted it should produce the same result with limited extra memory and time per incoming row. """ # This implementation uses a most recently used strategy and a limited # buffer size. 
It relates to a LRU caching algorithm: # https://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used_.28LRU.29 MAX = 1 # TODO: add configuration or determine "best" size for most use cases # 0: No reduction # 1: compare only with the last row, almost no reduction with # unordered incoming rows # N: The greater the buffer size the greater the reduction but more # memory and time are needed # mixed data structure: set for lookup, deque for append/pop/remove mru_set = set() mru_queue = collections.deque() for row in evalPart(ctx, part.p): if row in mru_set: # forget last position of row mru_queue.remove(row) else: # row seems to be new yield row mru_set.add(row) if len(mru_set) > MAX: # drop the least recently used row from buffer mru_set.remove(mru_queue.pop()) # put row to the front mru_queue.appendleft(row) def evalDistinct(ctx, part): res = evalPart(ctx, part.p) done = set() for x in res: if x not in done: yield x done.add(x) def evalProject(ctx, project): res = evalPart(ctx, project.p) return (row.project(project.PV) for row in res) def evalSelectQuery(ctx, query): res = {} res["type_"] = "SELECT" res["bindings"] = evalPart(ctx, query.p) res["vars_"] = query.PV return res def evalAskQuery(ctx, query): res = {} res["type_"] = "ASK" res["askAnswer"] = False for x in evalPart(ctx, query.p): res["askAnswer"] = True break return res def evalConstructQuery(ctx, query): template = query.template if not template: # a construct-where query template = query.p.p.triples # query->project->bgp ... 
graph = Graph() for c in evalPart(ctx, query.p): graph += _fillTemplate(template, c) res = {} res["type_"] = "CONSTRUCT" res["graph"] = graph return res def evalQuery(graph, query, initBindings, base=None): initBindings = dict((Variable(k), v) for k, v in initBindings.items()) ctx = QueryContext(graph, initBindings=initBindings) ctx.prologue = query.prologue main = query.algebra if main.datasetClause: if ctx.dataset is None: raise Exception( "Non-conjunctive-graph doesn't know about " + "graphs! Try a query without FROM (NAMED)." ) ctx = ctx.clone() # or push/pop? firstDefault = False for d in main.datasetClause: if d.default: if firstDefault: # replace current default graph dg = ctx.dataset.get_context(BNode()) ctx = ctx.pushGraph(dg) firstDefault = True ctx.load(d.default, default=True) elif d.named: g = d.named ctx.load(g, default=False) return evalPart(ctx, main) rdflib-6.1.1/rdflib/plugins/sparql/evalutils.py000066400000000000000000000056361415774155300216160ustar00rootroot00000000000000import collections from rdflib.term import Variable, Literal, BNode, URIRef from rdflib.plugins.sparql.operators import EBV from rdflib.plugins.sparql.parserutils import Expr, CompValue from rdflib.plugins.sparql.sparql import SPARQLError, NotBoundError def _diff(a, b, expr): res = set() for x in a: if all(not x.compatible(y) or not _ebv(expr, x.merge(y)) for y in b): res.add(x) return res def _minus(a, b): for x in a: if all((not x.compatible(y)) or x.disjointDomain(y) for y in b): yield x def _join(a, b): for x in a: for y in b: if x.compatible(y): yield x.merge(y) def _ebv(expr, ctx): """ Return true/false for the given expr Either the expr is itself true/false or evaluates to something, with the given ctx an error is false """ try: return EBV(expr) except SPARQLError: pass if isinstance(expr, Expr): try: return EBV(expr.eval(ctx)) except SPARQLError: return False # filter error == False elif isinstance(expr, CompValue): raise Exception("Weird - filter got a CompValue 
without evalfn! %r" % expr) elif isinstance(expr, Variable): try: return EBV(ctx[expr]) except: return False return False def _eval(expr, ctx, raise_not_bound_error=True): if isinstance(expr, (Literal, URIRef)): return expr if isinstance(expr, Expr): return expr.eval(ctx) elif isinstance(expr, Variable): try: return ctx[expr] except KeyError: if raise_not_bound_error: raise NotBoundError("Variable %s is not bound" % expr) else: return None elif isinstance(expr, CompValue): raise Exception("Weird - _eval got a CompValue without evalfn! %r" % expr) else: raise Exception("Cannot eval thing: %s (%s)" % (expr, type(expr))) def _filter(a, expr): for c in a: if _ebv(expr, c): yield c def _fillTemplate(template, solution): """ For construct/deleteWhere and friends Fill a triple template with instantiated variables """ bnodeMap = collections.defaultdict(BNode) for t in template: s, p, o = t _s = solution.get(s) _p = solution.get(p) _o = solution.get(o) # instantiate new bnodes for each solution _s, _p, _o = [ bnodeMap[x] if isinstance(x, BNode) else y for x, y in zip(t, (_s, _p, _o)) ] if _s is not None and _p is not None and _o is not None: yield (_s, _p, _o) def _val(v): """utilitity for ordering things""" if isinstance(v, Variable): return (0, v) elif isinstance(v, BNode): return (1, v) elif isinstance(v, URIRef): return (2, v) elif isinstance(v, Literal): return (3, v) rdflib-6.1.1/rdflib/plugins/sparql/operators.py000066400000000000000000000763351415774155300216300ustar00rootroot00000000000000""" This contains evaluation functions for expressions They get bound as instances-methods to the CompValue objects from parserutils using setEvalFn """ import sys import re import math import random import uuid import hashlib import datetime as py_datetime # naming conflict with function within this module from functools import reduce from decimal import Decimal, ROUND_HALF_UP, InvalidOperation import operator as pyop # python operators import isodate from 
rdflib.plugins.sparql.parserutils import CompValue, Expr from rdflib.plugins.sparql.datatypes import XSD_DTs, type_promotion from rdflib.plugins.sparql.datatypes import XSD_DateTime_DTs, XSD_Duration_DTs from rdflib import URIRef, BNode, Variable, Literal, XSD, RDF from rdflib.term import Node from urllib.parse import quote from pyparsing import ParseResults from rdflib.plugins.sparql.sparql import SPARQLError, SPARQLTypeError def Builtin_IRI(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-iri """ a = expr.arg if isinstance(a, URIRef): return a if isinstance(a, Literal): return ctx.prologue.absolutize(URIRef(a)) raise SPARQLError("IRI function only accepts URIRefs or Literals/Strings!") def Builtin_isBLANK(expr, ctx): return Literal(isinstance(expr.arg, BNode)) def Builtin_isLITERAL(expr, ctx): return Literal(isinstance(expr.arg, Literal)) def Builtin_isIRI(expr, ctx): return Literal(isinstance(expr.arg, URIRef)) def Builtin_isNUMERIC(expr, ctx): try: numeric(expr.arg) return Literal(True) except: return Literal(False) def Builtin_BNODE(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-bnode """ a = expr.arg if a is None: return BNode() if isinstance(a, Literal): return ctx.bnodes[a] # defaultdict does the right thing raise SPARQLError("BNode function only accepts no argument or literal/string") def Builtin_ABS(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-abs """ return Literal(abs(numeric(expr.arg))) def Builtin_IF(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-if """ return expr.arg2 if EBV(expr.arg1) else expr.arg3 def Builtin_RAND(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#idp2133952 """ return Literal(random.random()) def Builtin_UUID(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strdt """ return URIRef(uuid.uuid4().urn) def Builtin_STRUUID(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strdt """ return Literal(str(uuid.uuid4())) def Builtin_MD5(expr, ctx): s = 
string(expr.arg).encode("utf-8") return Literal(hashlib.md5(s).hexdigest()) def Builtin_SHA1(expr, ctx): s = string(expr.arg).encode("utf-8") return Literal(hashlib.sha1(s).hexdigest()) def Builtin_SHA256(expr, ctx): s = string(expr.arg).encode("utf-8") return Literal(hashlib.sha256(s).hexdigest()) def Builtin_SHA384(expr, ctx): s = string(expr.arg).encode("utf-8") return Literal(hashlib.sha384(s).hexdigest()) def Builtin_SHA512(expr, ctx): s = string(expr.arg).encode("utf-8") return Literal(hashlib.sha512(s).hexdigest()) def Builtin_COALESCE(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-coalesce """ for x in expr.get("arg", variables=True): if x is not None and not isinstance(x, (SPARQLError, Variable)): return x raise SPARQLError("COALESCE got no arguments that did not evaluate to an error") def Builtin_CEIL(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-ceil """ l_ = expr.arg return Literal(int(math.ceil(numeric(l_))), datatype=l_.datatype) def Builtin_FLOOR(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-floor """ l_ = expr.arg return Literal(int(math.floor(numeric(l_))), datatype=l_.datatype) def Builtin_ROUND(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-round """ # This used to be just math.bound # but in py3k bound was changed to # "round-to-even" behaviour # this is an ugly work-around l_ = expr.arg v = numeric(l_) v = int(Decimal(v).quantize(1, ROUND_HALF_UP)) return Literal(v, datatype=l_.datatype) def Builtin_REGEX(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-regex Invokes the XPath fn:matches function to match text against a regular expression pattern. 
The regular expression language is defined in XQuery 1.0 and XPath 2.0 Functions and Operators section 7.6.1 Regular Expression Syntax """ text = string(expr.text) pattern = string(expr.pattern) flags = expr.flags cFlag = 0 if flags: # Maps XPath REGEX flags (http://www.w3.org/TR/xpath-functions/#flags) # to Python's re flags flagMap = dict([("i", re.IGNORECASE), ("s", re.DOTALL), ("m", re.MULTILINE)]) cFlag = reduce(pyop.or_, [flagMap.get(f, 0) for f in flags]) return Literal(bool(re.search(str(pattern), text, cFlag))) def Builtin_REPLACE(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-substr """ text = string(expr.arg) pattern = string(expr.pattern) replacement = string(expr.replacement) flags = expr.flags # python uses \1, xpath/sparql uses $1 replacement = re.sub("\\$([0-9]*)", r"\\\1", replacement) def _r(m): # Now this is ugly. # Python has a "feature" where unmatched groups return None # then re.sub chokes on this. # see http://bugs.python.org/issue1519638 , fixed and errs in py3.5 # this works around and hooks into the internal of the re module... # the match object is replaced with a wrapper that # returns "" instead of None for unmatched groups class _m: def __init__(self, m): self.m = m self.string = m.string def group(self, n): return m.group(n) or "" return re._expand(pattern, _m(m), replacement) cFlag = 0 if flags: # Maps XPath REGEX flags (http://www.w3.org/TR/xpath-functions/#flags) # to Python's re flags flagMap = dict([("i", re.IGNORECASE), ("s", re.DOTALL), ("m", re.MULTILINE)]) cFlag = reduce(pyop.or_, [flagMap.get(f, 0) for f in flags]) # @@FIXME@@ either datatype OR lang, NOT both # this is necessary due to different treatment of unmatched groups in # python versions. see comments above in _r(m). 
compat_r = str(replacement) if sys.version_info[:2] >= (3, 5) else _r return Literal( re.sub(str(pattern), compat_r, text, cFlag), datatype=text.datatype, lang=text.language, ) def Builtin_STRDT(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strdt """ return Literal(str(expr.arg1), datatype=expr.arg2) def Builtin_STRLANG(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strlang """ s = string(expr.arg1) if s.language or s.datatype: raise SPARQLError("STRLANG expects a simple literal") # TODO: normalisation of lang tag to lower-case # should probably happen in literal __init__ return Literal(str(s), lang=str(expr.arg2).lower()) def Builtin_CONCAT(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-concat """ # dt/lang passed on only if they all match dt = set(x.datatype for x in expr.arg) dt = dt.pop() if len(dt) == 1 else None lang = set(x.language for x in expr.arg) lang = lang.pop() if len(lang) == 1 else None return Literal("".join(string(x) for x in expr.arg), datatype=dt, lang=lang) def _compatibleStrings(a, b): string(a) string(b) if b.language and a.language != b.language: raise SPARQLError("incompatible arguments to str functions") def Builtin_STRSTARTS(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strstarts """ a = expr.arg1 b = expr.arg2 _compatibleStrings(a, b) return Literal(a.startswith(b)) def Builtin_STRENDS(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strends """ a = expr.arg1 b = expr.arg2 _compatibleStrings(a, b) return Literal(a.endswith(b)) def Builtin_STRBEFORE(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strbefore """ a = expr.arg1 b = expr.arg2 _compatibleStrings(a, b) i = a.find(b) if i == -1: return Literal("") else: return Literal(a[:i], lang=a.language, datatype=a.datatype) def Builtin_STRAFTER(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strafter """ a = expr.arg1 b = expr.arg2 _compatibleStrings(a, b) i = a.find(b) if i == -1: return Literal("") else: return 
Literal(a[i + len(b) :], lang=a.language, datatype=a.datatype) def Builtin_CONTAINS(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-strcontains """ a = expr.arg1 b = expr.arg2 _compatibleStrings(a, b) return Literal(b in a) def Builtin_ENCODE_FOR_URI(expr, ctx): return Literal(quote(string(expr.arg).encode("utf-8"))) def Builtin_SUBSTR(expr, ctx): """ http://www.w3.org/TR/sparql11-query/#func-substr """ a = string(expr.arg) start = numeric(expr.start) - 1 length = expr.length if length is not None: length = numeric(length) + start return Literal(a[start:length], lang=a.language, datatype=a.datatype) def Builtin_STRLEN(e, ctx): l_ = string(e.arg) return Literal(len(l_)) def Builtin_STR(e, ctx): arg = e.arg if isinstance(arg, SPARQLError): raise arg return Literal(str(arg)) # plain literal def Builtin_LCASE(e, ctx): l_ = string(e.arg) return Literal(l_.lower(), datatype=l_.datatype, lang=l_.language) def Builtin_LANGMATCHES(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-langMatches """ langTag = string(e.arg1) langRange = string(e.arg2) if str(langTag) == "": return Literal(False) # nothing matches empty! return Literal(_lang_range_check(langRange, langTag)) def Builtin_NOW(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-now """ return Literal(ctx.now) def Builtin_YEAR(e, ctx): d = date(e.arg) return Literal(d.year) def Builtin_MONTH(e, ctx): d = date(e.arg) return Literal(d.month) def Builtin_DAY(e, ctx): d = date(e.arg) return Literal(d.day) def Builtin_HOURS(e, ctx): d = datetime(e.arg) return Literal(d.hour) def Builtin_MINUTES(e, ctx): d = datetime(e.arg) return Literal(d.minute) def Builtin_SECONDS(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-seconds """ d = datetime(e.arg) return Literal(d.second, datatype=XSD.decimal) def Builtin_TIMEZONE(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-timezone :returns: the timezone part of arg as an xsd:dayTimeDuration. :raises: an error if there is no timezone. 
""" dt = datetime(e.arg) if not dt.tzinfo: raise SPARQLError("datatime has no timezone: %r" % dt) delta = dt.utcoffset() d = delta.days s = delta.seconds neg = "" if d < 0: s = -24 * 60 * 60 * d - s d = 0 neg = "-" h = s / (60 * 60) m = (s - h * 60 * 60) / 60 s = s - h * 60 * 60 - m * 60 tzdelta = "%sP%sT%s%s%s" % ( neg, "%dD" % d if d else "", "%dH" % h if h else "", "%dM" % m if m else "", "%dS" % s if not d and not h and not m else "", ) return Literal(tzdelta, datatype=XSD.dayTimeDuration) def Builtin_TZ(e, ctx): d = datetime(e.arg) if not d.tzinfo: return Literal("") n = d.tzinfo.tzname(d) if n == "UTC": n = "Z" return Literal(n) def Builtin_UCASE(e, ctx): l_ = string(e.arg) return Literal(l_.upper(), datatype=l_.datatype, lang=l_.language) def Builtin_LANG(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-lang Returns the language tag of ltrl, if it has one. It returns "" if ltrl has no language tag. Note that the RDF data model does not include literals with an empty language tag. """ l_ = literal(e.arg) return Literal(l_.language or "") def Builtin_DATATYPE(e, ctx): l_ = e.arg if not isinstance(l_, Literal): raise SPARQLError("Can only get datatype of literal: %r" % l_) if l_.language: return RDF.langString if not l_.datatype and not l_.language: return XSD.string return l_.datatype def Builtin_sameTerm(e, ctx): a = e.arg1 b = e.arg2 return Literal(a == b) def Builtin_BOUND(e, ctx): """ http://www.w3.org/TR/sparql11-query/#func-bound """ n = e.get("arg", variables=True) return Literal(not isinstance(n, Variable)) def Builtin_EXISTS(e, ctx): # damn... from rdflib.plugins.sparql.evaluate import evalPart exists = e.name == "Builtin_EXISTS" ctx = ctx.ctx.thaw(ctx) # hmm for x in evalPart(ctx, e.graph): return Literal(exists) return Literal(not exists) _CUSTOM_FUNCTIONS = {} def register_custom_function(uri, func, override=False, raw=False): """ Register a custom SPARQL function. By default, the function will be passed the RDF terms in the argument list. 
If raw is True, the function will be passed an Expression and a Context. The function must return an RDF term, or raise a SparqlError. """ if not override and uri in _CUSTOM_FUNCTIONS: raise ValueError("A function is already registered as %s" % uri.n3()) _CUSTOM_FUNCTIONS[uri] = (func, raw) def custom_function(uri, override=False, raw=False): """ Decorator version of :func:`register_custom_function`. """ def decorator(func): register_custom_function(uri, func, override=override, raw=raw) return func return decorator def unregister_custom_function(uri, func): if _CUSTOM_FUNCTIONS.get(uri, (None, None))[0] != func: raise ValueError("This function is not registered as %s" % uri.n3()) del _CUSTOM_FUNCTIONS[uri] def Function(e, ctx): """ Custom functions and casts """ pair = _CUSTOM_FUNCTIONS.get(e.iri) if pair is None: # no such function is registered raise SPARQLError("Unknown function %r" % e.iri) func, raw = pair if raw: # function expects expression and context return func(e, ctx) else: # function expects the argument list try: return func(*e.expr) except TypeError as ex: # wrong argument number raise SPARQLError(*ex.args) @custom_function(XSD.string, raw=True) @custom_function(XSD.dateTime, raw=True) @custom_function(XSD.float, raw=True) @custom_function(XSD.double, raw=True) @custom_function(XSD.decimal, raw=True) @custom_function(XSD.integer, raw=True) @custom_function(XSD.boolean, raw=True) def default_cast(e, ctx): if not e.expr: raise SPARQLError("Nothing given to cast.") if len(e.expr) > 1: raise SPARQLError("Cannot cast more than one thing!") x = e.expr[0] if e.iri == XSD.string: if isinstance(x, (URIRef, Literal)): return Literal(x, datatype=XSD.string) else: raise SPARQLError("Cannot cast term %r of type %r" % (x, type(x))) if not isinstance(x, Literal): raise SPARQLError("Can only cast Literals to non-string data-types") if x.datatype and not x.datatype in XSD_DTs: raise SPARQLError("Cannot cast literal with unknown datatype: %r" % x.datatype) if e.iri 
== XSD.dateTime: if x.datatype and x.datatype not in (XSD.dateTime, XSD.string): raise SPARQLError("Cannot cast %r to XSD:dateTime" % x.datatype) try: return Literal(isodate.parse_datetime(x), datatype=e.iri) except: raise SPARQLError("Cannot interpret '%r' as datetime" % x) if x.datatype == XSD.dateTime: raise SPARQLError("Cannot cast XSD.dateTime to %r" % e.iri) if e.iri in (XSD.float, XSD.double): try: return Literal(float(x), datatype=e.iri) except: raise SPARQLError("Cannot interpret '%r' as float" % x) elif e.iri == XSD.decimal: if "e" in x or "E" in x: # SPARQL/XSD does not allow exponents in decimals raise SPARQLError("Cannot interpret '%r' as decimal" % x) try: return Literal(Decimal(x), datatype=e.iri) except: raise SPARQLError("Cannot interpret '%r' as decimal" % x) elif e.iri == XSD.integer: try: return Literal(int(x), datatype=XSD.integer) except: raise SPARQLError("Cannot interpret '%r' as int" % x) elif e.iri == XSD.boolean: # # I would argue that any number is True... # try: # return Literal(bool(int(x)), datatype=XSD.boolean) # except: if x.lower() in ("1", "true"): return Literal(True) if x.lower() in ("0", "false"): return Literal(False) raise SPARQLError("Cannot interpret '%r' as bool" % x) def UnaryNot(expr, ctx): return Literal(not EBV(expr.expr)) def UnaryMinus(expr, ctx): return Literal(-numeric(expr.expr)) def UnaryPlus(expr, ctx): return Literal(+numeric(expr.expr)) def MultiplicativeExpression(e, ctx): expr = e.expr other = e.other # because of the way the mul-expr production handled operator precedence # we sometimes have nothing to do if other is None: return expr try: res = Decimal(numeric(expr)) for op, f in zip(e.op, other): f = numeric(f) if type(f) == float: res = float(res) if op == "*": res *= f else: res /= f except (InvalidOperation, ZeroDivisionError): raise SPARQLError("divide by 0") return Literal(res) def AdditiveExpression(e, ctx): expr = e.expr other = e.other # because of the way the add-expr production handled operator 
precedence # we sometimes have nothing to do if other is None: return expr # handling arithmetic(addition/subtraction) of dateTime, date, time # and duration datatypes (if any) if hasattr(expr, "datatype") and ( expr.datatype in XSD_DateTime_DTs or expr.datatype in XSD_Duration_DTs ): res = dateTimeObjects(expr) dt = expr.datatype for op, term in zip(e.op, other): # check if operation is datetime,date,time operation over # another datetime,date,time datatype if dt in XSD_DateTime_DTs and dt == term.datatype and op == "-": # checking if there are more than one datetime operands - # in that case it doesn't make sense for example # ( dateTime1 - dateTime2 - dateTime3 ) is an invalid operation if len(other) > 1: error_message = "Can't evaluate multiple %r arguments" raise SPARQLError(error_message, dt.datatype) else: n = dateTimeObjects(term) res = calculateDuration(res, n) return res # datetime,date,time +/- duration,dayTimeDuration,yearMonthDuration elif dt in XSD_DateTime_DTs and term.datatype in XSD_Duration_DTs: n = dateTimeObjects(term) res = calculateFinalDateTime(res, dt, n, term.datatype, op) return res # duration,dayTimeDuration,yearMonthDuration + datetime,date,time elif dt in XSD_Duration_DTs and term.datatype in XSD_DateTime_DTs: if op == "+": n = dateTimeObjects(term) res = calculateFinalDateTime(res, dt, n, term.datatype, op) return res # rest are invalid types else: raise SPARQLError("Invalid DateTime Operations") # handling arithmetic(addition/subtraction) of numeric datatypes (if any) else: res = numeric(expr) dt = expr.datatype for op, term in zip(e.op, other): n = numeric(term) if isinstance(n, Decimal) and isinstance(res, float): n = float(n) if isinstance(n, float) and isinstance(res, Decimal): res = float(res) dt = type_promotion(dt, term.datatype) if op == "+": res += n else: res -= n return Literal(res, datatype=dt) def RelationalExpression(e, ctx): expr = e.expr other = e.other op = e.op # because of the way the add-expr production handled 
operator precedence # we sometimes have nothing to do if other is None: return expr ops = dict( [ (">", lambda x, y: x.__gt__(y)), ("<", lambda x, y: x.__lt__(y)), ("=", lambda x, y: x.eq(y)), ("!=", lambda x, y: x.neq(y)), (">=", lambda x, y: x.__ge__(y)), ("<=", lambda x, y: x.__le__(y)), ("IN", pyop.contains), ("NOT IN", lambda x, y: not pyop.contains(x, y)), ] ) if op in ("IN", "NOT IN"): res = op == "NOT IN" error = False if other == RDF.nil: other = [] for x in other: try: if x == expr: return Literal(True ^ res) except SPARQLError as e: error = e if not error: return Literal(False ^ res) else: raise error if op not in ("=", "!=", "IN", "NOT IN"): if not isinstance(expr, Literal): raise SPARQLError( "Compare other than =, != of non-literals is an error: %r" % expr ) if not isinstance(other, Literal): raise SPARQLError( "Compare other than =, != of non-literals is an error: %r" % other ) else: if not isinstance(expr, Node): raise SPARQLError("I cannot compare this non-node: %r" % expr) if not isinstance(other, Node): raise SPARQLError("I cannot compare this non-node: %r" % other) if isinstance(expr, Literal) and isinstance(other, Literal): if ( expr.datatype is not None and expr.datatype not in XSD_DTs and other.datatype is not None and other.datatype not in XSD_DTs ): # in SPARQL for non-XSD DT Literals we can only do =,!= if op not in ("=", "!="): raise SPARQLError("Can only do =,!= comparisons of non-XSD Literals") try: r = ops[op](expr, other) if r == NotImplemented: raise SPARQLError("Error when comparing") except TypeError as te: raise SPARQLError(*te.args) return Literal(r) def ConditionalAndExpression(e, ctx): # TODO: handle returned errors expr = e.expr other = e.other # because of the way the add-expr production handled operator precedence # we sometimes have nothing to do if other is None: return expr return Literal(all(EBV(x) for x in [expr] + other)) def ConditionalOrExpression(e, ctx): # TODO: handle errors expr = e.expr other = e.other # because 
of the way the add-expr production handled operator precedence # we sometimes have nothing to do if other is None: return expr # A logical-or that encounters an error on only one branch # will return TRUE if the other branch is TRUE and an error # if the other branch is FALSE. error = None for x in [expr] + other: try: if EBV(x): return Literal(True) except SPARQLError as e: error = e if error: raise error return Literal(False) def not_(arg): return Expr("UnaryNot", UnaryNot, expr=arg) def and_(*args): if len(args) == 1: return args[0] return Expr( "ConditionalAndExpression", ConditionalAndExpression, expr=args[0], other=list(args[1:]), ) TrueFilter = Expr("TrueFilter", lambda _1, _2: Literal(True)) def simplify(expr): if isinstance(expr, ParseResults) and len(expr) == 1: return simplify(expr[0]) if isinstance(expr, (list, ParseResults)): return list(map(simplify, expr)) if not isinstance(expr, CompValue): return expr if expr.name.endswith("Expression"): if expr.other is None: return simplify(expr.expr) for k in expr.keys(): expr[k] = simplify(expr[k]) # expr['expr']=simplify(expr.expr) # expr['other']=simplify(expr.other) return expr def literal(s): if not isinstance(s, Literal): raise SPARQLError("Non-literal passed as string: %r" % s) return s def datetime(e): if not isinstance(e, Literal): raise SPARQLError("Non-literal passed as datetime: %r" % e) if not e.datatype == XSD.dateTime: raise SPARQLError("Literal with wrong datatype passed as datetime: %r" % e) return e.toPython() def date(e) -> py_datetime.date: if not isinstance(e, Literal): raise SPARQLError("Non-literal passed as date: %r" % e) if e.datatype not in (XSD.date, XSD.dateTime): raise SPARQLError("Literal with wrong datatype passed as date: %r" % e) result = e.toPython() if isinstance(result, py_datetime.datetime): return result.date() return result def string(s): """ Make sure the passed thing is a string literal i.e. 
plain literal, xsd:string literal or lang-tagged literal """ if not isinstance(s, Literal): raise SPARQLError("Non-literal passes as string: %r" % s) if s.datatype and s.datatype != XSD.string: raise SPARQLError("Non-string datatype-literal passes as string: %r" % s) return s def numeric(expr): """ return a number from a literal http://www.w3.org/TR/xpath20/#promotion or TypeError """ if not isinstance(expr, Literal): raise SPARQLTypeError("%r is not a literal!" % expr) if expr.datatype not in ( XSD.float, XSD.double, XSD.decimal, XSD.integer, XSD.nonPositiveInteger, XSD.negativeInteger, XSD.nonNegativeInteger, XSD.positiveInteger, XSD.unsignedLong, XSD.unsignedInt, XSD.unsignedShort, XSD.unsignedByte, XSD.long, XSD.int, XSD.short, XSD.byte, ): raise SPARQLTypeError("%r does not have a numeric datatype!" % expr) return expr.toPython() def dateTimeObjects(expr): """ return a dataTime/date/time/duration/dayTimeDuration/yearMonthDuration python objects from a literal """ return expr.toPython() def isCompatibleDateTimeDatatype(obj1, dt1, obj2, dt2): """ Returns a boolean indicating if first object is compatible with operation(+/-) over second object. """ if dt1 == XSD.date: if dt2 == XSD.yearMonthDuration: return True elif dt2 == XSD.dayTimeDuration or dt2 == XSD.Duration: # checking if the dayTimeDuration has no Time Component # else it wont be compatible with Date Literal if "T" in str(obj2): return False else: return True if dt1 == XSD.time: if dt2 == XSD.yearMonthDuration: return False elif dt2 == XSD.dayTimeDuration or dt2 == XSD.Duration: # checking if the dayTimeDuration has no Date Component # (by checking if the format is "PT...." 
) # else it wont be compatible with Time Literal if "T" == str(obj2)[1]: return True else: return False if dt1 == XSD.dateTime: # compatible with all return True def calculateDuration(obj1, obj2): """ returns the duration Literal between two datetime """ date1 = obj1 date2 = obj2 difference = date1 - date2 return Literal(difference, datatype=XSD.duration) def calculateFinalDateTime(obj1, dt1, obj2, dt2, operation): """ Calculates the final dateTime/date/time resultant after addition/ subtraction of duration/dayTimeDuration/yearMonthDuration """ # checking compatibility of datatypes (duration types and date/time/dateTime) if isCompatibleDateTimeDatatype(obj1, dt1, obj2, dt2): # proceed if operation == "-": ans = obj1 - obj2 return Literal(ans, datatype=dt1) else: ans = obj1 + obj2 return Literal(ans, datatype=dt1) else: raise SPARQLError("Incompatible Data types to DateTime Operations") def EBV(rt): """ Effective Boolean Value (EBV) * If the argument is a typed literal with a datatype of xsd:boolean, the EBV is the value of that argument. * If the argument is a plain literal or a typed literal with a datatype of xsd:string, the EBV is false if the operand value has zero length; otherwise the EBV is true. * If the argument is a numeric type or a typed literal with a datatype derived from a numeric type, the EBV is false if the operand value is NaN or is numerically equal to zero; otherwise the EBV is true. * All other arguments, including unbound arguments, produce a type error. 
""" if isinstance(rt, Literal): if rt.datatype == XSD.boolean: return rt.toPython() elif rt.datatype == XSD.string or rt.datatype is None: return len(rt) > 0 else: pyRT = rt.toPython() if isinstance(pyRT, Literal): # Type error, see: http://www.w3.org/TR/rdf-sparql-query/#ebv raise SPARQLTypeError( "http://www.w3.org/TR/rdf-sparql-query/#ebv - ' + \ 'Could not determine the EBV for : %r" % rt ) else: return bool(pyRT) else: raise SPARQLTypeError( "http://www.w3.org/TR/rdf-sparql-query/#ebv - ' + \ 'Only literals have Boolean values! %r" % rt ) def _lang_range_check(range, lang): """ Implementation of the extended filtering algorithm, as defined in point 3.3.2, of U{RFC 4647}, on matching language ranges and language tags. Needed to handle the C{rdf:PlainLiteral} datatype. @param range: language range @param lang: language tag @rtype: boolean @author: U{Ivan Herman} Taken from `RDFClosure/RestrictedDatatype.py`__ .. __:http://dev.w3.org/2004/PythonLib-IH/RDFClosure/RestrictedDatatype.py """ def _match(r, l_): """ Matching of a range and language item: either range is a wildcard or the two are equal @param r: language range item @param l_: language tag item @rtype: boolean """ return r == "*" or r == l_ rangeList = range.strip().lower().split("-") langList = lang.strip().lower().split("-") if not _match(rangeList[0], langList[0]): return False if len(rangeList) > len(langList): return False return all(_match(*x) for x in zip(rangeList, langList)) rdflib-6.1.1/rdflib/plugins/sparql/parser.py000066400000000000000000001435461415774155300211050ustar00rootroot00000000000000""" SPARQL 1.1 Parser based on pyparsing """ import sys import re from pyparsing import ( Literal, Regex, Optional, OneOrMore, ZeroOrMore, Forward, ParseException, Suppress, Combine, restOfLine, Group, ParseResults, delimitedList, ) from pyparsing import CaselessKeyword as Keyword # watch out :) # from pyparsing import Keyword as CaseSensitiveKeyword from .parserutils import Comp, Param, ParamList from 
. import operators as op from rdflib.compat import decodeUnicodeEscape import rdflib DEBUG = False # ---------------- ACTIONS def neg(literal): return rdflib.Literal(-literal, datatype=literal.datatype) def setLanguage(terms): return rdflib.Literal(terms[0], lang=terms[1]) def setDataType(terms): return rdflib.Literal(terms[0], datatype=terms[1]) def expandTriples(terms): """ Expand ; and , syntax for repeat predicates, subjects """ # import pdb; pdb.set_trace() try: res = [] if DEBUG: print("Terms", terms) l_ = len(terms) for i, t in enumerate(terms): if t == ",": res.extend([res[-3], res[-2]]) elif t == ";": if i + 1 == len(terms) or terms[i + 1] == ";" or terms[i + 1] == ".": continue # this semicolon is spurious res.append(res[0]) elif isinstance(t, list): # BlankNodePropertyList # is this bnode the object of previous triples? if (len(res) % 3) == 2: res.append(t[0]) # is this a single [] ? if len(t) > 1: res += t # is this bnode the subject of more triples? if i + 1 < l_ and terms[i + 1] not in ".,;": res.append(t[0]) elif isinstance(t, ParseResults): res += t.asList() elif t != ".": res.append(t) if DEBUG: print(len(res), t) if DEBUG: import json print(json.dumps(res, indent=2)) return res # print res # assert len(res)%3 == 0, \ # "Length of triple-list is not divisible by 3: %d!"%len(res) # return [tuple(res[i:i+3]) for i in range(len(res)/3)] except: if DEBUG: import traceback traceback.print_exc() raise def expandBNodeTriples(terms): """ expand [ ?p ?o ] syntax for implicit bnodes """ # import pdb; pdb.set_trace() try: if DEBUG: print("Bnode terms", terms) print("1", terms[0]) print("2", [rdflib.BNode()] + terms.asList()[0]) return [expandTriples([rdflib.BNode()] + terms.asList()[0])] except Exception as e: if DEBUG: print(">>>>>>>>", e) raise def expandCollection(terms): """ expand ( 1 2 3 ) notation for collections """ if DEBUG: print("Collection: ", terms) res = [] other = [] for x in terms: if isinstance(x, list): # is this a [ .. ] ? 
other += x x = x[0] b = rdflib.BNode() if res: res += [res[-3], rdflib.RDF.rest, b, b, rdflib.RDF.first, x] else: res += [b, rdflib.RDF.first, x] res += [b, rdflib.RDF.rest, rdflib.RDF.nil] res += other if DEBUG: print("CollectionOut", res) return [res] # SPARQL Grammar from http://www.w3.org/TR/sparql11-query/#grammar # ------ TERMINALS -------------- # [139] IRIREF ::= '<' ([^<>"{}|^`\]-[#x00-#x20])* '>' IRIREF = Combine( Suppress("<") + Regex(r'[^<>"{}|^`\\%s]*' % "".join("\\x%02X" % i for i in range(33))) + Suppress(">") ) IRIREF.setParseAction(lambda x: rdflib.URIRef(x[0])) # [164] P_CHARS_BASE ::= [A-Z] | [a-z] | [#x00C0-#x00D6] | [#x00D8-#x00F6] | [#x00F8-#x02FF] | [#x0370-#x037D] | [#x037F-#x1FFF] | [#x200C-#x200D] | [#x2070-#x218F] | [#x2C00-#x2FEF] | [#x3001-#xD7FF] | [#xF900-#xFDCF] | [#xFDF0-#xFFFD] | [#x10000-#xEFFFF] if sys.maxunicode == 0xFFFF: # this is narrow python build (default on windows/osx) # this means that unicode code points over 0xffff are stored # as several characters, which in turn means that regex character # ranges with these characters do not work. # See # * http://bugs.python.org/issue12729 # * http://bugs.python.org/issue12749 # * http://bugs.python.org/issue3665 # # Here we simple skip the [#x10000-#xEFFFF] part # this means that some SPARQL queries will not parse :( # We *could* generate a new regex with \U00010000|\U00010001 ... 
# but it would be quite long :) # # in py3.3 this is fixed PN_CHARS_BASE_re = "A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD" else: # wide python build PN_CHARS_BASE_re = "A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\U00010000-\U000EFFFF" # [165] PN_CHARS_U ::= PN_CHARS_BASE | '_' PN_CHARS_U_re = "_" + PN_CHARS_BASE_re # [167] PN_CHARS ::= PN_CHARS_U | '-' | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040] PN_CHARS_re = "\\-0-9\u00B7\u0300-\u036F\u203F-\u2040" + PN_CHARS_U_re # PN_CHARS = Regex(u'[%s]'%PN_CHARS_re, flags=re.U) # [168] PN_PREFIX ::= PN_CHARS_BASE ((PN_CHARS|'.')* PN_CHARS)? PN_PREFIX = Regex( "[%s](?:[%s\\.]*[%s])?" % (PN_CHARS_BASE_re, PN_CHARS_re, PN_CHARS_re), flags=re.U ) # [140] PNAME_NS ::= PN_PREFIX? ':' PNAME_NS = Optional(Param("prefix", PN_PREFIX)) + Suppress(":").leaveWhitespace() # [173] PN_LOCAL_ESC ::= '\' ( '_' | '~' | '.' | '-' | '!' | '$' | '&' | "'" | '(' | ')' | '*' | '+' | ',' | ';' | '=' | '/' | '?' | '#' | '@' | '%' ) PN_LOCAL_ESC_re = "\\\\[_~\\.\\-!$&\"'()*+,;=/?#@%]" # PN_LOCAL_ESC = Regex(PN_LOCAL_ESC_re) # regex'd # PN_LOCAL_ESC.setParseAction(lambda x: x[0][1:]) # [172] HEX ::= [0-9] | [A-F] | [a-f] # HEX = Regex('[0-9A-Fa-f]') # not needed # [171] PERCENT ::= '%' HEX HEX PERCENT_re = "%[0-9a-fA-F]{2}" # PERCENT = Regex(PERCENT_re) # regex'd # PERCENT.setParseAction(lambda x: chr(int(x[0][1:], 16))) # [170] PLX ::= PERCENT | PN_LOCAL_ESC PLX_re = "(%s|%s)" % (PN_LOCAL_ESC_re, PERCENT_re) # PLX = PERCENT | PN_LOCAL_ESC # regex'd # [169] PN_LOCAL ::= (PN_CHARS_U | ':' | [0-9] | PLX ) ((PN_CHARS | '.' | ':' | PLX)* (PN_CHARS | ':' | PLX) )? 
PN_LOCAL = Regex( """([%(PN_CHARS_U)s:0-9]|%(PLX)s) (([%(PN_CHARS)s\\.:]|%(PLX)s)* ([%(PN_CHARS)s:]|%(PLX)s) )?""" % dict(PN_CHARS_U=PN_CHARS_U_re, PN_CHARS=PN_CHARS_re, PLX=PLX_re), flags=re.X | re.UNICODE, ) def _hexExpand(match): return chr(int(match.group(0)[1:], 16)) PN_LOCAL.setParseAction(lambda x: re.sub("(%s)" % PERCENT_re, _hexExpand, x[0])) # [141] PNAME_LN ::= PNAME_NS PN_LOCAL PNAME_LN = PNAME_NS + Param("localname", PN_LOCAL.leaveWhitespace()) # [142] BLANK_NODE_LABEL ::= '_:' ( PN_CHARS_U | [0-9] ) ((PN_CHARS|'.')* PN_CHARS)? BLANK_NODE_LABEL = Regex( "_:[0-9%s](?:[\\.%s]*[%s])?" % (PN_CHARS_U_re, PN_CHARS_re, PN_CHARS_re), flags=re.U, ) BLANK_NODE_LABEL.setParseAction(lambda x: rdflib.BNode(x[0][2:])) # [166] VARNAME ::= ( PN_CHARS_U | [0-9] ) ( PN_CHARS_U | [0-9] | #x00B7 | [#x0300-#x036F] | [#x203F-#x2040] )* VARNAME = Regex( "[%s0-9][%s0-9\u00B7\u0300-\u036F\u203F-\u2040]*" % (PN_CHARS_U_re, PN_CHARS_U_re), flags=re.U, ) # [143] VAR1 ::= '?' VARNAME VAR1 = Combine(Suppress("?") + VARNAME) # [144] VAR2 ::= '$' VARNAME VAR2 = Combine(Suppress("$") + VARNAME) # [145] LANGTAG ::= '@' [a-zA-Z]+ ('-' [a-zA-Z0-9]+)* LANGTAG = Combine(Suppress("@") + Regex("[a-zA-Z]+(?:-[a-zA-Z0-9]+)*")) # [146] INTEGER ::= [0-9]+ INTEGER = Regex(r"[0-9]+") # INTEGER.setResultsName('integer') INTEGER.setParseAction(lambda x: rdflib.Literal(x[0], datatype=rdflib.XSD.integer)) # [155] EXPONENT ::= [eE] [+-]? [0-9]+ EXPONENT_re = "[eE][+-]?[0-9]+" # [147] DECIMAL ::= [0-9]* '.' [0-9]+ DECIMAL = Regex(r"[0-9]*\.[0-9]+") # (?![eE]) # DECIMAL.setResultsName('decimal') DECIMAL.setParseAction(lambda x: rdflib.Literal(x[0], datatype=rdflib.XSD.decimal)) # [148] DOUBLE ::= [0-9]+ '.' [0-9]* EXPONENT | '.' 
([0-9])+ EXPONENT | ([0-9])+ EXPONENT DOUBLE = Regex(r"[0-9]+\.[0-9]*%(e)s|\.([0-9])+%(e)s|[0-9]+%(e)s" % {"e": EXPONENT_re}) # DOUBLE.setResultsName('double') DOUBLE.setParseAction(lambda x: rdflib.Literal(x[0], datatype=rdflib.XSD.double)) # [149] INTEGER_POSITIVE ::= '+' INTEGER INTEGER_POSITIVE = Suppress("+") + INTEGER.copy().leaveWhitespace() INTEGER_POSITIVE.setParseAction( lambda x: rdflib.Literal("+" + x[0], datatype=rdflib.XSD.integer) ) # [150] DECIMAL_POSITIVE ::= '+' DECIMAL DECIMAL_POSITIVE = Suppress("+") + DECIMAL.copy().leaveWhitespace() # [151] DOUBLE_POSITIVE ::= '+' DOUBLE DOUBLE_POSITIVE = Suppress("+") + DOUBLE.copy().leaveWhitespace() # [152] INTEGER_NEGATIVE ::= '-' INTEGER INTEGER_NEGATIVE = Suppress("-") + INTEGER.copy().leaveWhitespace() INTEGER_NEGATIVE.setParseAction(lambda x: neg(x[0])) # [153] DECIMAL_NEGATIVE ::= '-' DECIMAL DECIMAL_NEGATIVE = Suppress("-") + DECIMAL.copy().leaveWhitespace() DECIMAL_NEGATIVE.setParseAction(lambda x: neg(x[0])) # [154] DOUBLE_NEGATIVE ::= '-' DOUBLE DOUBLE_NEGATIVE = Suppress("-") + DOUBLE.copy().leaveWhitespace() DOUBLE_NEGATIVE.setParseAction(lambda x: neg(x[0])) # [160] ECHAR ::= '\' [tbnrf\"'] # ECHAR = Regex('\\\\[tbnrf"\']') # [158] STRING_LITERAL_LONG1 ::= "'''" ( ( "'" | "''" )? ( [^'\] | ECHAR ) )* "'''" # STRING_LITERAL_LONG1 = Literal("'''") + ( Optional( Literal("'") | "''" # ) + ZeroOrMore( ~ Literal("'\\") | ECHAR ) ) + "'''" STRING_LITERAL_LONG1 = Regex("'''((?:'|'')?(?:[^'\\\\]|\\\\['ntbrf\\\\]))*'''") STRING_LITERAL_LONG1.setParseAction( lambda x: rdflib.Literal(decodeUnicodeEscape(x[0][3:-3])) ) # [159] STRING_LITERAL_LONG2 ::= '"""' ( ( '"' | '""' )? 
( [^"\] | ECHAR ) )* '"""' # STRING_LITERAL_LONG2 = Literal('"""') + ( Optional( Literal('"') | '""' # ) + ZeroOrMore( ~ Literal('"\\') | ECHAR ) ) + '"""' STRING_LITERAL_LONG2 = Regex('"""(?:(?:"|"")?(?:[^"\\\\]|\\\\["ntbrf\\\\]))*"""') STRING_LITERAL_LONG2.setParseAction( lambda x: rdflib.Literal(decodeUnicodeEscape(x[0][3:-3])) ) # [156] STRING_LITERAL1 ::= "'" ( ([^#x27#x5C#xA#xD]) | ECHAR )* "'" # STRING_LITERAL1 = Literal("'") + ZeroOrMore( # Regex(u'[^\u0027\u005C\u000A\u000D]',flags=re.U) | ECHAR ) + "'" STRING_LITERAL1 = Regex("'(?:[^'\\n\\r\\\\]|\\\\['ntbrf\\\\])*'(?!')", flags=re.U) STRING_LITERAL1.setParseAction( lambda x: rdflib.Literal(decodeUnicodeEscape(x[0][1:-1])) ) # [157] STRING_LITERAL2 ::= '"' ( ([^#x22#x5C#xA#xD]) | ECHAR )* '"' # STRING_LITERAL2 = Literal('"') + ZeroOrMore ( # Regex(u'[^\u0022\u005C\u000A\u000D]',flags=re.U) | ECHAR ) + '"' STRING_LITERAL2 = Regex('"(?:[^"\\n\\r\\\\]|\\\\["ntbrf\\\\])*"(?!")', flags=re.U) STRING_LITERAL2.setParseAction( lambda x: rdflib.Literal(decodeUnicodeEscape(x[0][1:-1])) ) # [161] NIL ::= '(' WS* ')' NIL = Literal("(") + ")" NIL.setParseAction(lambda x: rdflib.RDF.nil) # [162] WS ::= #x20 | #x9 | #xD | #xA # Not needed? 
# WS = #x20 | #x9 | #xD | #xA # [163] ANON ::= '[' WS* ']' ANON = Literal("[") + "]" ANON.setParseAction(lambda x: rdflib.BNode()) # A = CaseSensitiveKeyword('a') A = Literal("a") A.setParseAction(lambda x: rdflib.RDF.type) # ------ NON-TERMINALS -------------- # [5] BaseDecl ::= 'BASE' IRIREF BaseDecl = Comp("Base", Keyword("BASE") + Param("iri", IRIREF)) # [6] PrefixDecl ::= 'PREFIX' PNAME_NS IRIREF PrefixDecl = Comp("PrefixDecl", Keyword("PREFIX") + PNAME_NS + Param("iri", IRIREF)) # [4] Prologue ::= ( BaseDecl | PrefixDecl )* Prologue = Group(ZeroOrMore(BaseDecl | PrefixDecl)) # [108] Var ::= VAR1 | VAR2 Var = VAR1 | VAR2 Var.setParseAction(lambda x: rdflib.term.Variable(x[0])) # [137] PrefixedName ::= PNAME_LN | PNAME_NS PrefixedName = Comp("pname", PNAME_LN | PNAME_NS) # [136] iri ::= IRIREF | PrefixedName iri = IRIREF | PrefixedName # [135] String ::= STRING_LITERAL1 | STRING_LITERAL2 | STRING_LITERAL_LONG1 | STRING_LITERAL_LONG2 String = STRING_LITERAL_LONG1 | STRING_LITERAL_LONG2 | STRING_LITERAL1 | STRING_LITERAL2 # [129] RDFLiteral ::= String ( LANGTAG | ( '^^' iri ) )? 
RDFLiteral = Comp( "literal", Param("string", String) + Optional( Param("lang", LANGTAG.leaveWhitespace()) | Literal("^^").leaveWhitespace() + Param("datatype", iri).leaveWhitespace() ), ) # [132] NumericLiteralPositive ::= INTEGER_POSITIVE | DECIMAL_POSITIVE | DOUBLE_POSITIVE NumericLiteralPositive = DOUBLE_POSITIVE | DECIMAL_POSITIVE | INTEGER_POSITIVE # [133] NumericLiteralNegative ::= INTEGER_NEGATIVE | DECIMAL_NEGATIVE | DOUBLE_NEGATIVE NumericLiteralNegative = DOUBLE_NEGATIVE | DECIMAL_NEGATIVE | INTEGER_NEGATIVE # [131] NumericLiteralUnsigned ::= INTEGER | DECIMAL | DOUBLE NumericLiteralUnsigned = DOUBLE | DECIMAL | INTEGER # [130] NumericLiteral ::= NumericLiteralUnsigned | NumericLiteralPositive | NumericLiteralNegative NumericLiteral = ( NumericLiteralUnsigned | NumericLiteralPositive | NumericLiteralNegative ) # [134] BooleanLiteral ::= 'true' | 'false' BooleanLiteral = Keyword("true").setParseAction(lambda: rdflib.Literal(True)) | Keyword( "false" ).setParseAction(lambda: rdflib.Literal(False)) # [138] BlankNode ::= BLANK_NODE_LABEL | ANON BlankNode = BLANK_NODE_LABEL | ANON # [109] GraphTerm ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL GraphTerm = iri | RDFLiteral | NumericLiteral | BooleanLiteral | BlankNode | NIL # [106] VarOrTerm ::= Var | GraphTerm VarOrTerm = Var | GraphTerm # [107] VarOrIri ::= Var | iri VarOrIri = Var | iri # [46] GraphRef ::= 'GRAPH' iri GraphRef = Keyword("GRAPH") + Param("graphiri", iri) # [47] GraphRefAll ::= GraphRef | 'DEFAULT' | 'NAMED' | 'ALL' GraphRefAll = ( GraphRef | Param("graphiri", Keyword("DEFAULT")) | Param("graphiri", Keyword("NAMED")) | Param("graphiri", Keyword("ALL")) ) # [45] GraphOrDefault ::= 'DEFAULT' | 'GRAPH'? 
iri GraphOrDefault = ParamList("graph", Keyword("DEFAULT")) | Optional( Keyword("GRAPH") ) + ParamList("graph", iri) # [65] DataBlockValue ::= iri | RDFLiteral | NumericLiteral | BooleanLiteral | 'UNDEF' DataBlockValue = iri | RDFLiteral | NumericLiteral | BooleanLiteral | Keyword("UNDEF") # [78] Verb ::= VarOrIri | A Verb = VarOrIri | A # [85] VerbSimple ::= Var VerbSimple = Var # [97] Integer ::= INTEGER Integer = INTEGER TriplesNode = Forward() TriplesNodePath = Forward() # [104] GraphNode ::= VarOrTerm | TriplesNode GraphNode = VarOrTerm | TriplesNode # [105] GraphNodePath ::= VarOrTerm | TriplesNodePath GraphNodePath = VarOrTerm | TriplesNodePath # [93] PathMod ::= '?' | '*' | '+' PathMod = Literal("?") | "*" | "+" # [96] PathOneInPropertySet ::= iri | A | '^' ( iri | A ) PathOneInPropertySet = iri | A | Comp("InversePath", "^" + (iri | A)) Path = Forward() # [95] PathNegatedPropertySet ::= PathOneInPropertySet | '(' ( PathOneInPropertySet ( '|' PathOneInPropertySet )* )? ')' PathNegatedPropertySet = Comp( "PathNegatedPropertySet", ParamList("part", PathOneInPropertySet) | "(" + Optional( ParamList("part", PathOneInPropertySet) + ZeroOrMore("|" + ParamList("part", PathOneInPropertySet)) ) + ")", ) # [94] PathPrimary ::= iri | A | '!' 
PathNegatedPropertySet | '(' Path ')' | 'DISTINCT' '(' Path ')' PathPrimary = ( iri | A | Suppress("!") + PathNegatedPropertySet | Suppress("(") + Path + Suppress(")") | Comp("DistinctPath", Keyword("DISTINCT") + "(" + Param("part", Path) + ")") ) # [91] PathElt ::= PathPrimary Optional(PathMod) PathElt = Comp( "PathElt", Param("part", PathPrimary) + Optional(Param("mod", PathMod.leaveWhitespace())), ) # [92] PathEltOrInverse ::= PathElt | '^' PathElt PathEltOrInverse = PathElt | Suppress("^") + Comp( "PathEltOrInverse", Param("part", PathElt) ) # [90] PathSequence ::= PathEltOrInverse ( '/' PathEltOrInverse )* PathSequence = Comp( "PathSequence", ParamList("part", PathEltOrInverse) + ZeroOrMore("/" + ParamList("part", PathEltOrInverse)), ) # [89] PathAlternative ::= PathSequence ( '|' PathSequence )* PathAlternative = Comp( "PathAlternative", ParamList("part", PathSequence) + ZeroOrMore("|" + ParamList("part", PathSequence)), ) # [88] Path ::= PathAlternative Path <<= PathAlternative # [84] VerbPath ::= Path VerbPath = Path # [87] ObjectPath ::= GraphNodePath ObjectPath = GraphNodePath # [86] ObjectListPath ::= ObjectPath ( ',' ObjectPath )* ObjectListPath = ObjectPath + ZeroOrMore("," + ObjectPath) GroupGraphPattern = Forward() # [102] Collection ::= '(' OneOrMore(GraphNode) ')' Collection = Suppress("(") + OneOrMore(GraphNode) + Suppress(")") Collection.setParseAction(expandCollection) # [103] CollectionPath ::= '(' OneOrMore(GraphNodePath) ')' CollectionPath = Suppress("(") + OneOrMore(GraphNodePath) + Suppress(")") CollectionPath.setParseAction(expandCollection) # [80] Object ::= GraphNode Object = GraphNode # [79] ObjectList ::= Object ( ',' Object )* ObjectList = Object + ZeroOrMore("," + Object) # [83] PropertyListPathNotEmpty ::= ( VerbPath | VerbSimple ) ObjectListPath ( ';' ( ( VerbPath | VerbSimple ) ObjectList )? 
)* PropertyListPathNotEmpty = ( (VerbPath | VerbSimple) + ObjectListPath + ZeroOrMore(";" + Optional((VerbPath | VerbSimple) + ObjectListPath)) ) # [82] PropertyListPath ::= Optional(PropertyListPathNotEmpty) PropertyListPath = Optional(PropertyListPathNotEmpty) # [77] PropertyListNotEmpty ::= Verb ObjectList ( ';' ( Verb ObjectList )? )* PropertyListNotEmpty = Verb + ObjectList + ZeroOrMore(";" + Optional(Verb + ObjectList)) # [76] PropertyList ::= Optional(PropertyListNotEmpty) PropertyList = Optional(PropertyListNotEmpty) # [99] BlankNodePropertyList ::= '[' PropertyListNotEmpty ']' BlankNodePropertyList = Group(Suppress("[") + PropertyListNotEmpty + Suppress("]")) BlankNodePropertyList.setParseAction(expandBNodeTriples) # [101] BlankNodePropertyListPath ::= '[' PropertyListPathNotEmpty ']' BlankNodePropertyListPath = Group( Suppress("[") + PropertyListPathNotEmpty + Suppress("]") ) BlankNodePropertyListPath.setParseAction(expandBNodeTriples) # [98] TriplesNode ::= Collection | BlankNodePropertyList TriplesNode <<= Collection | BlankNodePropertyList # [100] TriplesNodePath ::= CollectionPath | BlankNodePropertyListPath TriplesNodePath <<= CollectionPath | BlankNodePropertyListPath # [75] TriplesSameSubject ::= VarOrTerm PropertyListNotEmpty | TriplesNode PropertyList TriplesSameSubject = VarOrTerm + PropertyListNotEmpty | TriplesNode + PropertyList TriplesSameSubject.setParseAction(expandTriples) # [52] TriplesTemplate ::= TriplesSameSubject ( '.' TriplesTemplate? )? # NOTE: pyparsing.py handling of recursive rules is limited by python's recursion # limit. # (https://docs.python.org/3/library/sys.html#sys.setrecursionlimit) # To accommodate arbitrary amounts of triples this rule is rewritten to not be # recursive: # [52*] TriplesTemplate ::= TriplesSameSubject ( '.' TriplesSameSubject? 
)* TriplesTemplate = ParamList("triples", TriplesSameSubject) + ZeroOrMore( Suppress(".") + Optional(ParamList("triples", TriplesSameSubject)) ) # [51] QuadsNotTriples ::= 'GRAPH' VarOrIri '{' Optional(TriplesTemplate) '}' QuadsNotTriples = Comp( "QuadsNotTriples", Keyword("GRAPH") + Param("term", VarOrIri) + "{" + Optional(TriplesTemplate) + "}", ) # [50] Quads ::= Optional(TriplesTemplate) ( QuadsNotTriples '.'? Optional(TriplesTemplate) )* Quads = Comp( "Quads", Optional(TriplesTemplate) + ZeroOrMore( ParamList("quadsNotTriples", QuadsNotTriples) + Optional(Suppress(".")) + Optional(TriplesTemplate) ), ) # [48] QuadPattern ::= '{' Quads '}' QuadPattern = "{" + Param("quads", Quads) + "}" # [49] QuadData ::= '{' Quads '}' QuadData = "{" + Param("quads", Quads) + "}" # [81] TriplesSameSubjectPath ::= VarOrTerm PropertyListPathNotEmpty | TriplesNodePath PropertyListPath TriplesSameSubjectPath = ( VarOrTerm + PropertyListPathNotEmpty | TriplesNodePath + PropertyListPath ) TriplesSameSubjectPath.setParseAction(expandTriples) # [55] TriplesBlock ::= TriplesSameSubjectPath ( '.' Optional(TriplesBlock) )? TriplesBlock = Forward() TriplesBlock <<= ParamList("triples", TriplesSameSubjectPath) + Optional( Suppress(".") + Optional(TriplesBlock) ) # [66] MinusGraphPattern ::= 'MINUS' GroupGraphPattern MinusGraphPattern = Comp( "MinusGraphPattern", Keyword("MINUS") + Param("graph", GroupGraphPattern) ) # [67] GroupOrUnionGraphPattern ::= GroupGraphPattern ( 'UNION' GroupGraphPattern )* GroupOrUnionGraphPattern = Comp( "GroupOrUnionGraphPattern", ParamList("graph", GroupGraphPattern) + ZeroOrMore(Keyword("UNION") + ParamList("graph", GroupGraphPattern)), ) Expression = Forward() # [72] ExpressionList ::= NIL | '(' Expression ( ',' Expression )* ')' ExpressionList = NIL | Group(Suppress("(") + delimitedList(Expression) + Suppress(")")) # [122] RegexExpression ::= 'REGEX' '(' Expression ',' Expression ( ',' Expression )? 
')' RegexExpression = Comp( "Builtin_REGEX", Keyword("REGEX") + "(" + Param("text", Expression) + "," + Param("pattern", Expression) + Optional("," + Param("flags", Expression)) + ")", ) RegexExpression.setEvalFn(op.Builtin_REGEX) # [123] SubstringExpression ::= 'SUBSTR' '(' Expression ',' Expression ( ',' Expression )? ')' SubstringExpression = Comp( "Builtin_SUBSTR", Keyword("SUBSTR") + "(" + Param("arg", Expression) + "," + Param("start", Expression) + Optional("," + Param("length", Expression)) + ")", ).setEvalFn(op.Builtin_SUBSTR) # [124] StrReplaceExpression ::= 'REPLACE' '(' Expression ',' Expression ',' Expression ( ',' Expression )? ')' StrReplaceExpression = Comp( "Builtin_REPLACE", Keyword("REPLACE") + "(" + Param("arg", Expression) + "," + Param("pattern", Expression) + "," + Param("replacement", Expression) + Optional("," + Param("flags", Expression)) + ")", ).setEvalFn(op.Builtin_REPLACE) # [125] ExistsFunc ::= 'EXISTS' GroupGraphPattern ExistsFunc = Comp( "Builtin_EXISTS", Keyword("EXISTS") + Param("graph", GroupGraphPattern) ).setEvalFn(op.Builtin_EXISTS) # [126] NotExistsFunc ::= 'NOT' 'EXISTS' GroupGraphPattern NotExistsFunc = Comp( "Builtin_NOTEXISTS", Keyword("NOT") + Keyword("EXISTS") + Param("graph", GroupGraphPattern), ).setEvalFn(op.Builtin_EXISTS) # [127] Aggregate ::= 'COUNT' '(' 'DISTINCT'? ( '*' | Expression ) ')' # | 'SUM' '(' Optional('DISTINCT') Expression ')' # | 'MIN' '(' Optional('DISTINCT') Expression ')' # | 'MAX' '(' Optional('DISTINCT') Expression ')' # | 'AVG' '(' Optional('DISTINCT') Expression ')' # | 'SAMPLE' '(' Optional('DISTINCT') Expression ')' # | 'GROUP_CONCAT' '(' Optional('DISTINCT') Expression ( ';' 'SEPARATOR' '=' String )? 
')' _Distinct = Optional(Keyword("DISTINCT")) _AggregateParams = "(" + Param("distinct", _Distinct) + Param("vars", Expression) + ")" Aggregate = ( Comp( "Aggregate_Count", Keyword("COUNT") + "(" + Param("distinct", _Distinct) + Param("vars", "*" | Expression) + ")", ) | Comp("Aggregate_Sum", Keyword("SUM") + _AggregateParams) | Comp("Aggregate_Min", Keyword("MIN") + _AggregateParams) | Comp("Aggregate_Max", Keyword("MAX") + _AggregateParams) | Comp("Aggregate_Avg", Keyword("AVG") + _AggregateParams) | Comp("Aggregate_Sample", Keyword("SAMPLE") + _AggregateParams) | Comp( "Aggregate_GroupConcat", Keyword("GROUP_CONCAT") + "(" + Param("distinct", _Distinct) + Param("vars", Expression) + Optional(";" + Keyword("SEPARATOR") + "=" + Param("separator", String)) + ")", ) ) # [121] BuiltInCall ::= Aggregate # | 'STR' '(' + Expression + ')' # | 'LANG' '(' + Expression + ')' # | 'LANGMATCHES' '(' + Expression + ',' + Expression + ')' # | 'DATATYPE' '(' + Expression + ')' # | 'BOUND' '(' Var ')' # | 'IRI' '(' + Expression + ')' # | 'URI' '(' + Expression + ')' # | 'BNODE' ( '(' + Expression + ')' | NIL ) # | 'RAND' NIL # | 'ABS' '(' + Expression + ')' # | 'CEIL' '(' + Expression + ')' # | 'FLOOR' '(' + Expression + ')' # | 'ROUND' '(' + Expression + ')' # | 'CONCAT' ExpressionList # | SubstringExpression # | 'STRLEN' '(' + Expression + ')' # | StrReplaceExpression # | 'UCASE' '(' + Expression + ')' # | 'LCASE' '(' + Expression + ')' # | 'ENCODE_FOR_URI' '(' + Expression + ')' # | 'CONTAINS' '(' + Expression + ',' + Expression + ')' # | 'STRSTARTS' '(' + Expression + ',' + Expression + ')' # | 'STRENDS' '(' + Expression + ',' + Expression + ')' # | 'STRBEFORE' '(' + Expression + ',' + Expression + ')' # | 'STRAFTER' '(' + Expression + ',' + Expression + ')' # | 'YEAR' '(' + Expression + ')' # | 'MONTH' '(' + Expression + ')' # | 'DAY' '(' + Expression + ')' # | 'HOURS' '(' + Expression + ')' # | 'MINUTES' '(' + Expression + ')' # | 'SECONDS' '(' + Expression + ')' # | 
'TIMEZONE' '(' + Expression + ')' # | 'TZ' '(' + Expression + ')' # | 'NOW' NIL # | 'UUID' NIL # | 'STRUUID' NIL # | 'MD5' '(' + Expression + ')' # | 'SHA1' '(' + Expression + ')' # | 'SHA256' '(' + Expression + ')' # | 'SHA384' '(' + Expression + ')' # | 'SHA512' '(' + Expression + ')' # | 'COALESCE' ExpressionList # | 'IF' '(' Expression ',' Expression ',' Expression ')' # | 'STRLANG' '(' + Expression + ',' + Expression + ')' # | 'STRDT' '(' + Expression + ',' + Expression + ')' # | 'sameTerm' '(' + Expression + ',' + Expression + ')' # | 'isIRI' '(' + Expression + ')' # | 'isURI' '(' + Expression + ')' # | 'isBLANK' '(' + Expression + ')' # | 'isLITERAL' '(' + Expression + ')' # | 'isNUMERIC' '(' + Expression + ')' # | RegexExpression # | ExistsFunc # | NotExistsFunc BuiltInCall = ( Aggregate | Comp( "Builtin_STR", Keyword("STR") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_STR) | Comp( "Builtin_LANG", Keyword("LANG") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_LANG) | Comp( "Builtin_LANGMATCHES", Keyword("LANGMATCHES") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_LANGMATCHES) | Comp( "Builtin_DATATYPE", Keyword("DATATYPE") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_DATATYPE) | Comp("Builtin_BOUND", Keyword("BOUND") + "(" + Param("arg", Var) + ")").setEvalFn( op.Builtin_BOUND ) | Comp( "Builtin_IRI", Keyword("IRI") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_IRI) | Comp( "Builtin_URI", Keyword("URI") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_IRI) | Comp( "Builtin_BNODE", Keyword("BNODE") + ("(" + Param("arg", Expression) + ")" | NIL) ).setEvalFn(op.Builtin_BNODE) | Comp("Builtin_RAND", Keyword("RAND") + NIL).setEvalFn(op.Builtin_RAND) | Comp( "Builtin_ABS", Keyword("ABS") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_ABS) | Comp( "Builtin_CEIL", Keyword("CEIL") + "(" + Param("arg", Expression) + ")" 
).setEvalFn(op.Builtin_CEIL) | Comp( "Builtin_FLOOR", Keyword("FLOOR") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_FLOOR) | Comp( "Builtin_ROUND", Keyword("ROUND") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_ROUND) | Comp( "Builtin_CONCAT", Keyword("CONCAT") + Param("arg", ExpressionList) ).setEvalFn(op.Builtin_CONCAT) | SubstringExpression | Comp( "Builtin_STRLEN", Keyword("STRLEN") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_STRLEN) | StrReplaceExpression | Comp( "Builtin_UCASE", Keyword("UCASE") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_UCASE) | Comp( "Builtin_LCASE", Keyword("LCASE") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_LCASE) | Comp( "Builtin_ENCODE_FOR_URI", Keyword("ENCODE_FOR_URI") + "(" + Param("arg", Expression) + ")", ).setEvalFn(op.Builtin_ENCODE_FOR_URI) | Comp( "Builtin_CONTAINS", Keyword("CONTAINS") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_CONTAINS) | Comp( "Builtin_STRSTARTS", Keyword("STRSTARTS") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRSTARTS) | Comp( "Builtin_STRENDS", Keyword("STRENDS") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRENDS) | Comp( "Builtin_STRBEFORE", Keyword("STRBEFORE") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRBEFORE) | Comp( "Builtin_STRAFTER", Keyword("STRAFTER") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRAFTER) | Comp( "Builtin_YEAR", Keyword("YEAR") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_YEAR) | Comp( "Builtin_MONTH", Keyword("MONTH") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_MONTH) | Comp( "Builtin_DAY", Keyword("DAY") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_DAY) | Comp( 
"Builtin_HOURS", Keyword("HOURS") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_HOURS) | Comp( "Builtin_MINUTES", Keyword("MINUTES") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_MINUTES) | Comp( "Builtin_SECONDS", Keyword("SECONDS") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_SECONDS) | Comp( "Builtin_TIMEZONE", Keyword("TIMEZONE") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_TIMEZONE) | Comp( "Builtin_TZ", Keyword("TZ") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_TZ) | Comp("Builtin_NOW", Keyword("NOW") + NIL).setEvalFn(op.Builtin_NOW) | Comp("Builtin_UUID", Keyword("UUID") + NIL).setEvalFn(op.Builtin_UUID) | Comp("Builtin_STRUUID", Keyword("STRUUID") + NIL).setEvalFn(op.Builtin_STRUUID) | Comp( "Builtin_MD5", Keyword("MD5") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_MD5) | Comp( "Builtin_SHA1", Keyword("SHA1") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_SHA1) | Comp( "Builtin_SHA256", Keyword("SHA256") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_SHA256) | Comp( "Builtin_SHA384", Keyword("SHA384") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_SHA384) | Comp( "Builtin_SHA512", Keyword("SHA512") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_SHA512) | Comp( "Builtin_COALESCE", Keyword("COALESCE") + Param("arg", ExpressionList) ).setEvalFn(op.Builtin_COALESCE) | Comp( "Builtin_IF", Keyword("IF") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + "," + Param("arg3", Expression) + ")", ).setEvalFn(op.Builtin_IF) | Comp( "Builtin_STRLANG", Keyword("STRLANG") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRLANG) | Comp( "Builtin_STRDT", Keyword("STRDT") + "(" + Param("arg1", Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_STRDT) | Comp( "Builtin_sameTerm", Keyword("sameTerm") + "(" + Param("arg1", 
Expression) + "," + Param("arg2", Expression) + ")", ).setEvalFn(op.Builtin_sameTerm) | Comp( "Builtin_isIRI", Keyword("isIRI") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_isIRI) | Comp( "Builtin_isURI", Keyword("isURI") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_isIRI) | Comp( "Builtin_isBLANK", Keyword("isBLANK") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_isBLANK) | Comp( "Builtin_isLITERAL", Keyword("isLITERAL") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_isLITERAL) | Comp( "Builtin_isNUMERIC", Keyword("isNUMERIC") + "(" + Param("arg", Expression) + ")" ).setEvalFn(op.Builtin_isNUMERIC) | RegexExpression | ExistsFunc | NotExistsFunc ) # [71] ArgList ::= NIL | '(' 'DISTINCT'? Expression ( ',' Expression )* ')' ArgList = ( NIL | "(" + Param("distinct", _Distinct) + delimitedList(ParamList("expr", Expression)) + ")" ) # [128] iriOrFunction ::= iri Optional(ArgList) iriOrFunction = ( Comp("Function", Param("iri", iri) + ArgList).setEvalFn(op.Function) ) | iri # [70] FunctionCall ::= iri ArgList FunctionCall = Comp("Function", Param("iri", iri) + ArgList).setEvalFn(op.Function) # [120] BrackettedExpression ::= '(' Expression ')' BrackettedExpression = Suppress("(") + Expression + Suppress(")") # [119] PrimaryExpression ::= BrackettedExpression | BuiltInCall | iriOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var PrimaryExpression = ( BrackettedExpression | BuiltInCall | iriOrFunction | RDFLiteral | NumericLiteral | BooleanLiteral | Var ) # [118] UnaryExpression ::= '!' PrimaryExpression # | '+' PrimaryExpression # | '-' PrimaryExpression # | PrimaryExpression UnaryExpression = ( Comp("UnaryNot", "!" 
+ Param("expr", PrimaryExpression)).setEvalFn(op.UnaryNot) | Comp("UnaryPlus", "+" + Param("expr", PrimaryExpression)).setEvalFn(op.UnaryPlus) | Comp("UnaryMinus", "-" + Param("expr", PrimaryExpression)).setEvalFn( op.UnaryMinus ) | PrimaryExpression ) # [117] MultiplicativeExpression ::= UnaryExpression ( '*' UnaryExpression | '/' UnaryExpression )* MultiplicativeExpression = Comp( "MultiplicativeExpression", Param("expr", UnaryExpression) + ZeroOrMore( ParamList("op", "*") + ParamList("other", UnaryExpression) | ParamList("op", "/") + ParamList("other", UnaryExpression) ), ).setEvalFn(op.MultiplicativeExpression) # [116] AdditiveExpression ::= MultiplicativeExpression ( '+' MultiplicativeExpression | '-' MultiplicativeExpression | ( NumericLiteralPositive | NumericLiteralNegative ) ( ( '*' UnaryExpression ) | ( '/' UnaryExpression ) )* )* # NOTE: The second part of this production is there because: # "In signed numbers, no white space is allowed between the sign and the number. The AdditiveExpression grammar rule allows for this by covering the two cases of an expression followed by a signed number. These produce an addition or subtraction of the unsigned number as appropriate." # Here (I think) this is not necessary since pyparsing doesn't separate # tokenizing and parsing AdditiveExpression = Comp( "AdditiveExpression", Param("expr", MultiplicativeExpression) + ZeroOrMore( ParamList("op", "+") + ParamList("other", MultiplicativeExpression) | ParamList("op", "-") + ParamList("other", MultiplicativeExpression) ), ).setEvalFn(op.AdditiveExpression) # [115] NumericExpression ::= AdditiveExpression NumericExpression = AdditiveExpression # [114] RelationalExpression ::= NumericExpression ( '=' NumericExpression | '!=' NumericExpression | '<' NumericExpression | '>' NumericExpression | '<=' NumericExpression | '>=' NumericExpression | 'IN' ExpressionList | 'NOT' 'IN' ExpressionList )? 
RelationalExpression = Comp(
    "RelationalExpression",
    Param("expr", NumericExpression)
    + Optional(
        Param("op", "=") + Param("other", NumericExpression)
        | Param("op", "!=") + Param("other", NumericExpression)
        | Param("op", "<") + Param("other", NumericExpression)
        | Param("op", ">") + Param("other", NumericExpression)
        | Param("op", "<=") + Param("other", NumericExpression)
        | Param("op", ">=") + Param("other", NumericExpression)
        | Param("op", Keyword("IN")) + Param("other", ExpressionList)
        | Param(
            "op",
            # collapse 'NOT' 'IN' into the single operator token "NOT IN"
            Combine(Keyword("NOT") + Keyword("IN"), adjacent=False, joinString=" "),
        )
        + Param("other", ExpressionList)
    ),
).setEvalFn(op.RelationalExpression)

# [113] ValueLogical ::= RelationalExpression
ValueLogical = RelationalExpression

# [112] ConditionalAndExpression ::= ValueLogical ( '&&' ValueLogical )*
ConditionalAndExpression = Comp(
    "ConditionalAndExpression",
    Param("expr", ValueLogical)
    + ZeroOrMore("&&" + ParamList("other", ValueLogical)),
).setEvalFn(op.ConditionalAndExpression)

# [111] ConditionalOrExpression ::= ConditionalAndExpression ( '||' ConditionalAndExpression )*
ConditionalOrExpression = Comp(
    "ConditionalOrExpression",
    Param("expr", ConditionalAndExpression)
    + ZeroOrMore("||" + ParamList("other", ConditionalAndExpression)),
).setEvalFn(op.ConditionalOrExpression)

# [110] Expression ::= ConditionalOrExpression
Expression <<= ConditionalOrExpression

# [69] Constraint ::= BrackettedExpression | BuiltInCall | FunctionCall
Constraint = BrackettedExpression | BuiltInCall | FunctionCall

# [68] Filter ::= 'FILTER' Constraint
Filter = Comp("Filter", Keyword("FILTER") + Param("expr", Constraint))

# [16] SourceSelector ::= iri
SourceSelector = iri

# [14] DefaultGraphClause ::= SourceSelector
DefaultGraphClause = SourceSelector

# [15] NamedGraphClause ::= 'NAMED' SourceSelector
NamedGraphClause = Keyword("NAMED") + Param("named", SourceSelector)

# [13] DatasetClause ::= 'FROM' ( DefaultGraphClause | NamedGraphClause )
DatasetClause = Comp(
    "DatasetClause",
    Keyword("FROM") + (Param("default", DefaultGraphClause) | NamedGraphClause),
)

# [20] GroupCondition ::= BuiltInCall | FunctionCall
#      | '(' Expression ( 'AS' Var )? ')' | Var
GroupCondition = (
    BuiltInCall
    | FunctionCall
    | Comp(
        "GroupAs",
        "("
        + Param("expr", Expression)
        + Optional(Keyword("AS") + Param("var", Var))
        + ")",
    )
    | Var
)

# [19] GroupClause ::= 'GROUP' 'BY' GroupCondition+
GroupClause = Comp(
    "GroupClause",
    Keyword("GROUP")
    + Keyword("BY")
    + OneOrMore(ParamList("condition", GroupCondition)),
)

# Optional 'SILENT' flag shared by the SPARQL Update operations below.
_Silent = Optional(Param("silent", Keyword("SILENT")))

# [31] Load ::= 'LOAD' 'SILENT'? iri ( 'INTO' GraphRef )?
Load = Comp(
    "Load",
    Keyword("LOAD")
    + _Silent
    + Param("iri", iri)
    + Optional(Keyword("INTO") + GraphRef),
)

# [32] Clear ::= 'CLEAR' 'SILENT'? GraphRefAll
Clear = Comp("Clear", Keyword("CLEAR") + _Silent + GraphRefAll)

# [33] Drop ::= 'DROP' 'SILENT'? GraphRefAll
Drop = Comp("Drop", Keyword("DROP") + _Silent + GraphRefAll)

# [34] Create ::= 'CREATE' 'SILENT'? GraphRef
Create = Comp("Create", Keyword("CREATE") + _Silent + GraphRef)

# [35] Add ::= 'ADD' 'SILENT'? GraphOrDefault 'TO' GraphOrDefault
Add = Comp(
    "Add",
    Keyword("ADD") + _Silent + GraphOrDefault + Keyword("TO") + GraphOrDefault,
)

# [36] Move ::= 'MOVE' 'SILENT'? GraphOrDefault 'TO' GraphOrDefault
Move = Comp(
    "Move",
    Keyword("MOVE") + _Silent + GraphOrDefault + Keyword("TO") + GraphOrDefault,
)

# [37] Copy ::= 'COPY' 'SILENT'? GraphOrDefault 'TO' GraphOrDefault
Copy = Comp(
    "Copy",
    Keyword("COPY") + _Silent + GraphOrDefault + Keyword("TO") + GraphOrDefault,
)

# [38] InsertData ::= 'INSERT DATA' QuadData
InsertData = Comp("InsertData", Keyword("INSERT") + Keyword("DATA") + QuadData)

# [39] DeleteData ::= 'DELETE DATA' QuadData
DeleteData = Comp("DeleteData", Keyword("DELETE") + Keyword("DATA") + QuadData)

# [40] DeleteWhere ::= 'DELETE WHERE' QuadPattern
DeleteWhere = Comp("DeleteWhere", Keyword("DELETE") + Keyword("WHERE") + QuadPattern)

# [42] DeleteClause ::= 'DELETE' QuadPattern
DeleteClause = Comp("DeleteClause", Keyword("DELETE") + QuadPattern)

# [43] InsertClause ::= 'INSERT' QuadPattern
InsertClause = Comp("InsertClause", Keyword("INSERT") + QuadPattern)

# [44] UsingClause ::= 'USING' ( iri | 'NAMED' iri )
UsingClause = Comp(
    "UsingClause",
    Keyword("USING")
    + (Param("default", iri) | Keyword("NAMED") + Param("named", iri)),
)

# [41] Modify ::= ( 'WITH' iri )?
#      ( DeleteClause InsertClause? | InsertClause )
#      UsingClause* 'WHERE' GroupGraphPattern
Modify = Comp(
    "Modify",
    Optional(Keyword("WITH") + Param("withClause", iri))
    + (
        Param("delete", DeleteClause) + Optional(Param("insert", InsertClause))
        | Param("insert", InsertClause)
    )
    + ZeroOrMore(ParamList("using", UsingClause))
    + Keyword("WHERE")
    + Param("where", GroupGraphPattern),
)

# [30] Update1 ::= Load | Clear | Drop | Add | Move | Copy
#      | Create | InsertData | DeleteData | DeleteWhere | Modify
Update1 = (
    Load
    | Clear
    | Drop
    | Add
    | Move
    | Copy
    | Create
    | InsertData
    | DeleteData
    | DeleteWhere
    | Modify
)

# [63] InlineDataOneVar ::= Var '{' DataBlockValue* '}'
InlineDataOneVar = (
    ParamList("var", Var)
    + "{"
    + ZeroOrMore(ParamList("value", DataBlockValue))
    + "}"
)

# [64] InlineDataFull ::= ( NIL | '(' Var* ')' )
#      '{' ( '(' DataBlockValue* ')' | NIL )* '}'
InlineDataFull = (
    (NIL | "(" + ZeroOrMore(ParamList("var", Var)) + ")")
    + "{"
    + ZeroOrMore(
        ParamList(
            "value",
            Group(Suppress("(") + ZeroOrMore(DataBlockValue) + Suppress(")") | NIL),
        )
    )
    + "}"
)

# [62] DataBlock ::= InlineDataOneVar | InlineDataFull
DataBlock = InlineDataOneVar | InlineDataFull

# [28] ValuesClause ::= ( 'VALUES' DataBlock )?
ValuesClause = Optional(
    Param("valuesClause", Comp("ValuesClause", Keyword("VALUES") + DataBlock))
)

# [74] ConstructTriples ::= TriplesSameSubject ( '.' Optional(ConstructTriples) )?
ConstructTriples = Forward() ConstructTriples <<= ParamList("template", TriplesSameSubject) + Optional( Suppress(".") + Optional(ConstructTriples) ) # [73] ConstructTemplate ::= '{' Optional(ConstructTriples) '}' ConstructTemplate = Suppress("{") + Optional(ConstructTriples) + Suppress("}") # [57] OptionalGraphPattern ::= 'OPTIONAL' GroupGraphPattern OptionalGraphPattern = Comp( "OptionalGraphPattern", Keyword("OPTIONAL") + Param("graph", GroupGraphPattern) ) # [58] GraphGraphPattern ::= 'GRAPH' VarOrIri GroupGraphPattern GraphGraphPattern = Comp( "GraphGraphPattern", Keyword("GRAPH") + Param("term", VarOrIri) + Param("graph", GroupGraphPattern), ) # [59] ServiceGraphPattern ::= 'SERVICE' _Silent VarOrIri GroupGraphPattern ServiceGraphPattern = Comp( "ServiceGraphPattern", Keyword("SERVICE") + _Silent + Param("term", VarOrIri) + Param("graph", GroupGraphPattern), ) # [60] Bind ::= 'BIND' '(' Expression 'AS' Var ')' Bind = Comp( "Bind", Keyword("BIND") + "(" + Param("expr", Expression) + Keyword("AS") + Param("var", Var) + ")", ) # [61] InlineData ::= 'VALUES' DataBlock InlineData = Comp("InlineData", Keyword("VALUES") + DataBlock) # [56] GraphPatternNotTriples ::= GroupOrUnionGraphPattern | OptionalGraphPattern | MinusGraphPattern | GraphGraphPattern | ServiceGraphPattern | Filter | Bind | InlineData GraphPatternNotTriples = ( GroupOrUnionGraphPattern | OptionalGraphPattern | MinusGraphPattern | GraphGraphPattern | ServiceGraphPattern | Filter | Bind | InlineData ) # [54] GroupGraphPatternSub ::= Optional(TriplesBlock) ( GraphPatternNotTriples '.'? 
Optional(TriplesBlock) )* GroupGraphPatternSub = Comp( "GroupGraphPatternSub", Optional(ParamList("part", Comp("TriplesBlock", TriplesBlock))) + ZeroOrMore( ParamList("part", GraphPatternNotTriples) + Optional(".") + Optional(ParamList("part", Comp("TriplesBlock", TriplesBlock))) ), ) # ---------------- # [22] HavingCondition ::= Constraint HavingCondition = Constraint # [21] HavingClause ::= 'HAVING' HavingCondition+ HavingClause = Comp( "HavingClause", Keyword("HAVING") + OneOrMore(ParamList("condition", HavingCondition)), ) # [24] OrderCondition ::= ( ( 'ASC' | 'DESC' ) BrackettedExpression ) # | ( Constraint | Var ) OrderCondition = Comp( "OrderCondition", Param("order", Keyword("ASC") | Keyword("DESC")) + Param("expr", BrackettedExpression) | Param("expr", Constraint | Var), ) # [23] OrderClause ::= 'ORDER' 'BY' OneOrMore(OrderCondition) OrderClause = Comp( "OrderClause", Keyword("ORDER") + Keyword("BY") + OneOrMore(ParamList("condition", OrderCondition)), ) # [26] LimitClause ::= 'LIMIT' INTEGER LimitClause = Keyword("LIMIT") + Param("limit", INTEGER) # [27] OffsetClause ::= 'OFFSET' INTEGER OffsetClause = Keyword("OFFSET") + Param("offset", INTEGER) # [25] LimitOffsetClauses ::= LimitClause Optional(OffsetClause) | OffsetClause Optional(LimitClause) LimitOffsetClauses = Comp( "LimitOffsetClauses", LimitClause + Optional(OffsetClause) | OffsetClause + Optional(LimitClause), ) # [18] SolutionModifier ::= GroupClause? HavingClause? OrderClause? LimitOffsetClauses? SolutionModifier = ( Optional(Param("groupby", GroupClause)) + Optional(Param("having", HavingClause)) + Optional(Param("orderby", OrderClause)) + Optional(Param("limitoffset", LimitOffsetClauses)) ) # [9] SelectClause ::= 'SELECT' ( 'DISTINCT' | 'REDUCED' )? 
( ( Var | ( '(' Expression 'AS' Var ')' ) )+ | '*' ) SelectClause = ( Keyword("SELECT") + Optional(Param("modifier", Keyword("DISTINCT") | Keyword("REDUCED"))) + ( OneOrMore( ParamList( "projection", Comp( "vars", Param("var", Var) | ( Literal("(") + Param("expr", Expression) + Keyword("AS") + Param("evar", Var) + ")" ), ), ) ) | "*" ) ) # [17] WhereClause ::= 'WHERE'? GroupGraphPattern WhereClause = Optional(Keyword("WHERE")) + Param("where", GroupGraphPattern) # [8] SubSelect ::= SelectClause WhereClause SolutionModifier ValuesClause SubSelect = Comp( "SubSelect", SelectClause + WhereClause + SolutionModifier + ValuesClause ) # [53] GroupGraphPattern ::= '{' ( SubSelect | GroupGraphPatternSub ) '}' GroupGraphPattern <<= Suppress("{") + (SubSelect | GroupGraphPatternSub) + Suppress("}") # [7] SelectQuery ::= SelectClause DatasetClause* WhereClause SolutionModifier SelectQuery = Comp( "SelectQuery", SelectClause + ZeroOrMore(ParamList("datasetClause", DatasetClause)) + WhereClause + SolutionModifier + ValuesClause, ) # [10] ConstructQuery ::= 'CONSTRUCT' ( ConstructTemplate DatasetClause* WhereClause SolutionModifier | DatasetClause* 'WHERE' '{' TriplesTemplate? 
'}' SolutionModifier ) # NOTE: The CONSTRUCT WHERE alternative has unnecessarily many Comp/Param pairs # to allow it to through the same algebra translation process ConstructQuery = Comp( "ConstructQuery", Keyword("CONSTRUCT") + ( ConstructTemplate + ZeroOrMore(ParamList("datasetClause", DatasetClause)) + WhereClause + SolutionModifier + ValuesClause | ZeroOrMore(ParamList("datasetClause", DatasetClause)) + Keyword("WHERE") + "{" + Optional( Param( "where", Comp( "FakeGroupGraphPatten", ParamList("part", Comp("TriplesBlock", TriplesTemplate)), ), ) ) + "}" + SolutionModifier + ValuesClause ), ) # [12] AskQuery ::= 'ASK' DatasetClause* WhereClause SolutionModifier AskQuery = Comp( "AskQuery", Keyword("ASK") + Param("datasetClause", ZeroOrMore(DatasetClause)) + WhereClause + SolutionModifier + ValuesClause, ) # [11] DescribeQuery ::= 'DESCRIBE' ( VarOrIri+ | '*' ) DatasetClause* WhereClause? SolutionModifier DescribeQuery = Comp( "DescribeQuery", Keyword("DESCRIBE") + (OneOrMore(ParamList("var", VarOrIri)) | "*") + Param("datasetClause", ZeroOrMore(DatasetClause)) + Optional(WhereClause) + SolutionModifier + ValuesClause, ) # [29] Update ::= Prologue ( Update1 ( ';' Update )? )? Update = Forward() Update <<= ParamList("prologue", Prologue) + Optional( ParamList("request", Update1) + Optional(";" + Update) ) # [2] Query ::= Prologue # ( SelectQuery | ConstructQuery | DescribeQuery | AskQuery ) # ValuesClause # NOTE: ValuesClause was moved to individual queries Query = Prologue + (SelectQuery | ConstructQuery | DescribeQuery | AskQuery) # [3] UpdateUnit ::= Update UpdateUnit = Comp("Update", Update) # [1] QueryUnit ::= Query QueryUnit = Query QueryUnit.ignore("#" + restOfLine) UpdateUnit.ignore("#" + restOfLine) expandUnicodeEscapes_re = re.compile(r"\\u([0-9a-f]{4}(?:[0-9a-f]{4})?)", flags=re.I) def expandUnicodeEscapes(q): r""" The syntax of the SPARQL Query Language is expressed over code points in Unicode [UNICODE]. The encoding is always UTF-8 [RFC3629]. 
Unicode code points may also be expressed using an \ uXXXX (U+0 to U+FFFF) or \ UXXXXXXXX syntax (for U+10000 onwards) where X is a hexadecimal digit [0-9A-F] """ def expand(m): try: return chr(int(m.group(1), 16)) except: raise Exception("Invalid unicode code point: " + m) return expandUnicodeEscapes_re.sub(expand, q) def parseQuery(q): if hasattr(q, "read"): q = q.read() if isinstance(q, bytes): q = q.decode("utf-8") q = expandUnicodeEscapes(q) return Query.parseString(q, parseAll=True) def parseUpdate(q): if hasattr(q, "read"): q = q.read() if isinstance(q, bytes): q = q.decode("utf-8") q = expandUnicodeEscapes(q) return UpdateUnit.parseString(q, parseAll=True)[0] if __name__ == "__main__": import sys DEBUG = True try: q = Query.parseString(sys.argv[1]) print("\nSyntax Tree:\n") print(q) except ParseException as err: print(err.line) print(" " * (err.column - 1) + "^") print(err) rdflib-6.1.1/rdflib/plugins/sparql/parserutils.py000066400000000000000000000175611415774155300221630ustar00rootroot00000000000000from types import MethodType from collections import OrderedDict from pyparsing import TokenConverter, ParseResults, originalTextFor from rdflib import BNode, Variable """ NOTE: PyParsing setResultName/__call__ provides a very similar solution to this I didn't realise at the time of writing and I will remove a lot of this code at some point Utility classes for creating an abstract-syntax tree out with pyparsing actions Lets you label and group parts of parser production rules For example: # [5] BaseDecl ::= 'BASE' IRIREF BaseDecl = Comp('Base', Keyword('BASE') + Param('iri',IRIREF)) After parsing, this gives you back an CompValue object, which is a dict/object with the parameters specified. 
So you can access the parameters are attributes or as keys: baseDecl.iri Comp lets you set an evalFn that is bound to the eval method of the resulting CompValue """ # This is an alternative # Comp('Sum')( Param('x')(Number) + '+' + Param('y')(Number) ) def value(ctx, val, variables=False, errors=False): """ utility function for evaluating something... Variables will be looked up in the context Normally, non-bound vars is an error, set variables=True to return unbound vars Normally, an error raises the error, set errors=True to return error """ if isinstance(val, Expr): return val.eval(ctx) # recurse? elif isinstance(val, CompValue): raise Exception("What do I do with this CompValue? %s" % val) elif isinstance(val, list): return [value(ctx, x, variables, errors) for x in val] elif isinstance(val, (BNode, Variable)): r = ctx.get(val) if isinstance(r, SPARQLError) and not errors: raise r if r is not None: return r # not bound if variables: return val else: raise NotBoundError elif isinstance(val, ParseResults) and len(val) == 1: return value(ctx, val[0], variables, errors) else: return val class ParamValue(object): """ The result of parsing a Param This just keeps the name/value All cleverness is in the CompValue """ def __init__(self, name, tokenList, isList): self.isList = isList self.name = name if isinstance(tokenList, (list, ParseResults)) and len(tokenList) == 1: tokenList = tokenList[0] self.tokenList = tokenList def __str__(self): return "Param(%s, %s)" % (self.name, self.tokenList) class Param(TokenConverter): """ A pyparsing token for labelling a part of the parse-tree if isList is true repeat occurrences of ParamList have their values merged in a list """ def __init__(self, name, expr, isList=False): self.isList = isList TokenConverter.__init__(self, expr) self.setName(name) self.addParseAction(self.postParse2) def postParse2(self, tokenList): return ParamValue(self.name, tokenList, self.isList) class ParamList(Param): """ A shortcut for a Param with 
isList=True """ def __init__(self, name, expr): Param.__init__(self, name, expr, True) class plist(list): """this is just a list, but we want our own type to check for""" pass class CompValue(OrderedDict): """ The result of parsing a Comp Any included Params are available as Dict keys or as attributes """ def __init__(self, name, **values): OrderedDict.__init__(self) self.name = name self.update(values) def clone(self): return CompValue(self.name, **self) def __str__(self): return self.name + "_" + OrderedDict.__str__(self) def __repr__(self): return self.name + "_" + dict.__repr__(self) def _value(self, val, variables=False, errors=False): if self.ctx is not None: return value(self.ctx, val, variables) else: return val def __getitem__(self, a): return self._value(OrderedDict.__getitem__(self, a)) def get(self, a, variables=False, errors=False): return self._value(OrderedDict.get(self, a, a), variables, errors) def __getattr__(self, a): # Hack hack: OrderedDict relies on this if a in ("_OrderedDict__root", "_OrderedDict__end"): raise AttributeError() try: return self[a] except KeyError: # raise AttributeError('no such attribute '+a) return None class Expr(CompValue): """ A CompValue that is evaluatable """ def __init__(self, name, evalfn=None, **values): super(Expr, self).__init__(name, **values) self._evalfn = None if evalfn: self._evalfn = MethodType(evalfn, self) def eval(self, ctx={}): try: self.ctx = ctx return self._evalfn(ctx) except SPARQLError as e: return e finally: self.ctx = None class Comp(TokenConverter): """ A pyparsing token for grouping together things with a label Any sub-tokens that are not Params will be ignored. Returns CompValue / Expr objects - depending on whether evalFn is set. 
""" def __init__(self, name, expr): self.expr = expr TokenConverter.__init__(self, expr) self.setName(name) self.evalfn = None def postParse(self, instring, loc, tokenList): if self.evalfn: res = Expr(self.name) res._evalfn = MethodType(self.evalfn, res) else: res = CompValue(self.name) if self.name == "ServiceGraphPattern": # Then this must be a service graph pattern and have # already matched. # lets assume there is one, for now, then test for two later. sgp = originalTextFor(self.expr) service_string = sgp.searchString(instring)[0][0] res["service_string"] = service_string for t in tokenList: if isinstance(t, ParamValue): if t.isList: if t.name not in res: res[t.name] = plist() res[t.name].append(t.tokenList) else: res[t.name] = t.tokenList # res.append(t.tokenList) # if isinstance(t,CompValue): # res.update(t) return res def setEvalFn(self, evalfn): self.evalfn = evalfn return self def prettify_parsetree(t, indent="", depth=0): out = [] if isinstance(t, ParseResults): for e in t.asList(): out.append(prettify_parsetree(e, indent, depth + 1)) for k, v in sorted(t.items()): out.append("%s%s- %s:\n" % (indent, " " * depth, k)) out.append(prettify_parsetree(v, indent, depth + 1)) elif isinstance(t, CompValue): out.append("%s%s> %s:\n" % (indent, " " * depth, t.name)) for k, v in t.items(): out.append("%s%s- %s:\n" % (indent, " " * (depth + 1), k)) out.append(prettify_parsetree(v, indent, depth + 2)) elif isinstance(t, dict): for k, v in t.items(): out.append("%s%s- %s:\n" % (indent, " " * (depth + 1), k)) out.append(prettify_parsetree(v, indent, depth + 2)) elif isinstance(t, list): for e in t: out.append(prettify_parsetree(e, indent, depth + 1)) else: out.append("%s%s- %r\n" % (indent, " " * depth, t)) return "".join(out) if __name__ == "__main__": from pyparsing import Word, nums import sys Number = Word(nums) Number.setParseAction(lambda x: int(x[0])) Plus = Comp("plus", Param("a", Number) + "+" + Param("b", Number)) Plus.setEvalFn(lambda self, ctx: self.a + 
self.b) r = Plus.parseString(sys.argv[1]) print(r) print(r[0].eval({})) # hurrah for circular imports from rdflib.plugins.sparql.sparql import SPARQLError, NotBoundError rdflib-6.1.1/rdflib/plugins/sparql/processor.py000066400000000000000000000044041415774155300216150ustar00rootroot00000000000000""" Code for tying SPARQL Engine into RDFLib These should be automatically registered with RDFLib """ from rdflib.query import Processor, Result, UpdateProcessor from rdflib.plugins.sparql.sparql import Query from rdflib.plugins.sparql.parser import parseQuery, parseUpdate from rdflib.plugins.sparql.algebra import translateQuery, translateUpdate from rdflib.plugins.sparql.evaluate import evalQuery from rdflib.plugins.sparql.update import evalUpdate def prepareQuery(queryString, initNs={}, base=None): """ Parse and translate a SPARQL Query """ ret = translateQuery(parseQuery(queryString), base, initNs) ret._original_args = (queryString, initNs, base) return ret def processUpdate(graph, updateString, initBindings={}, initNs={}, base=None): """ Process a SPARQL Update Request returns Nothing on success or raises Exceptions on error """ evalUpdate( graph, translateUpdate(parseUpdate(updateString), base, initNs), initBindings ) class SPARQLResult(Result): def __init__(self, res): Result.__init__(self, res["type_"]) self.vars = res.get("vars_") self.bindings = res.get("bindings") self.askAnswer = res.get("askAnswer") self.graph = res.get("graph") class SPARQLUpdateProcessor(UpdateProcessor): def __init__(self, graph): self.graph = graph def update(self, strOrQuery, initBindings={}, initNs={}): if isinstance(strOrQuery, str): strOrQuery = translateUpdate(parseUpdate(strOrQuery), initNs=initNs) return evalUpdate(self.graph, strOrQuery, initBindings) class SPARQLProcessor(Processor): def __init__(self, graph): self.graph = graph def query(self, strOrQuery, initBindings={}, initNs={}, base=None, DEBUG=False): """ Evaluate a query with the given initial bindings, and initial 
namespaces. The given base is used to resolve relative URIs in the query and will be overridden by any BASE given in the query. """ if not isinstance(strOrQuery, Query): parsetree = parseQuery(strOrQuery) query = translateQuery(parsetree, base, initNs) else: query = strOrQuery return evalQuery(self.graph, query, initBindings, base) rdflib-6.1.1/rdflib/plugins/sparql/results/000077500000000000000000000000001415774155300207235ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/sparql/results/__init__.py000066400000000000000000000000721415774155300230330ustar00rootroot00000000000000""" Parsers and serializers for SPARQL Result formats """ rdflib-6.1.1/rdflib/plugins/sparql/results/csvresults.py000066400000000000000000000046631415774155300235230ustar00rootroot00000000000000""" This module implements a parser and serializer for the CSV SPARQL result formats http://www.w3.org/TR/sparql11-results-csv-tsv/ """ import codecs import csv from typing import IO from rdflib import Variable, BNode, URIRef, Literal from rdflib.query import Result, ResultSerializer, ResultParser class CSVResultParser(ResultParser): def __init__(self): self.delim = "," def parse(self, source, content_type=None): r = Result("SELECT") if isinstance(source.read(0), bytes): # if reading from source returns bytes do utf-8 decoding source = codecs.getreader("utf-8")(source) reader = csv.reader(source, delimiter=self.delim) r.vars = [Variable(x) for x in next(reader)] r.bindings = [] for row in reader: r.bindings.append(self.parseRow(row, r.vars)) return r def parseRow(self, row, v): return dict( (var, val) for var, val in zip(v, [self.convertTerm(t) for t in row]) if val is not None ) def convertTerm(self, t): if t == "": return None if t.startswith("_:"): return BNode(t) # or generate new IDs? if t.startswith("http://") or t.startswith("https://"): # TODO: more? 
return URIRef(t) return Literal(t) class CSVResultSerializer(ResultSerializer): def __init__(self, result): ResultSerializer.__init__(self, result) self.delim = "," if result.type != "SELECT": raise Exception("CSVSerializer can only serialize select query results") def serialize(self, stream: IO, encoding: str = "utf-8", **kwargs): # the serialiser writes bytes in the given encoding # in py3 csv.writer is unicode aware and writes STRINGS, # so we encode afterwards import codecs stream = codecs.getwriter(encoding)(stream) # type: ignore[assignment] out = csv.writer(stream, delimiter=self.delim) vs = [self.serializeTerm(v, encoding) for v in self.result.vars] # type: ignore[union-attr] out.writerow(vs) for row in self.result.bindings: out.writerow( [self.serializeTerm(row.get(v), encoding) for v in self.result.vars] # type: ignore[union-attr] ) def serializeTerm(self, term, encoding): if term is None: return "" else: return term rdflib-6.1.1/rdflib/plugins/sparql/results/graph.py000066400000000000000000000005001415774155300223710ustar00rootroot00000000000000from rdflib import Graph from rdflib.query import Result, ResultParser class GraphResultParser(ResultParser): def parse(self, source, content_type): res = Result("CONSTRUCT") # hmm - or describe?type_) res.graph = Graph() res.graph.parse(source, format=content_type) return res rdflib-6.1.1/rdflib/plugins/sparql/results/jsonresults.py000066400000000000000000000072521415774155300236760ustar00rootroot00000000000000import json from typing import IO, Any, Dict, Optional, TextIO, Union from rdflib.query import Result, ResultException, ResultSerializer, ResultParser from rdflib import Literal, URIRef, BNode, Variable """A Serializer for SPARQL results in JSON: http://www.w3.org/TR/rdf-sparql-json-res/ Bits and pieces borrowed from: http://projects.bigasterisk.com/sparqlhttp/ Authors: Drew Perttula, Gunnar Aastrand Grimnes """ class JSONResultParser(ResultParser): def parse(self, source, content_type=None): inp = 
source.read() if isinstance(inp, bytes): inp = inp.decode("utf-8") return JSONResult(json.loads(inp)) class JSONResultSerializer(ResultSerializer): def __init__(self, result): ResultSerializer.__init__(self, result) def serialize(self, stream: IO, encoding: str = None): # type: ignore[override] res: Dict[str, Any] = {} if self.result.type == "ASK": res["head"] = {} res["boolean"] = self.result.askAnswer else: # select res["results"] = {} res["head"] = {} res["head"]["vars"] = self.result.vars res["results"]["bindings"] = [ self._bindingToJSON(x) for x in self.result.bindings ] r = json.dumps(res, allow_nan=False, ensure_ascii=False) if encoding is not None: stream.write(r.encode(encoding)) else: stream.write(r) def _bindingToJSON(self, b): res = {} for var in b: j = termToJSON(self, b[var]) if j is not None: res[var] = termToJSON(self, b[var]) return res class JSONResult(Result): def __init__(self, json): self.json = json if "boolean" in json: type_ = "ASK" elif "results" in json: type_ = "SELECT" else: raise ResultException("No boolean or results in json!") Result.__init__(self, type_) if type_ == "ASK": self.askAnswer = bool(json["boolean"]) else: self.bindings = self._get_bindings() self.vars = [Variable(x) for x in json["head"]["vars"]] def _get_bindings(self): ret = [] for row in self.json["results"]["bindings"]: outRow = {} for k, v in row.items(): outRow[Variable(k)] = parseJsonTerm(v) ret.append(outRow) return ret def parseJsonTerm(d): """rdflib object (Literal, URIRef, BNode) for the given json-format dict. 
input is like: { 'type': 'uri', 'value': 'http://famegame.com/2006/01/username' } { 'type': 'literal', 'value': 'drewp' } """ t = d["type"] if t == "uri": return URIRef(d["value"]) elif t == "literal": return Literal(d["value"], datatype=d.get("datatype"), lang=d.get("xml:lang")) elif t == "typed-literal": return Literal(d["value"], datatype=URIRef(d["datatype"])) elif t == "bnode": return BNode(d["value"]) else: raise NotImplementedError("json term type %r" % t) def termToJSON(self, term): if isinstance(term, URIRef): return {"type": "uri", "value": str(term)} elif isinstance(term, Literal): r = {"type": "literal", "value": str(term)} if term.datatype is not None: r["datatype"] = str(term.datatype) if term.language is not None: r["xml:lang"] = term.language return r elif isinstance(term, BNode): return {"type": "bnode", "value": str(term)} elif term is None: return None else: raise ResultException("Unknown term type: %s (%s)" % (term, type(term))) rdflib-6.1.1/rdflib/plugins/sparql/results/rdfresults.py000066400000000000000000000032241415774155300234730ustar00rootroot00000000000000from rdflib import Graph, Namespace, RDF, Variable from rdflib.query import Result, ResultParser RS = Namespace("http://www.w3.org/2001/sw/DataAccess/tests/result-set#") class RDFResultParser(ResultParser): def parse(self, source, **kwargs): return RDFResult(source, **kwargs) class RDFResult(Result): def __init__(self, source, **kwargs): if not isinstance(source, Graph): graph = Graph() graph.parse(source, **kwargs) else: graph = source rs = graph.value(predicate=RDF.type, object=RS.ResultSet) # there better be only one :) if rs is None: type_ = "CONSTRUCT" # use a new graph g = Graph() g += graph else: askAnswer = graph.value(rs, RS.boolean) if askAnswer is not None: type_ = "ASK" else: type_ = "SELECT" Result.__init__(self, type_) if type_ == "SELECT": self.vars = [Variable(v) for v in graph.objects(rs, RS.resultVariable)] self.bindings = [] for s in graph.objects(rs, RS.solution): sol 
= {} for b in graph.objects(s, RS.binding): sol[Variable(graph.value(b, RS.variable))] = graph.value( b, RS.value ) self.bindings.append(sol) elif type_ == "ASK": self.askAnswer = askAnswer.value if askAnswer.value is None: raise Exception("Malformed boolean in ask answer!") elif type_ == "CONSTRUCT": self.graph = g rdflib-6.1.1/rdflib/plugins/sparql/results/tsvresults.py000066400000000000000000000054411415774155300235370ustar00rootroot00000000000000""" This implements the Tab Separated SPARQL Result Format It is implemented with pyparsing, reusing the elements from the SPARQL Parser """ import codecs from pyparsing import ( Optional, ZeroOrMore, Literal, ParserElement, ParseException, Suppress, FollowedBy, LineEnd, ) from rdflib.query import Result, ResultParser from rdflib.plugins.sparql.parser import ( Var, STRING_LITERAL1, STRING_LITERAL2, IRIREF, BLANK_NODE_LABEL, NumericLiteral, BooleanLiteral, LANGTAG, ) from rdflib.plugins.sparql.parserutils import Comp, Param, CompValue from rdflib import Literal as RDFLiteral ParserElement.setDefaultWhitespaceChars(" \n") String = STRING_LITERAL1 | STRING_LITERAL2 RDFLITERAL = Comp( "literal", Param("string", String) + Optional( Param("lang", LANGTAG.leaveWhitespace()) | Literal("^^").leaveWhitespace() + Param("datatype", IRIREF).leaveWhitespace() ), ) NONE_VALUE = object() EMPTY = FollowedBy(LineEnd()) | FollowedBy("\t") EMPTY.setParseAction(lambda x: NONE_VALUE) TERM = RDFLITERAL | IRIREF | BLANK_NODE_LABEL | NumericLiteral | BooleanLiteral ROW = (EMPTY | TERM) + ZeroOrMore(Suppress("\t") + (EMPTY | TERM)) ROW.parseWithTabs() HEADER = Var + ZeroOrMore(Suppress("\t") + Var) HEADER.parseWithTabs() class TSVResultParser(ResultParser): def parse(self, source, content_type=None): if isinstance(source.read(0), bytes): # if reading from source returns bytes do utf-8 decoding source = codecs.getreader("utf-8")(source) try: r = Result("SELECT") header = source.readline() r.vars = list(HEADER.parseString(header.strip(), 
parseAll=True)) r.bindings = [] while True: line = source.readline() if not line: break line = line.strip("\n") if line == "": continue row = ROW.parseString(line, parseAll=True) r.bindings.append(dict(zip(r.vars, (self.convertTerm(x) for x in row)))) return r except ParseException as err: print(err.line) print(" " * (err.column - 1) + "^") print(err) def convertTerm(self, t): if t is NONE_VALUE: return None if isinstance(t, CompValue): if t.name == "literal": return RDFLiteral(t.string, lang=t.lang, datatype=t.datatype) else: raise Exception("I dont know how to handle this: %s" % (t,)) else: return t if __name__ == "__main__": import sys r = Result.parse(source=sys.argv[1], format="tsv") print(r.vars) print(r.bindings) # print r.serialize(format='json') rdflib-6.1.1/rdflib/plugins/sparql/results/txtresults.py000066400000000000000000000042301415774155300235350ustar00rootroot00000000000000from typing import IO, List, Optional from rdflib import URIRef, BNode, Literal from rdflib.query import ResultSerializer from rdflib.namespace import NamespaceManager from rdflib.term import Variable def _termString(t, namespace_manager: Optional[NamespaceManager]): if t is None: return "-" if namespace_manager: if isinstance(t, URIRef): return namespace_manager.normalizeUri(t) elif isinstance(t, BNode): return t.n3() elif isinstance(t, Literal): return t._literal_n3(qname_callback=namespace_manager.normalizeUri) else: return t.n3() class TXTResultSerializer(ResultSerializer): """ A write only QueryResult serializer for text/ascii tables """ # TODO FIXME: class specific args should be keyword only. 
def serialize( # type: ignore[override] self, stream: IO, encoding: str, namespace_manager: Optional[NamespaceManager] = None, ): """ return a text table of query results """ def c(s, w): """ center the string s in w wide string """ w -= len(s) h1 = h2 = w // 2 if w % 2: h2 += 1 return " " * h1 + s + " " * h2 if self.result.type != "SELECT": raise Exception("Can only pretty print SELECT results!") if not self.result: return "(no results)\n" else: keys: List[Variable] = self.result.vars # type: ignore[assignment] maxlen = [0] * len(keys) b = [ [_termString(r[k], namespace_manager) for k in keys] for r in self.result ] for r in b: for i in range(len(keys)): maxlen[i] = max(maxlen[i], len(r[i])) stream.write("|".join([c(k, maxlen[i]) for i, k in enumerate(keys)]) + "\n") stream.write("-" * (len(maxlen) + sum(maxlen)) + "\n") for r in sorted(b): stream.write( "|".join([t + " " * (i - len(t)) for i, t in zip(maxlen, r)]) + "\n" ) rdflib-6.1.1/rdflib/plugins/sparql/results/xmlresults.py000066400000000000000000000176441415774155300235330ustar00rootroot00000000000000import logging from typing import IO, Optional from xml.sax.saxutils import XMLGenerator from xml.dom import XML_NAMESPACE from xml.sax.xmlreader import AttributesNSImpl from rdflib.compat import etree from rdflib import Literal, URIRef, BNode, Variable from rdflib.query import Result, ResultParser, ResultSerializer, ResultException SPARQL_XML_NAMESPACE = "http://www.w3.org/2005/sparql-results#" RESULTS_NS_ET = "{%s}" % SPARQL_XML_NAMESPACE log = logging.getLogger(__name__) """A Parser for SPARQL results in XML: http://www.w3.org/TR/rdf-sparql-XMLres/ Bits and pieces borrowed from: http://projects.bigasterisk.com/sparqlhttp/ Authors: Drew Perttula, Gunnar Aastrand Grimnes """ class XMLResultParser(ResultParser): # TODO FIXME: content_type should be a keyword only arg. 
def parse(self, source, content_type: Optional[str] = None): # type: ignore[override] return XMLResult(source) class XMLResult(Result): def __init__(self, source, content_type: Optional[str] = None): try: # try use as if etree is from lxml, and if not use it as normal. parser = etree.XMLParser(huge_tree=True) # type: ignore[call-arg] tree = etree.parse(source, parser) except TypeError: tree = etree.parse(source) boolean = tree.find(RESULTS_NS_ET + "boolean") results = tree.find(RESULTS_NS_ET + "results") if boolean is not None: type_ = "ASK" elif results is not None: type_ = "SELECT" else: raise ResultException("No RDF result-bindings or boolean answer found!") Result.__init__(self, type_) if type_ == "SELECT": self.bindings = [] for result in results: # type: ignore[union-attr] r = {} for binding in result: r[Variable(binding.get("name"))] = parseTerm(binding[0]) self.bindings.append(r) self.vars = [ Variable(x.get("name")) for x in tree.findall( "./%shead/%svariable" % (RESULTS_NS_ET, RESULTS_NS_ET) ) ] else: self.askAnswer = boolean.text.lower().strip() == "true" # type: ignore[union-attr] def parseTerm(element): """rdflib object (Literal, URIRef, BNode) for the given elementtree element""" tag, text = element.tag, element.text if tag == RESULTS_NS_ET + "literal": if text is None: text = "" datatype = None lang = None if element.get("datatype", None): datatype = URIRef(element.get("datatype")) elif element.get("{%s}lang" % XML_NAMESPACE, None): lang = element.get("{%s}lang" % XML_NAMESPACE) ret = Literal(text, datatype=datatype, lang=lang) return ret elif tag == RESULTS_NS_ET + "uri": return URIRef(text) elif tag == RESULTS_NS_ET + "bnode": return BNode(text) else: raise TypeError("unknown binding type %r" % element) class XMLResultSerializer(ResultSerializer): def __init__(self, result): ResultSerializer.__init__(self, result) def serialize(self, stream: IO, encoding: str = "utf-8", **kwargs): writer = SPARQLXMLWriter(stream, encoding) if self.result.type == 
"ASK": writer.write_header([]) writer.write_ask(self.result.askAnswer) else: writer.write_header(self.result.vars) writer.write_results_header() for b in self.result.bindings: writer.write_start_result() for key, val in b.items(): writer.write_binding(key, val) writer.write_end_result() writer.close() # TODO: Rewrite with ElementTree? class SPARQLXMLWriter: """ Python saxutils-based SPARQL XML Writer """ def __init__(self, output, encoding="utf-8"): writer = XMLGenerator(output, encoding) writer.startDocument() writer.startPrefixMapping("", SPARQL_XML_NAMESPACE) writer.startPrefixMapping("xml", XML_NAMESPACE) writer.startElementNS( (SPARQL_XML_NAMESPACE, "sparql"), "sparql", AttributesNSImpl({}, {}) ) self.writer = writer self._output = output self._encoding = encoding self._results = False def write_header(self, allvarsL): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "head"), "head", AttributesNSImpl({}, {}) ) for i in range(0, len(allvarsL)): attr_vals = { (None, "name"): str(allvarsL[i]), } attr_qnames = { (None, "name"): "name", } self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "variable"), "variable", AttributesNSImpl(attr_vals, attr_qnames), ) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "variable"), "variable") self.writer.endElementNS((SPARQL_XML_NAMESPACE, "head"), "head") def write_ask(self, val): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "boolean"), "boolean", AttributesNSImpl({}, {}) ) self.writer.characters(str(val).lower()) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "boolean"), "boolean") def write_results_header(self): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "results"), "results", AttributesNSImpl({}, {}) ) self._results = True def write_start_result(self): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "result"), "result", AttributesNSImpl({}, {}) ) self._resultStarted = True def write_end_result(self): assert self._resultStarted self.writer.endElementNS((SPARQL_XML_NAMESPACE, "result"), "result") 
self._resultStarted = False def write_binding(self, name, val): assert self._resultStarted attr_vals = { (None, "name"): str(name), } attr_qnames = { (None, "name"): "name", } self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "binding"), "binding", AttributesNSImpl(attr_vals, attr_qnames), ) if isinstance(val, URIRef): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "uri"), "uri", AttributesNSImpl({}, {}) ) self.writer.characters(val) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "uri"), "uri") elif isinstance(val, BNode): self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "bnode"), "bnode", AttributesNSImpl({}, {}) ) self.writer.characters(val) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "bnode"), "bnode") elif isinstance(val, Literal): attr_vals = {} attr_qnames = {} if val.language: attr_vals[(XML_NAMESPACE, "lang")] = val.language attr_qnames[(XML_NAMESPACE, "lang")] = "xml:lang" elif val.datatype: attr_vals[(None, "datatype")] = val.datatype attr_qnames[(None, "datatype")] = "datatype" self.writer.startElementNS( (SPARQL_XML_NAMESPACE, "literal"), "literal", AttributesNSImpl(attr_vals, attr_qnames), ) self.writer.characters(val) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "literal"), "literal") else: raise Exception("Unsupported RDF term: %s" % val) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "binding"), "binding") def close(self): if self._results: self.writer.endElementNS((SPARQL_XML_NAMESPACE, "results"), "results") self.writer.endElementNS((SPARQL_XML_NAMESPACE, "sparql"), "sparql") self.writer.endDocument() rdflib-6.1.1/rdflib/plugins/sparql/sparql.py000066400000000000000000000253721415774155300211070ustar00rootroot00000000000000import collections import itertools import datetime import isodate from rdflib.compat import Mapping, MutableMapping from rdflib.namespace import NamespaceManager from rdflib import Variable, BNode, Graph, ConjunctiveGraph, URIRef, Literal from rdflib.term import Node from rdflib.plugins.sparql.parserutils import 
CompValue import rdflib.plugins.sparql class SPARQLError(Exception): def __init__(self, msg=None): Exception.__init__(self, msg) class NotBoundError(SPARQLError): def __init__(self, msg=None): SPARQLError.__init__(self, msg) class AlreadyBound(SPARQLError): """Raised when trying to bind a variable that is already bound!""" def __init__(self): SPARQLError.__init__(self) class SPARQLTypeError(SPARQLError): def __init__(self, msg): SPARQLError.__init__(self, msg) class Bindings(MutableMapping): """ A single level of a stack of variable-value bindings. Each dict keeps a reference to the dict below it, any failed lookup is propegated back In python 3.3 this could be a collections.ChainMap """ def __init__(self, outer=None, d=[]): self._d = dict(d) self.outer = outer def __getitem__(self, key): if key in self._d: return self._d[key] if not self.outer: raise KeyError() return self.outer[key] def __contains__(self, key): try: self[key] return True except KeyError: return False def __setitem__(self, key, value): self._d[key] = value def __delitem__(self, key): raise Exception("DelItem is not implemented!") def __len__(self) -> int: i = 0 d = self while d is not None: i += len(d._d) d = d.outer return i # type: ignore[unreachable] def __iter__(self): d = self while d is not None: yield from d._d d = d.outer def __str__(self): return "Bindings({" + ", ".join((k, self[k]) for k in self) + "})" def __repr__(self): return str(self) class FrozenDict(Mapping): """ An immutable hashable dict Taken from http://stackoverflow.com/a/2704866/81121 """ def __init__(self, *args, **kwargs): self._d = dict(*args, **kwargs) self._hash = None def __iter__(self): return iter(self._d) def __len__(self): return len(self._d) def __getitem__(self, key): return self._d[key] def __hash__(self): # It would have been simpler and maybe more obvious to # use hash(tuple(sorted(self._d.items()))) from this discussion # so far, but this solution is O(n). 
I don't know what kind of # n we are going to run into, but sometimes it's hard to resist the # urge to optimize when it will gain improved algorithmic performance. if self._hash is None: self._hash = 0 for key, value in self.items(): self._hash ^= hash(key) self._hash ^= hash(value) return self._hash def project(self, vars): return FrozenDict((x for x in self.items() if x[0] in vars)) def disjointDomain(self, other): return not bool(set(self).intersection(other)) def compatible(self, other): for k in self: try: if self[k] != other[k]: return False except KeyError: pass return True def merge(self, other): res = FrozenDict(itertools.chain(self.items(), other.items())) return res def __str__(self): return str(self._d) def __repr__(self): return repr(self._d) class FrozenBindings(FrozenDict): def __init__(self, ctx, *args, **kwargs): FrozenDict.__init__(self, *args, **kwargs) self.ctx = ctx def __getitem__(self, key): if not isinstance(key, Node): key = Variable(key) if not isinstance(key, (BNode, Variable)): return key if key not in self._d: return self.ctx.initBindings[key] else: return self._d[key] def project(self, vars): return FrozenBindings(self.ctx, (x for x in self.items() if x[0] in vars)) def merge(self, other): res = FrozenBindings(self.ctx, itertools.chain(self.items(), other.items())) return res @property def now(self): return self.ctx.now @property def bnodes(self): return self.ctx.bnodes @property def prologue(self): return self.ctx.prologue def forget(self, before, _except=None): """ return a frozen dict only of bindings made in self since before """ if not _except: _except = [] # bindings from initBindings are newer forgotten return FrozenBindings( self.ctx, ( x for x in self.items() if ( x[0] in _except or x[0] in self.ctx.initBindings or before[x[0]] is None ) ), ) def remember(self, these): """ return a frozen dict only of bindings in these """ return FrozenBindings(self.ctx, (x for x in self.items() if x[0] in these)) class QueryContext(object): 
""" Query context - passed along when evaluating the query """ def __init__(self, graph=None, bindings=None, initBindings=None): self.initBindings = initBindings self.bindings = Bindings(d=bindings or []) if initBindings: self.bindings.update(initBindings) if isinstance(graph, ConjunctiveGraph): self._dataset = graph if rdflib.plugins.sparql.SPARQL_DEFAULT_GRAPH_UNION: self.graph = self.dataset else: self.graph = self.dataset.default_context else: self._dataset = None self.graph = graph self.prologue = None self._now = None self.bnodes = collections.defaultdict(BNode) @property def now(self) -> datetime.datetime: if self._now is None: self._now = datetime.datetime.now(isodate.tzinfo.UTC) return self._now def clone(self, bindings=None): r = QueryContext( self._dataset if self._dataset is not None else self.graph, bindings or self.bindings, initBindings=self.initBindings, ) r.prologue = self.prologue r.graph = self.graph r.bnodes = self.bnodes return r @property def dataset(self): """ "current dataset""" if self._dataset is None: raise Exception( "You performed a query operation requiring " + "a dataset (i.e. ConjunctiveGraph), but " + "operating currently on a single graph." 
) return self._dataset def load(self, source, default=False, **kwargs): def _load(graph, source): try: return graph.parse(source, format="turtle", **kwargs) except Exception: pass try: return graph.parse(source, format="xml", **kwargs) except Exception: pass try: return graph.parse(source, format="n3", **kwargs) except Exception: pass try: return graph.parse(source, format="nt", **kwargs) except Exception: raise Exception( "Could not load %s as either RDF/XML, N3 or NTriples" % source ) if not rdflib.plugins.sparql.SPARQL_LOAD_GRAPHS: # we are not loading - if we already know the graph # being "loaded", just add it to the default-graph if default: self.graph += self.dataset.get_context(source) else: if default: _load(self.graph, source) else: _load(self.dataset, source) def __getitem__(self, key): # in SPARQL BNodes are just labels if not isinstance(key, (BNode, Variable)): return key try: return self.bindings[key] except KeyError: return None def get(self, key, default=None): try: return self[key] except KeyError: return default def solution(self, vars=None): """ Return a static copy of the current variable bindings as dict """ if vars: return FrozenBindings( self, ((k, v) for k, v in self.bindings.items() if k in vars) ) else: return FrozenBindings(self, self.bindings.items()) def __setitem__(self, key, value): if key in self.bindings and self.bindings[key] != value: raise AlreadyBound() self.bindings[key] = value def pushGraph(self, graph): r = self.clone() r.graph = graph return r def push(self): r = self.clone(Bindings(self.bindings)) return r def clean(self): return self.clone([]) def thaw(self, frozenbindings): """ Create a new read/write query context from the given solution """ c = self.clone(frozenbindings) return c class Prologue: """ A class for holding prefixing bindings and base URI information """ def __init__(self): self.base = None self.namespace_manager = NamespaceManager(Graph()) # ns man needs a store def resolvePName(self, prefix, localname): 
ns = self.namespace_manager.store.namespace(prefix or "") if ns is None: raise Exception("Unknown namespace prefix : %s" % prefix) return URIRef(ns + (localname or "")) def bind(self, prefix, uri): self.namespace_manager.bind(prefix, uri, replace=True) def absolutize(self, iri): """ Apply BASE / PREFIXes to URIs (and to datatypes in Literals) TODO: Move resolving URIs to pre-processing """ if isinstance(iri, CompValue): if iri.name == "pname": return self.resolvePName(iri.prefix, iri.localname) if iri.name == "literal": return Literal( iri.string, lang=iri.lang, datatype=self.absolutize(iri.datatype) ) elif isinstance(iri, URIRef) and not ":" in iri: return URIRef(iri, base=self.base) return iri class Query: """ A parsed and translated query """ def __init__(self, prologue, algebra): self.prologue = prologue self.algebra = algebra rdflib-6.1.1/rdflib/plugins/sparql/update.py000066400000000000000000000171711415774155300210650ustar00rootroot00000000000000""" Code for carrying out Update Operations """ from rdflib import Graph, Variable from rdflib.plugins.sparql.sparql import QueryContext from rdflib.plugins.sparql.evalutils import _fillTemplate, _join from rdflib.plugins.sparql.evaluate import evalBGP, evalPart def _graphOrDefault(ctx, g): if g == "DEFAULT": return ctx.graph else: return ctx.dataset.get_context(g) def _graphAll(ctx, g): """ return a list of graphs """ if g == "DEFAULT": return [ctx.graph] elif g == "NAMED": return [ c for c in ctx.dataset.contexts() if c.identifier != ctx.graph.identifier ] elif g == "ALL": return list(ctx.dataset.contexts()) else: return [ctx.dataset.get_context(g)] def evalLoad(ctx, u): """ http://www.w3.org/TR/sparql11-update/#load """ if u.graphiri: ctx.load(u.iri, default=False, publicID=u.graphiri) else: ctx.load(u.iri, default=True) def evalCreate(ctx, u): """ http://www.w3.org/TR/sparql11-update/#create """ g = ctx.dataset.get_context(u.graphiri) if len(g) > 0: raise Exception("Graph %s already exists." 
% g.identifier) raise Exception("Create not implemented!") def evalClear(ctx, u): """ http://www.w3.org/TR/sparql11-update/#clear """ for g in _graphAll(ctx, u.graphiri): g.remove((None, None, None)) def evalDrop(ctx, u): """ http://www.w3.org/TR/sparql11-update/#drop """ if ctx.dataset.store.graph_aware: for g in _graphAll(ctx, u.graphiri): ctx.dataset.store.remove_graph(g) else: evalClear(ctx, u) def evalInsertData(ctx, u): """ http://www.w3.org/TR/sparql11-update/#insertData """ # add triples g = ctx.graph g += u.triples # add quads # u.quads is a dict of graphURI=>[triples] for g in u.quads: cg = ctx.dataset.get_context(g) cg += u.quads[g] def evalDeleteData(ctx, u): """ http://www.w3.org/TR/sparql11-update/#deleteData """ # remove triples g = ctx.graph g -= u.triples # remove quads # u.quads is a dict of graphURI=>[triples] for g in u.quads: cg = ctx.dataset.get_context(g) cg -= u.quads[g] def evalDeleteWhere(ctx, u): """ http://www.w3.org/TR/sparql11-update/#deleteWhere """ res = evalBGP(ctx, u.triples) for g in u.quads: cg = ctx.dataset.get_context(g) c = ctx.pushGraph(cg) res = _join(res, list(evalBGP(c, u.quads[g]))) for c in res: g = ctx.graph g -= _fillTemplate(u.triples, c) for g in u.quads: cg = ctx.dataset.get_context(c.get(g)) cg -= _fillTemplate(u.quads[g], c) def evalModify(ctx, u): originalctx = ctx # Using replaces the dataset for evaluating the where-clause if u.using: otherDefault = False for d in u.using: if d.default: if not otherDefault: # replace current default graph dg = Graph() ctx = ctx.pushGraph(dg) otherDefault = True ctx.load(d.default, default=True) elif d.named: g = d.named ctx.load(g, default=False) # "The WITH clause provides a convenience for when an operation # primarily refers to a single graph. 
If a graph name is specified # in a WITH clause, then - for the purposes of evaluating the # WHERE clause - this will define an RDF Dataset containing a # default graph with the specified name, but only in the absence # of USING or USING NAMED clauses. In the presence of one or more # graphs referred to in USING clauses and/or USING NAMED clauses, # the WITH clause will be ignored while evaluating the WHERE # clause." if not u.using and u.withClause: g = ctx.dataset.get_context(u.withClause) ctx = ctx.pushGraph(g) res = evalPart(ctx, u.where) if u.using: if otherDefault: ctx = originalctx # restore original default graph if u.withClause: g = ctx.dataset.get_context(u.withClause) ctx = ctx.pushGraph(g) for c in res: dg = ctx.graph if u.delete: dg -= _fillTemplate(u.delete.triples, c) for g, q in u.delete.quads.items(): cg = ctx.dataset.get_context(c.get(g)) cg -= _fillTemplate(q, c) if u.insert: dg += _fillTemplate(u.insert.triples, c) for g, q in u.insert.quads.items(): cg = ctx.dataset.get_context(c.get(g)) cg += _fillTemplate(q, c) def evalAdd(ctx, u): """ add all triples from src to dst http://www.w3.org/TR/sparql11-update/#add """ src, dst = u.graph srcg = _graphOrDefault(ctx, src) dstg = _graphOrDefault(ctx, dst) if srcg.identifier == dstg.identifier: return dstg += srcg def evalMove(ctx, u): """ remove all triples from dst add all triples from src to dst remove all triples from src http://www.w3.org/TR/sparql11-update/#move """ src, dst = u.graph srcg = _graphOrDefault(ctx, src) dstg = _graphOrDefault(ctx, dst) if srcg.identifier == dstg.identifier: return dstg.remove((None, None, None)) dstg += srcg if ctx.dataset.store.graph_aware: ctx.dataset.store.remove_graph(srcg) else: srcg.remove((None, None, None)) def evalCopy(ctx, u): """ remove all triples from dst add all triples from src to dst http://www.w3.org/TR/sparql11-update/#copy """ src, dst = u.graph srcg = _graphOrDefault(ctx, src) dstg = _graphOrDefault(ctx, dst) if srcg.identifier == dstg.identifier: 
return dstg.remove((None, None, None)) dstg += srcg def evalUpdate(graph, update, initBindings={}): """ http://www.w3.org/TR/sparql11-update/#updateLanguage 'A request is a sequence of operations [...] Implementations MUST ensure that operations of a single request are executed in a fashion that guarantees the same effects as executing them in lexical order. Operations all result either in success or failure. If multiple operations are present in a single request, then a result of failure from any operation MUST abort the sequence of operations, causing the subsequent operations to be ignored.' This will return None on success and raise Exceptions on error """ for u in update: initBindings = dict((Variable(k), v) for k, v in initBindings.items()) ctx = QueryContext(graph, initBindings=initBindings) ctx.prologue = u.prologue try: if u.name == "Load": evalLoad(ctx, u) elif u.name == "Clear": evalClear(ctx, u) elif u.name == "Drop": evalDrop(ctx, u) elif u.name == "Create": evalCreate(ctx, u) elif u.name == "Add": evalAdd(ctx, u) elif u.name == "Move": evalMove(ctx, u) elif u.name == "Copy": evalCopy(ctx, u) elif u.name == "InsertData": evalInsertData(ctx, u) elif u.name == "DeleteData": evalDeleteData(ctx, u) elif u.name == "DeleteWhere": evalDeleteWhere(ctx, u) elif u.name == "Modify": evalModify(ctx, u) else: raise Exception("Unknown update operation: %s" % (u,)) except: if not u.silent: raise rdflib-6.1.1/rdflib/plugins/stores/000077500000000000000000000000001415774155300172375ustar00rootroot00000000000000rdflib-6.1.1/rdflib/plugins/stores/__init__.py000066400000000000000000000001031415774155300213420ustar00rootroot00000000000000""" This package contains modules for additional RDFLib stores """ rdflib-6.1.1/rdflib/plugins/stores/auditable.py000066400000000000000000000135711415774155300215520ustar00rootroot00000000000000""" This wrapper intercepts calls through the store interface and implements thread-safe logging of destructive operations (adds / removes) in 
reverse. This is persisted on the store instance and the reverse operations are executed In order to return the store to the state it was when the transaction began Since the reverse operations are persisted on the store, the store itself acts as a transaction. Calls to commit or rollback, flush the list of reverse operations This provides thread-safe atomicity and isolation (assuming concurrent operations occur with different store instances), but no durability (transactions are persisted in memory and wont be available to reverse operations after the system fails): A and I out of ACID. """ from rdflib.store import Store from rdflib import Graph, ConjunctiveGraph import threading destructiveOpLocks = { "add": None, "remove": None, } class AuditableStore(Store): def __init__(self, store): self.store = store self.context_aware = store.context_aware # NOTE: this store can't be formula_aware as it doesn't have enough # info to reverse the removal of a quoted statement self.formula_aware = False # store.formula_aware self.transaction_aware = True # This is only half true self.reverseOps = [] self.rollbackLock = threading.RLock() def open(self, configuration, create=True): return self.store.open(configuration, create) def close(self, commit_pending_transaction=False): self.store.close() def destroy(self, configuration): self.store.destroy(configuration) def query(self, *args, **kw): return self.store.query(*args, **kw) def add(self, triple, context, quoted=False): (s, p, o) = triple lock = destructiveOpLocks["add"] lock = lock if lock else threading.RLock() with lock: context = ( context.__class__(self.store, context.identifier) if context is not None else None ) ctxId = context.identifier if context is not None else None if list(self.store.triples(triple, context)): return # triple already in store, do nothing self.reverseOps.append((s, p, o, ctxId, "remove")) try: self.reverseOps.remove((s, p, o, ctxId, "add")) except ValueError: pass self.store.add((s, p, o), 
context, quoted) def remove(self, spo, context=None): subject, predicate, object_ = spo lock = destructiveOpLocks["remove"] lock = lock if lock else threading.RLock() with lock: # Need to determine which quads will be removed if any term is a # wildcard context = ( context.__class__(self.store, context.identifier) if context is not None else None ) ctxId = context.identifier if context is not None else None if None in [subject, predicate, object_, context]: if ctxId: for s, p, o in context.triples((subject, predicate, object_)): try: self.reverseOps.remove((s, p, o, ctxId, "remove")) except ValueError: self.reverseOps.append((s, p, o, ctxId, "add")) else: for s, p, o, ctx in ConjunctiveGraph(self.store).quads( (subject, predicate, object_) ): try: self.reverseOps.remove((s, p, o, ctx.identifier, "remove")) except ValueError: self.reverseOps.append((s, p, o, ctx.identifier, "add")) else: if not list(self.triples((subject, predicate, object_), context)): return # triple not present in store, do nothing try: self.reverseOps.remove( (subject, predicate, object_, ctxId, "remove") ) except ValueError: self.reverseOps.append((subject, predicate, object_, ctxId, "add")) self.store.remove((subject, predicate, object_), context) def triples(self, triple, context=None): (su, pr, ob) = triple context = ( context.__class__(self.store, context.identifier) if context is not None else None ) for (s, p, o), cg in self.store.triples((su, pr, ob), context): yield (s, p, o), cg def __len__(self, context=None): context = ( context.__class__(self.store, context.identifier) if context is not None else None ) return self.store.__len__(context) def contexts(self, triple=None): for ctx in self.store.contexts(triple): yield ctx def bind(self, prefix, namespace): self.store.bind(prefix, namespace) def prefix(self, namespace): return self.store.prefix(namespace) def namespace(self, prefix): return self.store.namespace(prefix) def namespaces(self): return self.store.namespaces() def 
commit(self): self.reverseOps = [] def rollback(self): # Acquire Rollback lock and apply reverse operations in the forward # order with self.rollbackLock: for subject, predicate, obj, context, op in self.reverseOps: if op == "add": self.store.add( (subject, predicate, obj), Graph(self.store, context) ) else: self.store.remove( (subject, predicate, obj), Graph(self.store, context) ) self.reverseOps = [] rdflib-6.1.1/rdflib/plugins/stores/berkeleydb.py000066400000000000000000000527171415774155300217350ustar00rootroot00000000000000import logging from threading import Thread from os.path import exists, abspath from os import mkdir from rdflib.store import Store, VALID_STORE, NO_STORE from rdflib.term import URIRef from urllib.request import pathname2url def bb(u): return u.encode("utf-8") try: from berkeleydb import db has_bsddb = True except ImportError: has_bsddb = False if has_bsddb: # These are passed to bsddb when creating DBs # passed to db.DBEnv.set_flags ENVSETFLAGS = db.DB_CDB_ALLDB # passed to db.DBEnv.open ENVFLAGS = db.DB_INIT_MPOOL | db.DB_INIT_CDB | db.DB_THREAD CACHESIZE = 1024 * 1024 * 50 # passed to db.DB.Open() DBOPENFLAGS = db.DB_THREAD logger = logging.getLogger(__name__) __all__ = ["BerkeleyDB"] class BerkeleyDB(Store): """\ A store that allows for on-disk persistent using BerkeleyDB, a fast key/value DB. This store implementation used to be known, previous to rdflib 6.0.0 as 'Sleepycat' due to that being the then name of the Python wrapper for BerkeleyDB. This store allows for quads as well as triples. See examples of use in both the `examples.berkeleydb_example` and `test.test_store_berkeleydb` files. **NOTE on installation**: To use this store, you must have BerkeleyDB installed on your system separately to Python (`brew install berkeley-db` on a Mac) and also have the BerkeleyDB Python wrapper installed (`pip install berkeleydb`). 
You may need to install BerkeleyDB Python wrapper like this: `YES_I_HAVE_THE_RIGHT_TO_USE_THIS_BERKELEY_DB_VERSION=1 pip install berkeleydb` """ context_aware = True formula_aware = True transaction_aware = False graph_aware = True db_env = None def __init__(self, configuration=None, identifier=None): if not has_bsddb: raise ImportError("Unable to import berkeleydb, store is unusable.") self.__open = False self.__identifier = identifier super(BerkeleyDB, self).__init__(configuration) self._loads = self.node_pickler.loads self._dumps = self.node_pickler.dumps def __get_identifier(self): return self.__identifier identifier = property(__get_identifier) def _init_db_environment(self, homeDir, create=True): if not exists(homeDir): if create is True: mkdir(homeDir) # TODO: implement create method and refactor this to it self.create(homeDir) else: return NO_STORE db_env = db.DBEnv() db_env.set_cachesize(0, CACHESIZE) # TODO # db_env.set_lg_max(1024*1024) db_env.set_flags(ENVSETFLAGS, 1) db_env.open(homeDir, ENVFLAGS | db.DB_CREATE) return db_env def is_open(self): return self.__open def open(self, path, create=True): if not has_bsddb: return NO_STORE homeDir = path if self.__identifier is None: self.__identifier = URIRef(pathname2url(abspath(homeDir))) db_env = self._init_db_environment(homeDir, create) if db_env == NO_STORE: return NO_STORE self.db_env = db_env self.__open = True dbname = None dbtype = db.DB_BTREE # auto-commit ensures that the open-call commits when transactions # are enabled dbopenflags = DBOPENFLAGS if self.transaction_aware is True: dbopenflags |= db.DB_AUTO_COMMIT if create: dbopenflags |= db.DB_CREATE dbmode = 0o660 dbsetflags = 0 # create and open the DBs self.__indicies = [ None, ] * 3 self.__indicies_info = [ None, ] * 3 for i in range(0, 3): index_name = to_key_func(i)( ("s".encode("latin-1"), "p".encode("latin-1"), "o".encode("latin-1")), "c".encode("latin-1"), ).decode() index = db.DB(db_env) index.set_flags(dbsetflags) index.open(index_name, 
dbname, dbtype, dbopenflags, dbmode) self.__indicies[i] = index self.__indicies_info[i] = (index, to_key_func(i), from_key_func(i)) lookup = {} for i in range(0, 8): results = [] for start in range(0, 3): score = 1 len = 0 for j in range(start, start + 3): if i & (1 << (j % 3)): score = score << 1 len += 1 else: break tie_break = 2 - start results.append(((score, tie_break), start, len)) results.sort() score, start, len = results[-1] def get_prefix_func(start, end): def get_prefix(triple, context): if context is None: yield "" else: yield context i = start while i < end: yield triple[i % 3] i += 1 yield "" return get_prefix lookup[i] = ( self.__indicies[start], get_prefix_func(start, start + len), from_key_func(start), results_from_key_func(start, self._from_string), ) self.__lookup_dict = lookup self.__contexts = db.DB(db_env) self.__contexts.set_flags(dbsetflags) self.__contexts.open("contexts", dbname, dbtype, dbopenflags, dbmode) self.__namespace = db.DB(db_env) self.__namespace.set_flags(dbsetflags) self.__namespace.open("namespace", dbname, dbtype, dbopenflags, dbmode) self.__prefix = db.DB(db_env) self.__prefix.set_flags(dbsetflags) self.__prefix.open("prefix", dbname, dbtype, dbopenflags, dbmode) self.__k2i = db.DB(db_env) self.__k2i.set_flags(dbsetflags) self.__k2i.open("k2i", dbname, db.DB_HASH, dbopenflags, dbmode) self.__i2k = db.DB(db_env) self.__i2k.set_flags(dbsetflags) self.__i2k.open("i2k", dbname, db.DB_RECNO, dbopenflags, dbmode) self.__needs_sync = False t = Thread(target=self.__sync_run) t.setDaemon(True) t.start() self.__sync_thread = t return VALID_STORE def __sync_run(self): from time import sleep, time try: min_seconds, max_seconds = 10, 300 while self.__open: if self.__needs_sync: t0 = t1 = time() self.__needs_sync = False while self.__open: sleep(0.1) if self.__needs_sync: t1 = time() self.__needs_sync = False if time() - t1 > min_seconds or time() - t0 > max_seconds: self.__needs_sync = False logger.debug("sync") self.sync() break else: 
sleep(1) except Exception as e: logger.exception(e) def sync(self): if self.__open: for i in self.__indicies: i.sync() self.__contexts.sync() self.__namespace.sync() self.__prefix.sync() self.__i2k.sync() self.__k2i.sync() def close(self, commit_pending_transaction=False): self.__open = False self.__sync_thread.join() for i in self.__indicies: i.close() self.__contexts.close() self.__namespace.close() self.__prefix.close() self.__i2k.close() self.__k2i.close() self.db_env.close() def add(self, triple, context, quoted=False, txn=None): """\ Add a triple to the store of triples. """ (subject, predicate, object) = triple assert self.__open, "The Store must be open." assert context != self, "Can not add triple directly to store" Store.add(self, (subject, predicate, object), context, quoted) _to_string = self._to_string s = _to_string(subject, txn=txn) p = _to_string(predicate, txn=txn) o = _to_string(object, txn=txn) c = _to_string(context, txn=txn) cspo, cpos, cosp = self.__indicies value = cspo.get(bb("%s^%s^%s^%s^" % (c, s, p, o)), txn=txn) if value is None: self.__contexts.put(bb(c), b"", txn=txn) contexts_value = cspo.get( bb("%s^%s^%s^%s^" % ("", s, p, o)), txn=txn ) or "".encode("latin-1") contexts = set(contexts_value.split("^".encode("latin-1"))) contexts.add(bb(c)) contexts_value = "^".encode("latin-1").join(contexts) assert contexts_value is not None cspo.put(bb("%s^%s^%s^%s^" % (c, s, p, o)), b"", txn=txn) cpos.put(bb("%s^%s^%s^%s^" % (c, p, o, s)), b"", txn=txn) cosp.put(bb("%s^%s^%s^%s^" % (c, o, s, p)), b"", txn=txn) if not quoted: cspo.put(bb("%s^%s^%s^%s^" % ("", s, p, o)), contexts_value, txn=txn) cpos.put(bb("%s^%s^%s^%s^" % ("", p, o, s)), contexts_value, txn=txn) cosp.put(bb("%s^%s^%s^%s^" % ("", o, s, p)), contexts_value, txn=txn) self.__needs_sync = True def __remove(self, spo, c, quoted=False, txn=None): s, p, o = spo cspo, cpos, cosp = self.__indicies contexts_value = ( cspo.get( "^".encode("latin-1").join( ["".encode("latin-1"), s, p, o, 
"".encode("latin-1")] ), txn=txn, ) or "".encode("latin-1") ) contexts = set(contexts_value.split("^".encode("latin-1"))) contexts.discard(c) contexts_value = "^".encode("latin-1").join(contexts) for i, _to_key, _from_key in self.__indicies_info: i.delete(_to_key((s, p, o), c), txn=txn) if not quoted: if contexts_value: for i, _to_key, _from_key in self.__indicies_info: i.put( _to_key((s, p, o), "".encode("latin-1")), contexts_value, txn=txn, ) else: for i, _to_key, _from_key in self.__indicies_info: try: i.delete(_to_key((s, p, o), "".encode("latin-1")), txn=txn) except db.DBNotFoundError: pass # TODO: is it okay to ignore these? def remove(self, spo, context, txn=None): subject, predicate, object = spo assert self.__open, "The Store must be open." Store.remove(self, (subject, predicate, object), context) _to_string = self._to_string if context is not None: if context == self: context = None if ( subject is not None and predicate is not None and object is not None and context is not None ): s = _to_string(subject, txn=txn) p = _to_string(predicate, txn=txn) o = _to_string(object, txn=txn) c = _to_string(context, txn=txn) value = self.__indicies[0].get(bb("%s^%s^%s^%s^" % (c, s, p, o)), txn=txn) if value is not None: self.__remove((bb(s), bb(p), bb(o)), bb(c), txn=txn) self.__needs_sync = True else: cspo, cpos, cosp = self.__indicies index, prefix, from_key, results_from_key = self.__lookup( (subject, predicate, object), context, txn=txn ) cursor = index.cursor(txn=txn) try: current = cursor.set_range(prefix) needs_sync = True except db.DBNotFoundError: current = None needs_sync = False cursor.close() while current: key, value = current cursor = index.cursor(txn=txn) try: cursor.set_range(key) # Hack to stop 2to3 converting this to next(cursor) current = getattr(cursor, "next")() except db.DBNotFoundError: current = None cursor.close() if key.startswith(prefix): c, s, p, o = from_key(key) if context is None: contexts_value = index.get(key, txn=txn) or 
"".encode("latin-1") # remove triple from all non quoted contexts contexts = set(contexts_value.split("^".encode("latin-1"))) # and from the conjunctive index contexts.add("".encode("latin-1")) for c in contexts: for i, _to_key, _ in self.__indicies_info: i.delete(_to_key((s, p, o), c), txn=txn) else: self.__remove((s, p, o), c, txn=txn) else: break if context is not None: if subject is None and predicate is None and object is None: # TODO: also if context becomes empty and not just on # remove((None, None, None), c) try: self.__contexts.delete( bb(_to_string(context, txn=txn)), txn=txn ) except db.DBNotFoundError: pass self.__needs_sync = needs_sync def triples(self, spo, context=None, txn=None): """A generator over all the triples matching""" assert self.__open, "The Store must be open." subject, predicate, object = spo if context is not None: if context == self: context = None # _from_string = self._from_string ## UNUSED index, prefix, from_key, results_from_key = self.__lookup( (subject, predicate, object), context, txn=txn ) cursor = index.cursor(txn=txn) try: current = cursor.set_range(prefix) except db.DBNotFoundError: current = None cursor.close() while current: key, value = current cursor = index.cursor(txn=txn) try: cursor.set_range(key) # Cheap hack so 2to3 doesn't convert to next(cursor) current = getattr(cursor, "next")() except db.DBNotFoundError: current = None cursor.close() if key and key.startswith(prefix): contexts_value = index.get(key, txn=txn) yield results_from_key(key, subject, predicate, object, contexts_value) else: break def __len__(self, context=None): assert self.__open, "The Store must be open." 
if context is not None: if context == self: context = None if context is None: prefix = "^".encode("latin-1") else: prefix = bb("%s^" % self._to_string(context)) index = self.__indicies[0] cursor = index.cursor() current = cursor.set_range(prefix) count = 0 while current: key, value = current if key.startswith(prefix): count += 1 # Hack to stop 2to3 converting this to next(cursor) current = getattr(cursor, "next")() else: break cursor.close() return count def bind(self, prefix, namespace): prefix = prefix.encode("utf-8") namespace = namespace.encode("utf-8") bound_prefix = self.__prefix.get(namespace) if bound_prefix: self.__namespace.delete(bound_prefix) self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): prefix = prefix.encode("utf-8") ns = self.__namespace.get(prefix, None) if ns is not None: return URIRef(ns.decode("utf-8")) return None def prefix(self, namespace): namespace = namespace.encode("utf-8") prefix = self.__prefix.get(namespace, None) if prefix is not None: return prefix.decode("utf-8") return None def namespaces(self): cursor = self.__namespace.cursor() results = [] current = cursor.first() while current: prefix, namespace = current results.append((prefix.decode("utf-8"), namespace.decode("utf-8"))) # Hack to stop 2to3 converting this to next(cursor) current = getattr(cursor, "next")() cursor.close() for prefix, namespace in results: yield prefix, URIRef(namespace) def contexts(self, triple=None): _from_string = self._from_string _to_string = self._to_string if triple: s, p, o = triple s = _to_string(s) p = _to_string(p) o = _to_string(o) contexts = self.__indicies[0].get(bb("%s^%s^%s^%s^" % ("", s, p, o))) if contexts: for c in contexts.split("^".encode("latin-1")): if c: yield _from_string(c) else: index = self.__contexts cursor = index.cursor() current = cursor.first() cursor.close() while current: key, value = current context = _from_string(key) yield context cursor = index.cursor() try: 
cursor.set_range(key) # Hack to stop 2to3 converting this to next(cursor) current = getattr(cursor, "next")() except db.DBNotFoundError: current = None cursor.close() def add_graph(self, graph): self.__contexts.put(bb(self._to_string(graph)), b"") def remove_graph(self, graph): self.remove((None, None, None), graph) def _from_string(self, i): k = self.__i2k.get(int(i)) return self._loads(k) def _to_string(self, term, txn=None): k = self._dumps(term) i = self.__k2i.get(k, txn=txn) if i is None: # weird behaviour from bsddb not taking a txn as a keyword argument # for append if self.transaction_aware: i = "%s" % self.__i2k.append(k, txn) else: i = "%s" % self.__i2k.append(k) self.__k2i.put(k, i.encode(), txn=txn) else: i = i.decode() return i def __lookup(self, spo, context, txn=None): subject, predicate, object = spo _to_string = self._to_string if context is not None: context = _to_string(context, txn=txn) i = 0 if subject is not None: i += 1 subject = _to_string(subject, txn=txn) if predicate is not None: i += 2 predicate = _to_string(predicate, txn=txn) if object is not None: i += 4 object = _to_string(object, txn=txn) index, prefix_func, from_key, results_from_key = self.__lookup_dict[i] # print (subject, predicate, object), context, prefix_func, index # #DEBUG prefix = bb("^".join(prefix_func((subject, predicate, object), context))) return index, prefix, from_key, results_from_key def to_key_func(i): def to_key(triple, context): "Takes a string; returns key" return "^".encode("latin-1").join( ( context, triple[i % 3], triple[(i + 1) % 3], triple[(i + 2) % 3], "".encode("latin-1"), ) ) # "" to tac on the trailing ^ return to_key def from_key_func(i): def from_key(key): "Takes a key; returns string" parts = key.split("^".encode("latin-1")) return ( parts[0], parts[(3 - i + 0) % 3 + 1], parts[(3 - i + 1) % 3 + 1], parts[(3 - i + 2) % 3 + 1], ) return from_key def results_from_key_func(i, from_string): def from_key(key, subject, predicate, object, contexts_value): 
"Takes a key and subject, predicate, object; returns tuple for yield" parts = key.split("^".encode("latin-1")) if subject is None: # TODO: i & 1: # dis assemble and/or measure to see which is faster # subject is None or i & 1 s = from_string(parts[(3 - i + 0) % 3 + 1]) else: s = subject if predicate is None: # i & 2: p = from_string(parts[(3 - i + 1) % 3 + 1]) else: p = predicate if object is None: # i & 4: o = from_string(parts[(3 - i + 2) % 3 + 1]) else: o = object return ( (s, p, o), (from_string(c) for c in contexts_value.split("^".encode("latin-1")) if c), ) return from_key def readable_index(i): s, p, o = "?" * 3 if i & 1: s = "s" if i & 2: p = "p" if i & 4: o = "o" return "%s,%s,%s" % (s, p, o) rdflib-6.1.1/rdflib/plugins/stores/concurrent.py000066400000000000000000000052251415774155300217770ustar00rootroot00000000000000from threading import Lock class ResponsibleGenerator(object): """A generator that will help clean up when it is done being used.""" __slots__ = ["cleanup", "gen"] def __init__(self, gen, cleanup): self.cleanup = cleanup self.gen = gen def __del__(self): self.cleanup() def __iter__(self): return self def __next__(self): return next(self.gen) class ConcurrentStore(object): def __init__(self, store): self.store = store # number of calls to visit still in progress self.__visit_count = 0 # lock for locking down the indices self.__lock = Lock() # lists for keeping track of added and removed triples while # we wait for the lock self.__pending_removes = [] self.__pending_adds = [] def add(self, triple): (s, p, o) = triple if self.__visit_count == 0: self.store.add((s, p, o)) else: self.__pending_adds.append((s, p, o)) def remove(self, triple): (s, p, o) = triple if self.__visit_count == 0: self.store.remove((s, p, o)) else: self.__pending_removes.append((s, p, o)) def triples(self, triple): (su, pr, ob) = triple g = self.store.triples((su, pr, ob)) pending_removes = self.__pending_removes self.__begin_read() for s, p, o in ResponsibleGenerator(g, 
self.__end_read): if not (s, p, o) in pending_removes: yield s, p, o for (s, p, o) in self.__pending_adds: if ( (su is None or su == s) and (pr is None or pr == p) and (ob is None or ob == o) ): yield s, p, o def __len__(self): return self.store.__len__() def __begin_read(self): lock = self.__lock lock.acquire() self.__visit_count = self.__visit_count + 1 lock.release() def __end_read(self): lock = self.__lock lock.acquire() self.__visit_count = self.__visit_count - 1 if self.__visit_count == 0: pending_removes = self.__pending_removes while pending_removes: (s, p, o) = pending_removes.pop() try: self.store.remove((s, p, o)) except: # TODO: change to try finally? print(s, p, o, "Not in store to remove") pending_adds = self.__pending_adds while pending_adds: (s, p, o) = pending_adds.pop() self.store.add((s, p, o)) lock.release() rdflib-6.1.1/rdflib/plugins/stores/memory.py000066400000000000000000000525071415774155300211320ustar00rootroot00000000000000# # from rdflib.store import Store __all__ = ["SimpleMemory", "Memory"] ANY = None class SimpleMemory(Store): """\ A fast naive in memory implementation of a triple store. This triple store uses nested dictionaries to store triples. Each triple is stored in two such indices as follows spo[s][p][o] = 1 and pos[p][o][s] = 1. Authors: Michel Pelletier, Daniel Krech, Stefan Niederhauser """ def __init__(self, configuration=None, identifier=None): super(SimpleMemory, self).__init__(configuration) self.identifier = identifier # indexed by [subject][predicate][object] self.__spo = {} # indexed by [predicate][object][subject] self.__pos = {} # indexed by [predicate][object][subject] self.__osp = {} self.__namespace = {} self.__prefix = {} def add(self, triple, context, quoted=False): """\ Add a triple to the store of triples. """ # add dictionary entries for spo[s][p][p] = 1 and pos[p][o][s] # = 1, creating the nested dictionaries where they do not yet # exits. 
subject, predicate, object = triple spo = self.__spo try: po = spo[subject] except: po = spo[subject] = {} try: o = po[predicate] except: o = po[predicate] = {} o[object] = 1 pos = self.__pos try: os = pos[predicate] except: os = pos[predicate] = {} try: s = os[object] except: s = os[object] = {} s[subject] = 1 osp = self.__osp try: sp = osp[object] except: sp = osp[object] = {} try: p = sp[subject] except: p = sp[subject] = {} p[predicate] = 1 def remove(self, triple_pattern, context=None): for (subject, predicate, object), c in list(self.triples(triple_pattern)): del self.__spo[subject][predicate][object] del self.__pos[predicate][object][subject] del self.__osp[object][subject][predicate] def triples(self, triple_pattern, context=None): """A generator over all the triples matching""" subject, predicate, object = triple_pattern if subject != ANY: # subject is given spo = self.__spo if subject in spo: subjectDictionary = spo[subject] if predicate != ANY: # subject+predicate is given if predicate in subjectDictionary: if object != ANY: # subject+predicate+object is given if object in subjectDictionary[predicate]: yield (subject, predicate, object), self.__contexts() else: # given object not found pass else: # subject+predicate is given, object unbound for o in subjectDictionary[predicate].keys(): yield (subject, predicate, o), self.__contexts() else: # given predicate not found pass else: # subject given, predicate unbound for p in subjectDictionary.keys(): if object != ANY: # object is given if object in subjectDictionary[p]: yield (subject, p, object), self.__contexts() else: # given object not found pass else: # object unbound for o in subjectDictionary[p].keys(): yield (subject, p, o), self.__contexts() else: # given subject not found pass elif predicate != ANY: # predicate is given, subject unbound pos = self.__pos if predicate in pos: predicateDictionary = pos[predicate] if object != ANY: # predicate+object is given, subject unbound if object in 
predicateDictionary: for s in predicateDictionary[object].keys(): yield (s, predicate, object), self.__contexts() else: # given object not found pass else: # predicate is given, object+subject unbound for o in predicateDictionary.keys(): for s in predicateDictionary[o].keys(): yield (s, predicate, o), self.__contexts() elif object != ANY: # object is given, subject+predicate unbound osp = self.__osp if object in osp: objectDictionary = osp[object] for s in objectDictionary.keys(): for p in objectDictionary[s].keys(): yield (s, p, object), self.__contexts() else: # subject+predicate+object unbound spo = self.__spo for s in spo.keys(): subjectDictionary = spo[s] for p in subjectDictionary.keys(): for o in subjectDictionary[p].keys(): yield (s, p, o), self.__contexts() def __len__(self, context=None): # @@ optimize i = 0 for triple in self.triples((None, None, None)): i += 1 return i def bind(self, prefix, namespace): self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): return self.__namespace.get(prefix, None) def prefix(self, namespace): return self.__prefix.get(namespace, None) def namespaces(self): for prefix, namespace in self.__namespace.items(): yield prefix, namespace def __contexts(self): return (c for c in []) # TODO: best way to return empty generator def query(self, query, initNs, initBindings, queryGraph, **kwargs): super(SimpleMemory, self).query( query, initNs, initBindings, queryGraph, **kwargs ) def update(self, update, initNs, initBindings, queryGraph, **kwargs): super(SimpleMemory, self).update( update, initNs, initBindings, queryGraph, **kwargs ) class Memory(Store): """\ An in memory implementation of a triple store. 
Same as SimpleMemory above, but is Context-aware, Graph-aware, and Formula-aware Authors: Ashley Sommer """ context_aware = True formula_aware = True graph_aware = True def __init__(self, configuration=None, identifier=None): super(Memory, self).__init__(configuration) self.identifier = identifier # indexed by [subject][predicate][object] self.__spo = {} # indexed by [predicate][object][subject] self.__pos = {} # indexed by [predicate][object][subject] self.__osp = {} self.__namespace = {} self.__prefix = {} self.__context_obj_map = {} self.__tripleContexts = {} self.__contextTriples = {None: set()} # all contexts used in store (unencoded) self.__all_contexts = set() # default context information for triples self.__defaultContexts = None def add(self, triple, context, quoted=False): """\ Add a triple to the store of triples. """ # add dictionary entries for spo[s][p][p] = 1 and pos[p][o][s] # = 1, creating the nested dictionaries where they do not yet # exits. Store.add(self, triple, context, quoted=quoted) if context is not None: self.__all_contexts.add(context) subject, predicate, object_ = triple spo = self.__spo try: po = spo[subject] except LookupError: po = spo[subject] = {} try: o = po[predicate] except LookupError: o = po[predicate] = {} try: _ = o[object_] # This cannot be reached if (s, p, o) was not inserted before. triple_exists = True except KeyError: o[object_] = 1 triple_exists = False self.__add_triple_context(triple, triple_exists, context, quoted) if triple_exists: # No need to insert twice this triple. 
return pos = self.__pos try: os = pos[predicate] except LookupError: os = pos[predicate] = {} try: s = os[object_] except LookupError: s = os[object_] = {} s[subject] = 1 osp = self.__osp try: sp = osp[object_] except LookupError: sp = osp[object_] = {} try: p = sp[subject] except LookupError: p = sp[subject] = {} p[predicate] = 1 def remove(self, triple_pattern, context=None): req_ctx = self.__ctx_to_str(context) for triple, c in self.triples(triple_pattern, context=context): subject, predicate, object_ = triple for ctx in self.__get_context_for_triple(triple): if context is not None and req_ctx != ctx: continue self.__remove_triple_context(triple, ctx) ctxs = self.__get_context_for_triple(triple, skipQuoted=True) if None in ctxs and (context is None or len(ctxs) == 1): # remove from default graph too self.__remove_triple_context(triple, None) if len(self.__get_context_for_triple(triple)) == 0: del self.__spo[subject][predicate][object_] del self.__pos[predicate][object_][subject] del self.__osp[object_][subject][predicate] del self.__tripleContexts[triple] if ( req_ctx is not None and req_ctx in self.__contextTriples and len(self.__contextTriples[req_ctx]) == 0 ): # all triples are removed out of this context # and it's not the default context so delete it del self.__contextTriples[req_ctx] if ( triple_pattern == (None, None, None) and context in self.__all_contexts and not self.graph_aware ): # remove the whole context self.__all_contexts.remove(context) def triples(self, triple_pattern, context=None): """A generator over all the triples matching""" req_ctx = self.__ctx_to_str(context) subject, predicate, object_ = triple_pattern # all triples case (no triple parts given as pattern) if subject is None and predicate is None and object_ is None: # Just dump all known triples from the given graph if req_ctx not in self.__contextTriples: return for triple in self.__contextTriples[req_ctx].copy(): yield triple, self.__contexts(triple) # optimize "triple in graph" 
case (all parts given) elif subject is not None and predicate is not None and object_ is not None: triple = triple_pattern try: _ = self.__spo[subject][predicate][object_] if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) except KeyError: return elif subject is not None: # subject is given spo = self.__spo if subject in spo: subjectDictionary = spo[subject] if predicate is not None: # subject+predicate is given if predicate in subjectDictionary: if object_ is not None: # subject+predicate+object is given if object_ in subjectDictionary[predicate]: triple = (subject, predicate, object_) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # given object not found pass else: # subject+predicate is given, object unbound for o in list(subjectDictionary[predicate].keys()): triple = (subject, predicate, o) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # given predicate not found pass else: # subject given, predicate unbound for p in list(subjectDictionary.keys()): if object_ is not None: # object is given if object_ in subjectDictionary[p]: triple = (subject, p, object_) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # given object not found pass else: # object unbound for o in list(subjectDictionary[p].keys()): triple = (subject, p, o) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # given subject not found pass elif predicate is not None: # predicate is given, subject unbound pos = self.__pos if predicate in pos: predicateDictionary = pos[predicate] if object_ is not None: # predicate+object is given, subject unbound if object_ in predicateDictionary: for s in list(predicateDictionary[object_].keys()): triple = (s, predicate, object_) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # given object not found pass else: # 
predicate is given, object+subject unbound for o in list(predicateDictionary.keys()): for s in list(predicateDictionary[o].keys()): triple = (s, predicate, o) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) elif object_ is not None: # object is given, subject+predicate unbound osp = self.__osp if object_ in osp: objectDictionary = osp[object_] for s in list(objectDictionary.keys()): for p in list(objectDictionary[s].keys()): triple = (s, p, object_) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) else: # subject+predicate+object unbound # Shouldn't get here if all other cases above worked correctly. spo = self.__spo for s in list(spo.keys()): subjectDictionary = spo[s] for p in list(subjectDictionary.keys()): for o in list(subjectDictionary[p].keys()): triple = (s, p, o) if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) def bind(self, prefix, namespace): self.__prefix[namespace] = prefix self.__namespace[prefix] = namespace def namespace(self, prefix): return self.__namespace.get(prefix, None) def prefix(self, namespace): return self.__prefix.get(namespace, None) def namespaces(self): for prefix, namespace in self.__namespace.items(): yield prefix, namespace def contexts(self, triple=None): if triple is None or triple == (None, None, None): return (context for context in self.__all_contexts) subj, pred, obj = triple try: _ = self.__spo[subj][pred][obj] return self.__contexts(triple) except KeyError: return (_ for _ in []) def __len__(self, context=None): ctx = self.__ctx_to_str(context) if ctx not in self.__contextTriples: return 0 return len(self.__contextTriples[ctx]) def add_graph(self, graph): if not self.graph_aware: Store.add_graph(self, graph) else: self.__all_contexts.add(graph) def remove_graph(self, graph): if not self.graph_aware: Store.remove_graph(self, graph) else: self.remove((None, None, None), graph) try: self.__all_contexts.remove(graph) 
except KeyError: pass # we didn't know this graph, no problem # internal utility methods below def __add_triple_context(self, triple, triple_exists, context, quoted): """add the given context to the set of contexts for the triple""" ctx = self.__ctx_to_str(context) quoted = bool(quoted) if triple_exists: # we know the triple exists somewhere in the store try: triple_context = self.__tripleContexts[triple] except KeyError: # triple exists with default ctx info # start with a copy of the default ctx info triple_context = self.__tripleContexts[ triple ] = self.__defaultContexts.copy() triple_context[ctx] = quoted if not quoted: triple_context[None] = quoted else: # the triple didn't exist before in the store if quoted: # this context only triple_context = self.__tripleContexts[triple] = {ctx: quoted} else: # default context as well triple_context = self.__tripleContexts[triple] = { ctx: quoted, None: quoted, } # if the triple is not quoted add it to the default context if not quoted: self.__contextTriples[None].add(triple) # always add the triple to given context, making sure it's initialized if ctx not in self.__contextTriples: self.__contextTriples[ctx] = set() self.__contextTriples[ctx].add(triple) # if this is the first ever triple in the store, set default ctx info if self.__defaultContexts is None: self.__defaultContexts = triple_context # if the context info is the same as default, no need to store it if triple_context == self.__defaultContexts: del self.__tripleContexts[triple] def __get_context_for_triple(self, triple, skipQuoted=False): """return a list of contexts (str) for the triple, skipping quoted contexts if skipQuoted==True""" ctxs = self.__tripleContexts.get(triple, self.__defaultContexts) if not skipQuoted: return ctxs.keys() return [ctx for ctx, quoted in ctxs.items() if not quoted] def __triple_has_context(self, triple, ctx): """return True if the triple exists in the given context""" return ctx in self.__tripleContexts.get(triple, 
self.__defaultContexts) def __remove_triple_context(self, triple, ctx): """remove the context from the triple""" ctxs = self.__tripleContexts.get(triple, self.__defaultContexts).copy() del ctxs[ctx] if ctxs == self.__defaultContexts: del self.__tripleContexts[triple] else: self.__tripleContexts[triple] = ctxs self.__contextTriples[ctx].remove(triple) def __ctx_to_str(self, ctx): if ctx is None: return None try: # ctx could be a graph. In that case, use its identifier ctx_str = "{}:{}".format(ctx.identifier.__class__.__name__, ctx.identifier) self.__context_obj_map[ctx_str] = ctx return ctx_str except AttributeError: # otherwise, ctx should be a URIRef or BNode or str if isinstance(ctx, str): ctx_str = "{}:{}".format(ctx.__class__.__name__, ctx) if ctx_str in self.__context_obj_map: return ctx_str self.__context_obj_map[ctx_str] = ctx return ctx_str raise RuntimeError("Cannot use that type of object as a Graph context") def __contexts(self, triple): """return a generator for all the non-quoted contexts (dereferenced) the encoded triple appears in""" return ( self.__context_obj_map.get(ctx_str, ctx_str) for ctx_str in self.__get_context_for_triple(triple, skipQuoted=True) if ctx_str is not None ) def query(self, query, initNs, initBindings, queryGraph, **kwargs): super(Memory, self).query(query, initNs, initBindings, queryGraph, **kwargs) def update(self, update, initNs, initBindings, queryGraph, **kwargs): super(Memory, self).update(update, initNs, initBindings, queryGraph, **kwargs) rdflib-6.1.1/rdflib/plugins/stores/regexmatching.py000066400000000000000000000144161415774155300224440ustar00rootroot00000000000000""" This wrapper intercepts calls through the store interface which make use of the REGEXTerm class to represent matches by REGEX instead of literal comparison. Implemented for stores that don't support this and essentially provides the support by replacing the REGEXTerms by wildcards (None) and matching against the results from the store it's wrapping. 
""" from rdflib.store import Store from rdflib.graph import Graph import re # Store is capable of doing its own REGEX matching NATIVE_REGEX = 0 # Store uses Python's re module internally for REGEX matching PYTHON_REGEX = 1 class REGEXTerm(str): """ REGEXTerm can be used in any term slot and is interpreted as a request to perform a REGEX match (not a string comparison) using the value (pre-compiled) for checking rdf:type matches """ def __init__(self, expr): self.compiledExpr = re.compile(expr) def __reduce__(self): return (REGEXTerm, (str(""),)) def regexCompareQuad(quad, regexQuad): for index in range(4): if isinstance(regexQuad[index], REGEXTerm) and not regexQuad[ index ].compiledExpr.match(quad[index]): return False return True class REGEXMatching(Store): def __init__(self, storage): self.storage = storage self.context_aware = storage.context_aware # NOTE: this store can't be formula_aware as it doesn't have enough # info to reverse the removal of a quoted statement. self.formula_aware = storage.formula_aware self.transaction_aware = storage.transaction_aware def open(self, configuration, create=True): return self.storage.open(configuration, create) def close(self, commit_pending_transaction=False): self.storage.close() def destroy(self, configuration): self.storage.destroy(configuration) def add(self, triple, context, quoted=False): (subject, predicate, object_) = triple self.storage.add((subject, predicate, object_), context, quoted) def remove(self, triple, context=None): (subject, predicate, object_) = triple if ( isinstance(subject, REGEXTerm) or isinstance(predicate, REGEXTerm) or isinstance(object_, REGEXTerm) or (context is not None and isinstance(context.identifier, REGEXTerm)) ): # One or more of the terms is a REGEX expression, so we must # replace it / them with wildcard(s)and match after we query. 
s = not isinstance(subject, REGEXTerm) and subject or None p = not isinstance(predicate, REGEXTerm) and predicate or None o = not isinstance(object_, REGEXTerm) and object_ or None c = ( (context is not None and not isinstance(context.identifier, REGEXTerm)) and context or None ) removeQuadList = [] for (s1, p1, o1), cg in self.storage.triples((s, p, o), c): for ctx in cg: ctx = ctx.identifier if regexCompareQuad( (s1, p1, o1, ctx), ( subject, predicate, object_, context is not None and context.identifier or context, ), ): removeQuadList.append((s1, p1, o1, ctx)) for s, p, o, c in removeQuadList: self.storage.remove((s, p, o), c and Graph(self, c) or c) else: self.storage.remove((subject, predicate, object_), context) def triples(self, triple, context=None): (subject, predicate, object_) = triple if ( isinstance(subject, REGEXTerm) or isinstance(predicate, REGEXTerm) or isinstance(object_, REGEXTerm) or (context is not None and isinstance(context.identifier, REGEXTerm)) ): # One or more of the terms is a REGEX expression, so we must # replace it / them with wildcard(s) and match after we query. 
s = not isinstance(subject, REGEXTerm) and subject or None p = not isinstance(predicate, REGEXTerm) and predicate or None o = not isinstance(object_, REGEXTerm) and object_ or None c = ( (context is not None and not isinstance(context.identifier, REGEXTerm)) and context or None ) for (s1, p1, o1), cg in self.storage.triples((s, p, o), c): matchingCtxs = [] for ctx in cg: if c is None: if context is None or context.identifier.compiledExpr.match( ctx.identifier ): matchingCtxs.append(ctx) else: matchingCtxs.append(ctx) if matchingCtxs and regexCompareQuad( (s1, p1, o1, None), (subject, predicate, object_, None) ): yield (s1, p1, o1), (c for c in matchingCtxs) else: for (s1, p1, o1), cg in self.storage.triples( (subject, predicate, object_), context ): yield (s1, p1, o1), cg def __len__(self, context=None): # NOTE: If the context is a REGEX this could be an expensive # proposition return self.storage.__len__(context) def contexts(self, triple=None): # NOTE: There is no way to control REGEX matching for this method at # this level as it only returns the contexts, not the matching # triples. 
for ctx in self.storage.contexts(triple): yield ctx def remove_context(self, identifier): self.storage.remove((None, None, None), identifier) def bind(self, prefix, namespace): self.storage.bind(prefix, namespace) def prefix(self, namespace): return self.storage.prefix(namespace) def namespace(self, prefix): return self.storage.namespace(prefix) def namespaces(self): return self.storage.namespaces() def commit(self): self.storage.commit() def rollback(self): self.storage.rollback() rdflib-6.1.1/rdflib/plugins/stores/sparqlconnector.py000066400000000000000000000133311415774155300230270ustar00rootroot00000000000000import logging from typing import Optional, TYPE_CHECKING, Tuple from urllib.request import urlopen, Request from urllib.parse import urlencode from urllib.error import HTTPError, URLError import base64 from io import BytesIO from rdflib.query import Result from rdflib import BNode log = logging.getLogger(__name__) if TYPE_CHECKING: import typing_extensions as te class SPARQLConnectorException(Exception): pass # TODO: Pull in these from the result implementation plugins? _response_mime_types = { "xml": "application/sparql-results+xml, application/rdf+xml", "json": "application/sparql-results+json", "csv": "text/csv", "tsv": "text/tab-separated-values", "application/rdf+xml": "application/rdf+xml", } class SPARQLConnector(object): """ this class deals with nitty gritty details of talking to a SPARQL server """ def __init__( self, query_endpoint: Optional[str] = None, update_endpoint: Optional[str] = None, returnFormat: str = "xml", method: "te.Literal['GET', 'POST', 'POST_FORM']" = "GET", auth: Optional[Tuple[str, str]] = None, **kwargs, ): """ auth, if present, must be a tuple of (username, password) used for Basic Authentication Any additional keyword arguments will be passed to to the request, and can be used to setup timesouts etc. 
""" self.returnFormat = returnFormat self.query_endpoint = query_endpoint self.update_endpoint = update_endpoint self.kwargs = kwargs self.method = method if auth is not None: if type(auth) != tuple: raise SPARQLConnectorException("auth must be a tuple") if len(auth) != 2: raise SPARQLConnectorException("auth must be a tuple (user, password)") base64string = base64.b64encode(bytes("%s:%s" % auth, "ascii")) self.kwargs.setdefault("headers", {}) self.kwargs["headers"].update( {"Authorization": "Basic %s" % base64string.decode("utf-8")} ) @property def method(self): return self._method @method.setter def method(self, method): if method not in ("GET", "POST", "POST_FORM"): raise SPARQLConnectorException( 'Method must be "GET", "POST", or "POST_FORM"' ) self._method = method def query(self, query, default_graph: str = None, named_graph: str = None): if not self.query_endpoint: raise SPARQLConnectorException("Query endpoint not set!") params = {} # this test ensures we don't have a useless (BNode) default graph URI, which calls to Graph().query() will add if default_graph is not None and type(default_graph) != BNode: params["default-graph-uri"] = default_graph headers = {"Accept": _response_mime_types[self.returnFormat]} args = dict(self.kwargs) # merge params/headers dicts args.setdefault("params", {}) args.setdefault("headers", {}) args["headers"].update(headers) if self.method == "GET": params["query"] = query args["params"].update(params) qsa = "?" + urlencode(args["params"]) try: res = urlopen( Request(self.query_endpoint + qsa, headers=args["headers"]) ) except Exception as e: raise ValueError( "You did something wrong formulating either the URI or your SPARQL query" ) elif self.method == "POST": args["headers"].update({"Content-Type": "application/sparql-query"}) qsa = "?" 
+ urlencode(params) try: res = urlopen( Request( self.query_endpoint + qsa, data=query.encode(), headers=args["headers"], ) ) except HTTPError as e: return e.code, str(e), None elif self.method == "POST_FORM": params["query"] = query args["params"].update(params) try: res = urlopen( Request( self.query_endpoint, data=urlencode(args["params"]).encode(), headers=args["headers"], ) ) except HTTPError as e: return e.code, str(e), None else: raise SPARQLConnectorException("Unknown method %s" % self.method) return Result.parse( BytesIO(res.read()), content_type=res.headers["Content-Type"].split(";")[0] ) def update( self, query, default_graph: Optional[str] = None, named_graph: Optional[str] = None, ): if not self.update_endpoint: raise SPARQLConnectorException("Query endpoint not set!") params = {} if default_graph is not None: params["using-graph-uri"] = default_graph if named_graph is not None: params["using-named-graph-uri"] = named_graph headers = { "Accept": _response_mime_types[self.returnFormat], "Content-Type": "application/sparql-update", } args = dict(self.kwargs) # other QSAs args.setdefault("params", {}) args["params"].update(params) args.setdefault("headers", {}) args["headers"].update(headers) qsa = "?" + urlencode(args["params"]) res = urlopen( Request( self.update_endpoint + qsa, data=query.encode(), headers=args["headers"] ) ) rdflib-6.1.1/rdflib/plugins/stores/sparqlstore.py000066400000000000000000000762261415774155300222050ustar00rootroot00000000000000# -*- coding: utf-8 -*- # """ This is an RDFLib store around Ivan Herman et al.'s SPARQL service wrapper. 
This was first done in layer-cake, and then ported to RDFLib """ import re import collections from .sparqlconnector import SPARQLConnector from rdflib.plugins.stores.regexmatching import NATIVE_REGEX from rdflib.store import Store from rdflib import Variable, BNode from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.term import Node from typing import Any, Callable, Dict, Optional, Union, Tuple # Defines some SPARQL keywords LIMIT = "LIMIT" OFFSET = "OFFSET" ORDERBY = "ORDER BY" BNODE_IDENT_PATTERN = re.compile(r"(?P

"\tx" } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-esc-04.rq000066400000000000000000000001121415774155300240110ustar00rootroot00000000000000PREFIX : SELECT * WHERE { <\u0078> :\u0070 ?xx\u0078 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-esc-05.rq000066400000000000000000000002021415774155300240120ustar00rootroot00000000000000PREFIX : SELECT * # Comments can contain \ u # <\u0078> :\u0070 ?xx\u0078 WHERE { <\u0078> :\u0070 ?xx\u0078 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-ask-02.rq000066400000000000000000000000071415774155300247570ustar00rootroot00000000000000ASK {} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-construct01.rq000066400000000000000000000000701415774155300261470ustar00rootroot00000000000000CONSTRUCT { ?s . ?s ?o } WHERE {?s ?p ?o} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-construct02.rq000066400000000000000000000000711415774155300261510ustar00rootroot00000000000000CONSTRUCT { ?s . ?s ?o .} WHERE {?s ?p ?o} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-construct03.rq000066400000000000000000000002601415774155300261520ustar00rootroot00000000000000PREFIX rdf: CONSTRUCT { [] rdf:subject ?s ; rdf:predicate ?p ; rdf:object ?o } WHERE {?s ?p ?o} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-construct04.rq000066400000000000000000000002621415774155300261550ustar00rootroot00000000000000PREFIX rdf: CONSTRUCT { [] rdf:subject ?s ; rdf:predicate ?p ; rdf:object ?o . 
} WHERE {?s ?p ?o} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-construct06.rq000066400000000000000000000000261415774155300261550ustar00rootroot00000000000000CONSTRUCT {} WHERE {} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-describe01.rq000066400000000000000000000000151415774155300257020ustar00rootroot00000000000000DESCRIBE rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-describe02.rq000066400000000000000000000000471415774155300257100ustar00rootroot00000000000000DESCRIBE ?u WHERE { ?u . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-select-01.rq000066400000000000000000000000231415774155300254550ustar00rootroot00000000000000SELECT * WHERE { } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-form-select-02.rq000066400000000000000000000000151415774155300254570ustar00rootroot00000000000000SELECT * { } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-function-01.rq000066400000000000000000000001051415774155300250630ustar00rootroot00000000000000PREFIX q: SELECT * WHERE { FILTER (q:name()) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-function-02.rq000066400000000000000000000001061415774155300250650ustar00rootroot00000000000000PREFIX q: SELECT * WHERE { FILTER (q:name( )) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-function-03.rq000066400000000000000000000001061415774155300250660ustar00rootroot00000000000000PREFIX q: SELECT * WHERE { FILTER (q:name( )) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-function-04.rq000066400000000000000000000001651415774155300250740ustar00rootroot00000000000000PREFIX q: SELECT * WHERE { FILTER (q:name(1 )) . FILTER (q:name(1,2)) . 
FILTER (q:name(1 ,2))} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-01.rq000066400000000000000000000000351415774155300246550ustar00rootroot00000000000000SELECT * WHERE { } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-02.rq000066400000000000000000000000351415774155300246560ustar00rootroot00000000000000SELECT * WHERE { _:x } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-03.rq000066400000000000000000000000331415774155300246550ustar00rootroot00000000000000SELECT * WHERE { 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-04.rq000066400000000000000000000000351415774155300246600ustar00rootroot00000000000000SELECT * WHERE { +11 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-05.rq000066400000000000000000000000341415774155300246600ustar00rootroot00000000000000SELECT * WHERE { -1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-06.rq000066400000000000000000000000351415774155300246620ustar00rootroot00000000000000SELECT * WHERE { 1.0 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-07.rq000066400000000000000000000000361415774155300246640ustar00rootroot00000000000000SELECT * WHERE { +1.0 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-08.rq000066400000000000000000000000361415774155300246650ustar00rootroot00000000000000SELECT * WHERE { -1.0 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-09.rq000066400000000000000000000000371415774155300246670ustar00rootroot00000000000000SELECT * WHERE { 1.0e0 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-10.rq000066400000000000000000000000411415774155300246520ustar00rootroot00000000000000SELECT * WHERE { +1.0e+1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-11.rq000066400000000000000000000000411415774155300246530ustar00rootroot00000000000000SELECT * WHERE { -1.0e-1 } 
rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-12.rq000066400000000000000000000000721415774155300246600ustar00rootroot00000000000000# Legal, if unusual, IRIs SELECT * WHERE { } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-13.rq000066400000000000000000000001321415774155300246560ustar00rootroot00000000000000# Legal, if unusual, IRIs BASE SELECT * WHERE { <#x> } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-general-14.rq000066400000000000000000000001521415774155300246610ustar00rootroot00000000000000# Legal, if unusual, IRIs BASE SELECT * WHERE { <¶m=value> } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-graph-01.rq000066400000000000000000000001021415774155300243340ustar00rootroot00000000000000PREFIX : SELECT * WHERE { GRAPH ?g { } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-graph-02.rq000066400000000000000000000001021415774155300243350ustar00rootroot00000000000000PREFIX : SELECT * WHERE { GRAPH :a { } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-graph-03.rq000066400000000000000000000001131415774155300243400ustar00rootroot00000000000000PREFIX : SELECT * WHERE { GRAPH ?g { :x :b ?a } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-graph-04.rq000066400000000000000000000001261415774155300243450ustar00rootroot00000000000000PREFIX : SELECT * WHERE { :x :p :z GRAPH ?g { :x :b ?a } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-graph-05.rq000066400000000000000000000001561415774155300243510ustar00rootroot00000000000000PREFIX : SELECT * WHERE { :x :p :z GRAPH ?g { :x :b ?a . 
GRAPH ?g2 { :x :p ?x } } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-keywords-01.rq000066400000000000000000000002041415774155300251050ustar00rootroot00000000000000# use keyword FILTER as a namespace prefix PREFIX FILTER: SELECT * WHERE { ?x FILTER:foo ?z FILTER (?z) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-keywords-02.rq000066400000000000000000000001651415774155300251140ustar00rootroot00000000000000# use keyword FILTER as a local name PREFIX : SELECT * WHERE { ?x :FILTER ?z FILTER (?z) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-keywords-03.rq000066400000000000000000000001651415774155300251150ustar00rootroot00000000000000# use keyword UNION as a namespace prefix PREFIX UNION: SELECT * WHERE { ?x UNION:foo ?z } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-lists-01.rq000066400000000000000000000000721415774155300243770ustar00rootroot00000000000000PREFIX : SELECT * WHERE { () :p 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-lists-02.rq000066400000000000000000000000731415774155300244010ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ( ) :p 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-lists-03.rq000066400000000000000000000000741415774155300244030ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ( ) :p 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-lists-04.rq000066400000000000000000000000771415774155300244070ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ( 1 2 ) :p 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql2/syntax-lists-05.rq000066400000000000000000000000721415774155300244030ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ( 1 2 ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/000077500000000000000000000000001415774155300211355ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/manifest.ttl000066400000000000000000000340561415774155300235000ustar00rootroot00000000000000@prefix rdf: . @prefix : . 
@prefix rdfs: . @prefix mf: . @prefix qt: . @prefix dawgt: . <> rdf:type mf:Manifest ; rdfs:comment "Syntax tests syntax-sparql3" ; mf:entries ( :syn-01 :syn-02 :syn-03 :syn-04 :syn-05 :syn-06 :syn-07 :syn-08 :syn-bad-01 :syn-bad-02 :syn-bad-03 :syn-bad-04 :syn-bad-05 :syn-bad-06 :syn-bad-07 :syn-bad-08 :syn-bad-09 :syn-bad-10 :syn-bad-11 :syn-bad-12 :syn-bad-13 :syn-bad-14 :syn-bad-15 :syn-bad-16 :syn-bad-17 :syn-bad-18 :syn-bad-19 :syn-bad-20 :syn-bad-21 :syn-bad-22 :syn-bad-23 :syn-bad-24 :syn-bad-25 :syn-bad-26 :syn-bad-27 :syn-bad-28 :syn-bad-29 :syn-bad-30 :syn-bad-31 :bnode-dot :bnodes-missing-pvalues-01 :bnodes-missing-pvalues-02 :empty-optional-01 :empty-optional-02 :filter-missing-parens :lone-list :lone-node :blabel-cross-filter :blabel-cross-graph-bad :blabel-cross-optional-bad :blabel-cross-union-bad ) . :syn-01 mf:name "syn-01.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-02 mf:name "syn-02.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-03 mf:name "syn-03.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-04 mf:name "syn-04.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-05 mf:name "syn-05.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-06 mf:name "syn-06.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-07 mf:name "syn-07.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-08 mf:name "syn-08.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-01 mf:name "syn-bad-01.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
:syn-bad-02 mf:name "syn-bad-02.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-03 mf:name "syn-bad-03.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-04 mf:name "syn-bad-04.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-05 mf:name "syn-bad-05.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-06 mf:name "syn-bad-06.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-07 mf:name "syn-bad-07.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-08 mf:name "syn-bad-08.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-09 mf:name "syn-bad-09.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-10 mf:name "syn-bad-10.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-11 mf:name "syn-bad-11.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-12 mf:name "syn-bad-12.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-13 mf:name "syn-bad-13.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-14 mf:name "syn-bad-14.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-15 mf:name "syn-bad-15.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
:syn-bad-16 mf:name "syn-bad-16.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-17 mf:name "syn-bad-17.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-18 mf:name "syn-bad-18.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-19 mf:name "syn-bad-19.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-20 mf:name "syn-bad-20.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-21 mf:name "syn-bad-21.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-22 mf:name "syn-bad-22.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-23 mf:name "syn-bad-23.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-24 mf:name "syn-bad-24.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-25 mf:name "syn-bad-25.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-26 mf:name "syn-bad-26.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-27 mf:name "syn-bad-27.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-28 mf:name "syn-bad-28.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-29 mf:name "syn-bad-29.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
:syn-bad-30 mf:name "syn-bad-30.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-31 mf:name "syn-bad-31.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :bnode-dot mf:name "syn-bad-bnode-dot.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :bnodes-missing-pvalues-01 mf:name "syn-bad-bnodes-missing-pvalues-01.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :bnodes-missing-pvalues-02 mf:name "syn-bad-bnodes-missing-pvalues-02.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :empty-optional-01 mf:name "syn-bad-empty-optional-01.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :empty-optional-02 mf:name "syn-bad-empty-optional-02.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :filter-missing-parens mf:name "syn-bad-filter-missing-parens.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :lone-list mf:name "syn-bad-lone-list.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :lone-node mf:name "syn-bad-lone-node.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :blabel-cross-filter mf:name "syn-blabel-cross-filter" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :blabel-cross-graph-bad mf:name "syn-blabel-cross-graph-bad" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :blabel-cross-optional-bad mf:name "syn-blabel-cross-optional-bad" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
:blabel-cross-union-bad mf:name "syn-blabel-cross-union-bad" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-01.rq000066400000000000000000000000611415774155300225250ustar00rootroot00000000000000# Dot after triple SELECT * WHERE { ?s ?p ?o . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-02.rq000066400000000000000000000000621415774155300225270ustar00rootroot00000000000000# No dot after triple SELECT * WHERE { ?s ?p ?o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-03.rq000066400000000000000000000000511415774155300225260ustar00rootroot00000000000000SELECT * WHERE { ?s ?p ?o . ?s ?p ?o . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-04.rq000066400000000000000000000000601415774155300225270ustar00rootroot00000000000000# No dot SELECT * WHERE { ?s ?p ?o . ?s ?p ?o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-05.rq000066400000000000000000000000731415774155300225340ustar00rootroot00000000000000# DOT after non-triples SELECT * WHERE { FILTER (?o>5) . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-06.rq000066400000000000000000000001041415774155300225300ustar00rootroot00000000000000# DOT after non-triples SELECT * WHERE { FILTER (?o>5) . ?s ?p ?o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-07.rq000066400000000000000000000001261415774155300225350ustar00rootroot00000000000000# Trailing ; PREFIX : SELECT * WHERE { :s :p :o ; FILTER(?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-08.rq000066400000000000000000000001131415774155300225320ustar00rootroot00000000000000# Broken ; PREFIX : SELECT * WHERE { :s :p :o ; . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-01.rq000066400000000000000000000001251415774155300232520ustar00rootroot00000000000000# More a test that bad syntax tests work! 
PREFIX ex: SELECT * rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-02.rq000066400000000000000000000001401415774155300232500ustar00rootroot00000000000000# Missing DOT, 2 triples PREFIX : SELECT * { :s1 :p1 :o1 :s2 :p2 :o2 . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-03.rq000066400000000000000000000001451415774155300232560ustar00rootroot00000000000000# Missing DOT between triples PREFIX : SELECT * { :s1 :p1 :o1 :s2 :p2 :o2 . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-04.rq000066400000000000000000000001571415774155300232620ustar00rootroot00000000000000# Missing DOT after ; between triples PREFIX : SELECT * { :s1 :p1 :o1 ; :s2 :p2 :o2 . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-05.rq000066400000000000000000000000471415774155300232610ustar00rootroot00000000000000# DOT, no triples SELECT * WHERE { . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-06.rq000066400000000000000000000000511415774155300232550ustar00rootroot00000000000000# DOT, no triples SELECT * WHERE { . . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-07.rq000066400000000000000000000000621415774155300232600ustar00rootroot00000000000000# DOT, then triples SELECT * WHERE { . ?s ?p ?o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-08.rq000066400000000000000000000000601415774155300232570ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o . . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-09.rq000066400000000000000000000000571415774155300232660ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o .. } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-10.rq000066400000000000000000000000741415774155300232550ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o . . 
?s1 ?p1 ?o1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-11.rq000066400000000000000000000000731415774155300232550ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o .. ?s1 ?p1 ?o1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-12.rq000066400000000000000000000000741415774155300232570ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o . . ?s1 ?p1 ?o1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-13.rq000066400000000000000000000000751415774155300232610ustar00rootroot00000000000000# Multiple DOTs SELECT * WHERE { ?s ?p ?o . ?s1 ?p1 ?o1 .. } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-14.rq000066400000000000000000000000621415774155300232560ustar00rootroot00000000000000# DOT, no triples SELECT * WHERE { . FILTER(?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-15.rq000066400000000000000000000000531415774155300232570ustar00rootroot00000000000000# Broken ; SELECT * WHERE { ; FILTER(?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-16.rq000066400000000000000000000001111415774155300232530ustar00rootroot00000000000000# Broken ; PREFIX : SELECT * WHERE { :s ; :p :o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-17.rq000066400000000000000000000001061415774155300232600ustar00rootroot00000000000000# Broken ; PREFIX : SELECT * WHERE { :s :p ; } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-18.rq000066400000000000000000000001211415774155300232560ustar00rootroot00000000000000# Broken ; PREFIX : SELECT * WHERE { :s :p ; FILTER(?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-19.rq000066400000000000000000000001141415774155300232610ustar00rootroot00000000000000# Broken ; PREFIX : SELECT * WHERE { :s :p :o . 
; } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-20.rq000066400000000000000000000001121415774155300232470ustar00rootroot00000000000000# Broken , PREFIX : SELECT * WHERE { :s , :p :o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-21.rq000066400000000000000000000001131415774155300232510ustar00rootroot00000000000000# Broken , PREFIX : SELECT * WHERE { :s :p , :o } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-22.rq000066400000000000000000000001071415774155300232550ustar00rootroot00000000000000# Broken , PREFIX : SELECT * WHERE { :s :p , } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-23.rq000066400000000000000000000001261415774155300232570ustar00rootroot00000000000000# Broken , can't trail PREFIX : SELECT * WHERE { :s :p :o , } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-24.rq000066400000000000000000000001401415774155300232540ustar00rootroot00000000000000# Broken , (should be ;) PREFIX : SELECT * WHERE { :s :p1 :o1 , :p2 :o2} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-25.rq000066400000000000000000000000131415774155300232540ustar00rootroot00000000000000CONSTRUCT rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-26.rq000066400000000000000000000002261415774155300232630ustar00rootroot00000000000000# Tokenizing matters. # "longest token rule" means this isn't a "<" and "&&" PREFIX : SELECT * WHERE { FILTER (?x?y) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-27.rq000066400000000000000000000000761415774155300232670ustar00rootroot00000000000000PREFIX : SELECT * WHERE { :x [] :q } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-28.rq000066400000000000000000000000771415774155300232710ustar00rootroot00000000000000PREFIX : SELECT * WHERE { :x _:a :q } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-29.rq000066400000000000000000000001151415774155300232630ustar00rootroot00000000000000# Syntactic blank node in a filter. 
SELECT * WHERE { _:x FILTER(_:x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-30.rq000066400000000000000000000001211415774155300232500ustar00rootroot00000000000000# Syntactic blank node in a filter. SELECT * WHERE { _:x FILTER(_:x < 3) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-31.rq000066400000000000000000000001021415774155300232500ustar00rootroot00000000000000PREFIX : SELECT * WHERE { GRAPH [] { } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-bnode-dot.rq000066400000000000000000000000651415774155300247100ustar00rootroot00000000000000# NegativeSyntax/bnode-dot.rq SELECT * WHERE {[] . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-bnodes-missing-pvalues-01.rq000066400000000000000000000001531415774155300276470ustar00rootroot00000000000000# NegativeSyntax/bnodes-missing-pvalues.rq PREFIX : SELECT * WHERE { [,] :p [;] . } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-bnodes-missing-pvalues-02.rq000066400000000000000000000001201415774155300276420ustar00rootroot00000000000000# NegativeSyntax/bnodes-missing-pvalues-02.rq SELECT * WHERE {() . [,] . 
[,;] } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-empty-optional-01.rq000066400000000000000000000001051415774155300262270ustar00rootroot00000000000000# NegativeSyntax/empty-optional.rq SELECT * { OPTIONAL FILTER (?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-empty-optional-02.rq000066400000000000000000000001321415774155300262300ustar00rootroot00000000000000# NegativeSyntax/empty-optional-02.rq SELECT * { OPTIONAL GRAPH ?v OPTIONAL FILTER (?x) } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-filter-missing-parens.rq000066400000000000000000000001121415774155300272500ustar00rootroot00000000000000# NegativeSyntax/filter-missing-parens.rq SELECT * { ?s ?p ?o FILTER ?x } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-lone-list.rq000066400000000000000000000000641415774155300247420ustar00rootroot00000000000000# NegativeSyntax/lone-list.rq SELECT * WHERE { () } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-bad-lone-node.rq000066400000000000000000000000631415774155300247130ustar00rootroot00000000000000# NegativeSyntax/lone-node.rq SELECT * WHERE {} rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-blabel-cross-filter.rq000066400000000000000000000004241415774155300261430ustar00rootroot00000000000000# $Id: syn-blabel-cross-filter.rq,v 1.2 2007/04/09 21:40:22 eric Exp $ # BNode label used across a FILTER. PREFIX : ASK { _:who :homepage ?homepage FILTER REGEX(?homepage, "^http://example.org/") _:who :schoolHomepage ?schoolPage } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-blabel-cross-graph-bad.rq000066400000000000000000000004141415774155300265020ustar00rootroot00000000000000# $Id: syn-blabel-cross-graph-bad.rq,v 1.2 2007/04/18 23:11:57 eric Exp $ # BNode label used across a GRAPH. 
PREFIX : ASK { _:who :homepage ?homepage GRAPH ?g { ?someone :made ?homepage } _:who :schoolHomepage ?schoolPage } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-blabel-cross-optional-bad.rq000066400000000000000000000006221415774155300272270ustar00rootroot00000000000000# $Id: syn-blabel-cross-optional-bad.rq,v 1.5 2007/09/04 15:04:22 eric Exp $ # BNode label used across an OPTIONAL. # This isn't necessarily a *syntax* test, but references to bnode labels # may not span basic graph patterns. PREFIX foaf: ASK { _:who foaf:homepage ?homepage OPTIONAL { ?someone foaf:made ?homepage } _:who foaf:schoolHomepage ?schoolPage } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql3/syn-blabel-cross-union-bad.rq000066400000000000000000000006651415774155300265410ustar00rootroot00000000000000# $Id: syn-blabel-cross-union-bad.rq,v 1.4 2007/09/04 15:04:09 eric Exp $ # BNode label used across a UNION. # This isn't necessarily a *syntax* test, but references to bnode labels # may not span basic graph patterns. PREFIX foaf: ASK { _:who foaf:homepage ?homepage { ?someone foaf:made ?homepage } UNION { ?homepage foaf:maker ?someone } _:who foaf:schoolHomepage ?schoolPage } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/000077500000000000000000000000001415774155300211365ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/manifest.ttl000066400000000000000000000076331415774155300235020ustar00rootroot00000000000000@prefix rdf: . @prefix : . @prefix rdfs: . @prefix mf: . @prefix qt: . @prefix dawgt: . <> rdf:type mf:Manifest ; rdfs:comment "Syntax tests syntax-sparql4" ; mf:entries ( :syn-09 :syn-10 :syn-11 :syn-bad-34 :syn-bad-35 :syn-bad-36 :syn-bad-37 :syn-bad-38 :syn-bad-OPT-breaks-BGP :syn-bad-UNION-breaks-BGP :syn-bad-GRAPH-breaks-BGP :syn-leading-digits-in-prefixed-names) . :syn-09 mf:name "syn-09.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
:syn-10 mf:name "syn-10.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-11 mf:name "syn-11.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-leading-digits-in-prefixed-names mf:name "syn-leading-digits-in-prefixed-names.rq" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; rdf:type mf:PositiveSyntaxTest ; mf:action . :syn-bad-34 mf:name "syn-bad-34.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-35 mf:name "syn-bad-35.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-36 mf:name "syn-bad-36.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-37 mf:name "syn-bad-37.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-38 mf:name "syn-bad-38.rq" ; rdf:type mf:NegativeSyntaxTest ; mf:action ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :syn-bad-OPT-breaks-BGP mf:name "syn-bad-OPT-breaks-BGP" ; rdfs:comment "bad: re-used BNode label after OPTIONAL" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; rdf:type mf:NegativeSyntaxTest ; mf:action . :syn-bad-UNION-breaks-BGP mf:name "syn-bad-UNION-breaks-BGP" ; rdfs:comment "bad: re-used BNode label after UNION" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; rdf:type mf:NegativeSyntaxTest ; mf:action . :syn-bad-GRAPH-breaks-BGP mf:name "syn-bad-GRAPH-breaks-BGP" ; rdfs:comment "bad: re-used BNode label after GRAPH" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; rdf:type mf:NegativeSyntaxTest ; mf:action . rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-09.rq000066400000000000000000000001131415774155300225340ustar00rootroot00000000000000PREFIX : SELECT * WHERE { _:a ?p ?v . 
_:a ?q 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-10.rq000066400000000000000000000001451415774155300225310ustar00rootroot00000000000000PREFIX : SELECT * WHERE { { _:a ?p ?v . _:a ?q _:a } UNION { _:b ?q _:c } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-11.rq000066400000000000000000000001321415774155300225260ustar00rootroot00000000000000PREFIX : SELECT * WHERE { _:a ?p ?v . FILTER(true) . [] ?q _:a } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-34.rq000066400000000000000000000001151415774155300232600ustar00rootroot00000000000000PREFIX : SELECT * WHERE { _:a ?p ?v . { _:a ?q 1 } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-35.rq000066400000000000000000000001161415774155300232620ustar00rootroot00000000000000PREFIX : SELECT * WHERE { { _:a ?p ?v . } _:a ?q 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-36.rq000066400000000000000000000001301415774155300232570ustar00rootroot00000000000000PREFIX : SELECT * WHERE { { _:a ?p ?v . } UNION { _:a ?q 1 } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-37.rq000066400000000000000000000001161415774155300232640ustar00rootroot00000000000000PREFIX : SELECT * WHERE { { _:a ?p ?v . } _:a ?q 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-38.rq000066400000000000000000000001251415774155300232650ustar00rootroot00000000000000PREFIX : SELECT * WHERE { _:a ?p ?v . OPTIONAL {_:a ?q 1 } } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-GRAPH-breaks-BGP.rq000066400000000000000000000003171415774155300255520ustar00rootroot00000000000000# bad: re-used BNode label after GRAPH # $Id: syn-bad-GRAPH-breaks-BGP.rq,v 1.1 2007/02/15 15:14:31 eric Exp $ PREFIX : SELECT * WHERE { _:a ?p ?v . 
GRAPH ?g { ?s ?p ?v } _:a ?q 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-OPT-breaks-BGP.rq000066400000000000000000000003201415774155300253450ustar00rootroot00000000000000# bad: re-used BNode label after OPTIONAL # $Id: syn-bad-OPT-breaks-BGP.rq,v 1.1 2007/02/15 15:14:31 eric Exp $ PREFIX : SELECT * WHERE { _:a ?p ?v . OPTIONAL { ?s ?p ?v } _:a ?q 1 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-bad-UNION-breaks-BGP.rq000066400000000000000000000005141415774155300256000ustar00rootroot00000000000000# bad: re-used BNode label after UNION # $Id: syn-bad-UNION-breaks-BGP.rq,v 1.3 2007/09/04 15:03:54 eric Exp $ # This isn't necessarily a *syntax* test, but references to bnode labels # may not span basic graph patterns. PREFIX : SELECT * WHERE { _:a ?p ?v1 { ?s ?o } UNION { ?s ?o } _:a ?p ?v2 } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql4/syn-leading-digits-in-prefixed-names.rq000066400000000000000000000004311415774155300305040ustar00rootroot00000000000000PREFIX dob: PREFIX time: PREFIX dc: SELECT ?desc WHERE { dob:1D a time:ProperInterval; dc:description ?desc. } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql5/000077500000000000000000000000001415774155300211375ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql5/manifest.ttl000066400000000000000000000022401415774155300234700ustar00rootroot00000000000000@prefix rdf: . @prefix : . @prefix rdfs: . @prefix mf: . @prefix qt: . @prefix dawgt: . <> rdf:type mf:Manifest ; rdfs:comment "Syntax tests syntax-sparql5" ; mf:entries ( :syntax-reduced-01 :syntax-reduced-02 ). :syntax-reduced-01 mf:name "syntax-reduced-01.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; . :syntax-reduced-02 mf:name "syntax-reduced-02.rq" ; rdf:type mf:PositiveSyntaxTest ; mf:action ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; . 
rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql5/syntax-reduced-01.rq000066400000000000000000000000441415774155300246560ustar00rootroot00000000000000SELECT REDUCED * WHERE { ?x ?y ?z } rdflib-6.1.1/test/DAWG/data-r2-1.0/syntax-sparql5/syntax-reduced-02.rq000066400000000000000000000000501415774155300246540ustar00rootroot00000000000000SELECT REDUCED ?x ?y WHERE { ?x ?y ?z } rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/000077500000000000000000000000001415774155300206155ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/data-01.ttl000066400000000000000000000001001415774155300224600ustar00rootroot00000000000000@prefix : . :x :p :v1 . :x :p :v2 . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/data-02.ttl000066400000000000000000000001131415774155300224650ustar00rootroot00000000000000@prefix : . :y :y :x . :x :y :y . :y :x :y . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/data-03.ttl000066400000000000000000000001041415774155300224660ustar00rootroot00000000000000@prefix : . :x :p :v1.1 . :x :p :v2.1 . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-data-01.ttl000066400000000000000000000012551415774155300234140ustar00rootroot00000000000000@prefix foaf: . @prefix rdf: . @prefix rdfs: . _:alice rdf:type foaf:Person ; foaf:name "Alice" ; foaf:mbox ; foaf:knows _:bob ; . _:bob rdf:type foaf:Person ; foaf:name "Bob" ; foaf:knows _:alice ; foaf:mbox ; foaf:mbox ; . _:eve rdf:type foaf:Person ; foaf:name "Eve" ; foaf:knows _:fred ; . _:fred rdf:type foaf:Person ; foaf:mbox . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-tp-01.rq000066400000000000000000000001031415774155300227340ustar00rootroot00000000000000PREFIX : SELECT * WHERE { :x ?p ?q . } rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-tp-02.rq000066400000000000000000000001041415774155300227360ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ?x :p ?q . 
} rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-tp-03.rq000066400000000000000000000000361415774155300227430ustar00rootroot00000000000000SELECT * WHERE { ?a ?a ?b . } rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-tp-04.rq000066400000000000000000000002671415774155300227520ustar00rootroot00000000000000PREFIX rdf: PREFIX foaf: SELECT ?name WHERE { ?x rdf:type foaf:Person . ?x foaf:name ?name . } rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/dawg-tp-05.rq000066400000000000000000000001041415774155300227410ustar00rootroot00000000000000PREFIX : SELECT * WHERE { ?x :p ?q . } rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/manifest.ttl000066400000000000000000000045451415774155300231600ustar00rootroot00000000000000@prefix rdf: . @prefix : . @prefix rdfs: . @prefix dawgt: . @prefix mf: . @prefix qt: . <> rdf:type mf:Manifest ; rdfs:comment "Some simple DAWG query evaluation test cases" ; mf:entries ( :dawg-triple-pattern-001 :dawg-triple-pattern-002 :dawg-triple-pattern-003 :dawg-triple-pattern-004 ). :dawg-triple-pattern-001 a mf:QueryEvaluationTest ; mf:name "dawg-triple-pattern-001" ; rdfs:comment "Simple triple match" ; mf:action [ qt:query ; qt:data ] ; mf:result ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :dawg-triple-pattern-002 a mf:QueryEvaluationTest ; mf:name "dawg-triple-pattern-002" ; rdfs:comment "Simple triple match" ; mf:action [ qt:query ; qt:data ] ; mf:result ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :dawg-triple-pattern-003 a mf:QueryEvaluationTest ; mf:name "dawg-triple-pattern-003" ; rdfs:comment "Simple triple match - repeated variable" ; mf:action [ qt:query ; qt:data ] ; mf:result ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . :dawg-triple-pattern-004 a mf:QueryEvaluationTest ; mf:name "dawg-triple-pattern-004" ; rdfs:comment "Simple triple match - two triples, common variable" ; mf:action [ qt:query ; qt:data ] ; mf:result ; dawgt:approvedBy ; dawgt:approval dawgt:Approved . 
rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/result-tp-01.ttl000066400000000000000000000015521415774155300235220ustar00rootroot00000000000000@prefix rs: . [] rs:ResultSet ; rs:resultVariable "p" , "q" ; rs:solution [ rs:binding [ rs:value ; rs:variable "q" ] ; rs:binding [ rs:value ; rs:variable "p" ] ] ; rs:solution [ rs:binding [ rs:value ; rs:variable "q" ] ; rs:binding [ rs:value ; rs:variable "p" ] ] . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/result-tp-02.ttl000066400000000000000000000015521415774155300235230ustar00rootroot00000000000000@prefix rs: . [] rs:ResultSet ; rs:resultVariable "q" , "x" ; rs:solution [ rs:binding [ rs:value ; rs:variable "q" ] ; rs:binding [ rs:value ; rs:variable "x" ] ] ; rs:solution [ rs:binding [ rs:value ; rs:variable "q" ] ; rs:binding [ rs:value ; rs:variable "x" ] ] . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/result-tp-03.ttl000066400000000000000000000010361415774155300235210ustar00rootroot00000000000000@prefix rs: . [] rs:ResultSet ; rs:resultVariable "a" , "b" ; rs:solution [ rs:binding [ rs:value ; rs:variable "a" ] ; rs:binding [ rs:value ; rs:variable "b" ] ] . rdflib-6.1.1/test/DAWG/data-r2-1.0/triple-match/result-tp-04.ttl000066400000000000000000000012441415774155300235230ustar00rootroot00000000000000@prefix rs: . [] rs:ResultSet ; rs:resultVariable "name" ; rs:solution [ rs:binding [ rs:value "Bob" ; rs:variable "name" ] ] ; rs:solution [ rs:binding [ rs:value "Alice" ; rs:variable "name" ] ] ; rs:solution [ rs:binding [ rs:value "Eve" ; rs:variable "name" ] ] . rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/000077500000000000000000000000001415774155300212315ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/false.ttl000066400000000000000000000005471415774155300230560ustar00rootroot00000000000000# Simple false result to an ASK query. # $Id: false.ttl,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ @prefix rdf: . @prefix rs: . @prefix xsd: . 
[] rdf:type rs:ResultSet ; rs:boolean "false"^^xsd:boolean . rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/manifest.ttl000066400000000000000000000372171415774155300235760ustar00rootroot00000000000000# $Id: manifest.ttl,v 1.7 2007/09/26 14:28:52 lfeigenb Exp $ @prefix rdf: . @prefix : . @prefix rdfs: . @prefix dawgt: . @prefix mf: . @prefix qt: . <> rdf:type mf:Manifest ; rdfs:comment "Type Promotion Tests" ; mf:entries ( :type-promotion-01 :type-promotion-02 :type-promotion-03 :type-promotion-04 :type-promotion-05 :type-promotion-06 :type-promotion-07 :type-promotion-08 :type-promotion-09 :type-promotion-10 :type-promotion-11 :type-promotion-12 :type-promotion-13 :type-promotion-14 :type-promotion-15 :type-promotion-16 :type-promotion-17 :type-promotion-18 :type-promotion-19 :type-promotion-20 :type-promotion-21 :type-promotion-22 :type-promotion-23 :type-promotion-24 :type-promotion-25 :type-promotion-26 :type-promotion-27 :type-promotion-28 :type-promotion-29 :type-promotion-30 ) . :type-promotion-01 rdf:type mf:QueryEvaluationTest ; mf:name "tP-double-double" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-02 rdf:type mf:QueryEvaluationTest ; mf:name "tP-double-float" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-03 rdf:type mf:QueryEvaluationTest ; mf:name "tP-double-decimal" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . 
:type-promotion-04 rdf:type mf:QueryEvaluationTest ; mf:name "tP-float-float" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-05 rdf:type mf:QueryEvaluationTest ; mf:name "tP-float-decimal" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-06 rdf:type mf:QueryEvaluationTest ; mf:name "tP-decimal-decimal" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-07 rdf:type mf:QueryEvaluationTest ; mf:name "tP-integer-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-08 rdf:type mf:QueryEvaluationTest ; mf:name "tP-nonPositiveInteger-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-09 rdf:type mf:QueryEvaluationTest ; mf:name "tP-negativeInteger-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . 
:type-promotion-10 rdf:type mf:QueryEvaluationTest ; mf:name "tP-long-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-11 rdf:type mf:QueryEvaluationTest ; mf:name "tP-int-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-12 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-13 rdf:type mf:QueryEvaluationTest ; mf:name "tP-byte-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-14 rdf:type mf:QueryEvaluationTest ; mf:name "tP-nonNegativeInteger-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-15 rdf:type mf:QueryEvaluationTest ; mf:name "tP-unsignedLong-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . 
:type-promotion-16 rdf:type mf:QueryEvaluationTest ; mf:name "tP-unsignedInt-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-17 rdf:type mf:QueryEvaluationTest ; mf:name "tP-unsignedShort-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-18 rdf:type mf:QueryEvaluationTest ; mf:name "tP-unsignedByte-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-19 rdf:type mf:QueryEvaluationTest ; mf:name "tP-positiveInteger-short" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-20 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-double" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-21 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-float" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . 
:type-promotion-22 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-decimal" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-23 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-short-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-24 rdf:type mf:QueryEvaluationTest ; mf:name "tP-byte-short-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-25 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-long-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-26 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-int-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-27 rdf:type mf:QueryEvaluationTest ; mf:name "tP-short-byte-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . 
:type-promotion-28 rdf:type mf:QueryEvaluationTest ; mf:name "tP-double-float-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-29 rdf:type mf:QueryEvaluationTest ; mf:name "tP-double-decimal-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . :type-promotion-30 rdf:type mf:QueryEvaluationTest ; mf:name "tP-float-decimal-fail" ; qt:queryForm qt:QueryAsk ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment "Positive test: product of type promotion within the xsd:decimal type tree." ; mf:action [ qt:data ; qt:query ] ; mf:result . rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-byte-short-fail.rq000066400000000000000000000007101415774155300251650ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-byte-short-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:byte1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:short ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-byte-short.rq000066400000000000000000000007051415774155300242600ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-byte-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:byte1 rdf:value ?l . t:short1 rdf:value ?r . 
FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-decimal-decimal.rq000066400000000000000000000007171415774155300251550ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-decimal-decimal.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:decimal1 rdf:value ?l . t:decimal1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:decimal ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-double-decimal-fail.rq000066400000000000000000000007221415774155300257360ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-double-decimal-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:double1 rdf:value ?l . t:decimal1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:decimal ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-double-decimal.rq000066400000000000000000000007141415774155300250260ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-double-decimal.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:double1 rdf:value ?l . t:decimal1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:double ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-double-double.rq000066400000000000000000000007121415774155300247000ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-double-double.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:double1 rdf:value ?l . t:double1 rdf:value ?r . 
FILTER ( datatype(?l + ?r) = xsd:double ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-double-float-fail.rq000066400000000000000000000007141415774155300254460ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-double-float-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:double1 rdf:value ?l . t:float1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:float ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-double-float.rq000066400000000000000000000007101415774155300245310ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-double-float.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:double1 rdf:value ?l . t:float1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:double ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-float-decimal-fail.rq000066400000000000000000000007201415774155300255670ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-float-decimal-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:float1 rdf:value ?l . t:decimal1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:decimal ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-float-decimal.rq000066400000000000000000000007111415774155300246560ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-float-decimal.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:float1 rdf:value ?l . t:decimal1 rdf:value ?r . 
FILTER ( datatype(?l + ?r) = xsd:float ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-float-float.rq000066400000000000000000000007051415774155300243700ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-float-float.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:float1 rdf:value ?l . t:float1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:float ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-int-short.rq000066400000000000000000000007031415774155300241050ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-int-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:int1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-integer-short.rq000066400000000000000000000007131415774155300247510ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-integer-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:integer1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-long-short.rq000066400000000000000000000007051415774155300242540ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-long-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:long1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-negativeInteger-short.rq000066400000000000000000000007341415774155300264370ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. 
# $Id: tP-negativeInteger-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:negativeIntegerN1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-nonNegativeInteger-short.rq000066400000000000000000000007411415774155300271100ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-nonNegativeInteger-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:nonNegativeInteger1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-nonPositiveInteger-short.rq000066400000000000000000000007421415774155300271510ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-nonPositiveInteger-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:nonPositiveIntegerN1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-positiveInteger-short.rq000066400000000000000000000007331415774155300264760ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-positiveInteger-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:positiveInteger1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-byte-fail.rq000066400000000000000000000007111415774155300251660ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-byte-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . 
t:byte1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:double ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-decimal.rq000066400000000000000000000007131415774155300247120ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-decimal.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:decimal1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:decimal ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-double.rq000066400000000000000000000007101415774155300245630ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-double.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:double1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:double ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-float.rq000066400000000000000000000007051415774155300244220ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-float.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:float1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:float ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-int-fail.rq000066400000000000000000000007061415774155300250210ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-int-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:int1 rdf:value ?r . 
FILTER ( datatype(?l + ?r) = xsd:float ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-long-fail.rq000066400000000000000000000007121415774155300251630ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-long-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:long1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:decimal ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-short-fail.rq000066400000000000000000000007121415774155300253630ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-short-fail.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:short ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-short-short.rq000066400000000000000000000007071415774155300244560ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-short-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:short1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-unsignedByte-short.rq000066400000000000000000000007251415774155300257570ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-unsignedByte-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:unsignedByte1 rdf:value ?l . t:short1 rdf:value ?r . 
FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-unsignedInt-short.rq000066400000000000000000000007231415774155300256040ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-unsignedInt-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:unsignedInt1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-unsignedLong-short.rq000066400000000000000000000007251415774155300257530ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-unsignedLong-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:unsignedLong1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP-unsignedShort-short.rq000066400000000000000000000007271415774155300261550ustar00rootroot00000000000000# Positive test: product of type promotion within the xsd:decimal type tree. # $Id: tP-unsignedShort-short.rq,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ PREFIX t: PREFIX rdf: PREFIX xsd: ASK WHERE { t:unsignedShort1 rdf:value ?l . t:short1 rdf:value ?r . FILTER ( datatype(?l + ?r) = xsd:integer ) } rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/tP.ttl000066400000000000000000000022311415774155300223370ustar00rootroot00000000000000# $Id: tP.ttl,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ @prefix rdf: . @prefix xsd: . @prefix t: . t:decimal1 rdf:value "1"^^xsd:decimal . t:float1 rdf:value "1"^^xsd:float . t:double1 rdf:value "1"^^xsd:double . t:booleanT rdf:value "true"^^xsd:boolean . t:dateTime1 rdf:value "2005-01-14T12:34:56"^^xsd:dateTime . # types derived from xsd:decimal: t:integer1 rdf:value "1"^^xsd:integer . t:nonPositiveIntegerN1 rdf:value "-1"^^xsd:nonPositiveInteger . 
t:negativeIntegerN1 rdf:value "-1"^^xsd:negativeInteger . t:long1 rdf:value "1"^^xsd:long . t:int1 rdf:value "1"^^xsd:int . t:short1 rdf:value "1"^^xsd:short . t:byte1 rdf:value "1"^^xsd:byte . t:nonNegativeInteger1 rdf:value "1"^^xsd:nonNegativeInteger . t:unsignedLong1 rdf:value "1"^^xsd:unsignedLong . t:unsignedInt1 rdf:value "1"^^xsd:unsignedInt . t:unsignedShort1 rdf:value "1"^^xsd:unsignedShort . t:unsignedByte1 rdf:value "1"^^xsd:unsignedByte . t:positiveInteger1 rdf:value "1"^^xsd:positiveInteger . rdflib-6.1.1/test/DAWG/data-r2-1.0/type-promotion/true.ttl000066400000000000000000000005441415774155300227400ustar00rootroot00000000000000# Simple true result to an ASK query. # $Id: true.ttl,v 1.1 2007/06/29 14:24:48 aseaborne Exp $ @prefix rdf: . @prefix rs: . @prefix xsd: . [] rdf:type rs:ResultSet ; rs:boolean "true"^^xsd:boolean . rdflib-6.1.1/test/DAWG/data-r2/000077500000000000000000000000001415774155300157305ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2/LICENSE000066400000000000000000000042731415774155300167430ustar00rootroot00000000000000All tests materials are licensed under the W3C Software Notice and License, as follows: ____________________________________ W3C SOFTWARE NOTICE AND LICENSE http://www.w3.org/Consortium/Legal/2002/copyright-software-20021231 This work (and included software, documentation such as READMEs, or other related items) is being provided by the copyright holders under the following license. By obtaining, using and/or copying this work, you (the licensee) agree that you have read, understood, and will comply with the following terms and conditions. Permission to copy, modify, and distribute this software and its documentation, with or without modification, for any purpose and without fee or royalty is hereby granted, provided that you include the following on ALL copies of the software and documentation or portions thereof, including modifications: 1. 
The full text of this NOTICE in a location viewable to users of the redistributed or derivative work. 2. Any pre-existing intellectual property disclaimers, notices, or terms and conditions. If none exist, the W3C Software Short Notice should be included (hypertext is preferred, text is permitted) within the body of any redistributed or derivative code. 3. Notice of any changes or modifications to the files, including the date changes were made. (We recommend you provide URIs to the location from which the code is derived.) THIS SOFTWARE AND DOCUMENTATION IS PROVIDED "AS IS," AND COPYRIGHT HOLDERS MAKE NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO, WARRANTIES OF MERCHANTABILITY OR FITNESS FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE OR DOCUMENTATION WILL NOT INFRINGE ANY THIRD PARTY PATENTS, COPYRIGHTS, TRADEMARKS OR OTHER RIGHTS. COPYRIGHT HOLDERS WILL NOT BE LIABLE FOR ANY DIRECT, INDIRECT, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF ANY USE OF THE SOFTWARE OR DOCUMENTATION. The name and trademarks of copyright holders may NOT be used in advertising or publicity pertaining to the software without specific, written prior permission. Title to copyright in this software and any associated documentation will at all times remain with copyright holders. ____________________________________ rdflib-6.1.1/test/DAWG/data-r2/README000066400000000000000000000013571415774155300166160ustar00rootroot00000000000000Data Access Working Group SPARQL Query Language test suite reorganization This directory contains restructured copies of tests from the DAWG test suite (available at http://www.w3.org/2001/sw/DataAccess/tests/data/). The purpose of this restructuring is to enhance usability, clear away obsolete tests and provide an up-to-date, consistent and easy to use suite of test cases that SPARQL query language implementors can use to evaluate their implementation. 
The Working Group decided (http://lists.w3.org/Archives/Public/public-rdf-dawg/2007JulSep/att-0096/21-dawg-minutes.html#item05) on 21 Aug 2007 that the tests as-is constitute a test suite that the group will use to generate an implementation report for the SPARQL Query Language for RDF. rdflib-6.1.1/test/DAWG/data-r2/algebra/000077500000000000000000000000001415774155300173255ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2/algebra/data-1.ttl000066400000000000000000000001631415774155300211210ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x :p "1"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/data-2.ttl000066400000000000000000000004111415774155300211160ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x :p "1"^^xsd:integer . :x :p "2"^^xsd:integer . :x :p "3"^^xsd:integer . :x :p "4"^^xsd:integer . :x :q "1"^^xsd:integer . :x :q "2"^^xsd:integer . :x :q "3"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-nested-1.rq000066400000000000000000000001051415774155300225700ustar00rootroot00000000000000PREFIX : SELECT ?v { :x :p ?v . FILTER(?v = 1) } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-nested-1.srx000066400000000000000000000006451415774155300227730ustar00rootroot00000000000000 1 rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-nested-2.rq000066400000000000000000000001111415774155300225660ustar00rootroot00000000000000PREFIX : SELECT ?v { :x :p ?v . { FILTER(?v = 1) } } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-nested-2.srx000066400000000000000000000004171415774155300227710ustar00rootroot00000000000000 rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-1.rq000066400000000000000000000001201415774155300232530ustar00rootroot00000000000000PREFIX : SELECT ?v { ?s :p ?v . 
FILTER (?v = 2) } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-1.srx000066400000000000000000000006451415774155300234610ustar00rootroot00000000000000 2 rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-2.rq000066400000000000000000000001201415774155300232540ustar00rootroot00000000000000PREFIX : SELECT ?v { FILTER (?v = 2) ?s :p ?v . } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-2.srx000066400000000000000000000006451415774155300234620ustar00rootroot00000000000000 2 rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-3.rq000066400000000000000000000001651415774155300232660ustar00rootroot00000000000000PREFIX : SELECT ?v ?w { FILTER (?v = 2) FILTER (?w = 3) ?s :p ?v . ?s :q ?w . } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-placement-3.srx000066400000000000000000000010711415774155300234550ustar00rootroot00000000000000 2 3 rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-scope-1.rq000066400000000000000000000002001415774155300224130ustar00rootroot00000000000000PREFIX : SELECT * { :x :p ?v . { :x :q ?w OPTIONAL { :x :p ?v2 FILTER(?v = 1) } } } rdflib-6.1.1/test/DAWG/data-r2/algebra/filter-scope-1.srx000066400000000000000000000070161415774155300226210ustar00rootroot00000000000000 4 3 4 2 4 1 3 3 3 2 3 1 2 3 2 2 2 1 1 3 1 2 1 1 rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-1.rq000066400000000000000000000002071415774155300220620ustar00rootroot00000000000000PREFIX : SELECT ?a ?y ?d ?z { ?a :p ?c OPTIONAL { ?a :r ?d }. 
?a ?p 1 { ?p a ?y } UNION { ?a ?z ?p } }rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-1.srx000066400000000000000000000022041415774155300222530ustar00rootroot00000000000000 http://example/x1 http://www.w3.org/1999/02/22-rdf-syntax-ns#Property 4 http://example/x1 http://example/z 4 rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-2.rq000066400000000000000000000001541415774155300220640ustar00rootroot00000000000000PREFIX : SELECT ?x ?y ?z { GRAPH ?g { ?x ?p 1 } { ?x :p ?y } UNION { ?p a ?z } } rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-2.srx000066400000000000000000000011541415774155300222570ustar00rootroot00000000000000 http://example/b http://www.w3.org/1999/02/22-rdf-syntax-ns#Property rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-graph-1.ttl000066400000000000000000000002151415774155300233410ustar00rootroot00000000000000@prefix : . @prefix xsd: . :b :p "1"^^xsd:integer . _:a :p "9"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/join-combo-graph-2.ttl000066400000000000000000000006441415774155300233500ustar00rootroot00000000000000@prefix : . @prefix rdf: . @prefix xsd: . :x1 :p "1"^^xsd:integer . :x1 :r "4"^^xsd:integer . :x2 :p "2"^^xsd:integer . :x2 :r "10"^^xsd:integer . :x2 :x "1"^^xsd:integer . :x3 :q "3"^^xsd:integer . :x3 :q "4"^^xsd:integer . :x3 :s "1"^^xsd:integer . :x3 :t :s . :p a rdf:Property . :x1 :z :p . rdflib-6.1.1/test/DAWG/data-r2/algebra/manifest.ttl000066400000000000000000000167131415774155300216700ustar00rootroot00000000000000@prefix rdf: . @prefix : . @prefix rdfs: . @prefix mf: . @prefix qt: . @prefix dawgt: . <> rdf:type mf:Manifest ; rdfs:label "Algebra" ; mf:entries ( :nested-opt-1 :nested-opt-2 :opt-filter-1 :opt-filter-2 :opt-filter-3 :filter-place-1 :filter-place-2 :filter-place-3 :filter-nested-1 :filter-nested-2 :filter-scope-1 :join-scope-1 :join-combo-1 :join-combo-2 ) . 
:join-combo-1 a mf:QueryEvaluationTest ; mf:name "Join operator with OPTs, BGPs, and UNIONs" ; rdfs:comment """Tests nested combination of Join with a BGP / OPT and a BGP / UNION""" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; mf:action [ qt:query ; qt:data ] ; mf:result . :join-combo-2 a mf:QueryEvaluationTest ; mf:name "Join operator with Graph and Union" ; rdfs:comment """Tests combination of Join operator with Graph on LHS and Union on RHS""" ; dawgt:approvedBy ; dawgt:approval dawgt:Approved ; mf:action [ qt:query ; qt:graphData ; qt:data ] ; mf:result . :nested-opt-1 rdf:type mf:QueryEvaluationTest ; mf:name "Nested Optionals - 1" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; rdfs:comment """Nested-optionals with a shared variable that does not appear in the middle pattern (a not well-formed query pattern as per "Semantics and Complexity" of SPARQL""" ; mf:action [ qt:query ; qt:data ] ; mf:result . :nested-opt-2 rdf:type mf:QueryEvaluationTest ; mf:name "Nested Optionals - 2" ; rdfs:comment "OPTIONALs parse in a left-associative manner" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :opt-filter-1 rdf:type mf:QueryEvaluationTest ; mf:name "Optional-filter - 1" ; rdfs:comment "A FILTER inside an OPTIONAL can reference a variable bound in the required part of the OPTIONAL" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :opt-filter-2 rdf:type mf:QueryEvaluationTest ; mf:name "Optional-filter - 2 filters" ; rdfs:comment "FILTERs inside an OPTIONAL can refer to variables from both the required and optional parts of the construct." ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :opt-filter-3 rdf:type mf:QueryEvaluationTest ; mf:name "Optional-filter - scope of variable" ; rdfs:comment "FILTERs in an OPTIONAL do not extend to variables bound outside of the LeftJoin(...) 
operation" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-place-1 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-placement - 1" ; rdfs:comment "FILTER placed after the triple pattern that contains the variable tested" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-place-2 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-placement - 2" ; rdfs:comment "FILTERs are scoped to the nearest enclosing group - placement within that group does not matter" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-place-3 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-placement - 3" ; rdfs:comment "FILTERs are scoped to the nearest enclosing group - placement within that group does not matter" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-nested-1 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-nested - 1" ; rdfs:comment "A FILTER is in scope for variables bound at the same level of the query tree" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-nested-2 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-nested - 2" ; rdfs:comment "A FILTER in a group { ... } cannot see variables bound outside that group" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :filter-scope-1 rdf:type mf:QueryEvaluationTest ; mf:name "Filter-scope - 1" ; rdfs:comment "FILTERs in an OPTIONAL do not extend to variables bound outside of the LeftJoin(...) operation" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :join-scope-1 rdf:type mf:QueryEvaluationTest ; mf:name "Join scope - 1" ; rdfs:comment "Variables have query scope." 
; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-1.rq000066400000000000000000000001561415774155300221160ustar00rootroot00000000000000PREFIX : SELECT * { ?x :p ?v . OPTIONAL { ?y :q ?w . FILTER(?v=2) } } rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-1.srx000066400000000000000000000026351415774155300223140ustar00rootroot00000000000000 http://example/x2 2 http://example/x3 4 http://example/x2 2 http://example/x3 3 http://example/x1 1 rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-1.ttl000066400000000000000000000003161415774155300222750ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x1 :p "1"^^xsd:integer . :x2 :p "2"^^xsd:integer . :x3 :q "3"^^xsd:integer . :x3 :q "4"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-2.rq000066400000000000000000000001771415774155300221220ustar00rootroot00000000000000PREFIX : SELECT * { ?x :p ?v . OPTIONAL { ?y :q ?w . FILTER(?v=2) FILTER(?w=3) } } rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-2.srx000066400000000000000000000017561415774155300223200ustar00rootroot00000000000000 http://example/x2 2 http://example/x3 3 http://example/x1 1 rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-2.ttl000066400000000000000000000003161415774155300222760ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x1 :p "1"^^xsd:integer . :x2 :p "2"^^xsd:integer . :x3 :q "3"^^xsd:integer . :x3 :q "4"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-3.rq000066400000000000000000000002541415774155300221170ustar00rootroot00000000000000PREFIX : SELECT * { :x :p ?v . 
{ :x :q ?w # ?v is not in scope so ?v2 never set OPTIONAL { :x :p ?v2 FILTER(?v = 1) } } } rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-3.srx000066400000000000000000000005021415774155300223050ustar00rootroot00000000000000 rdflib-6.1.1/test/DAWG/data-r2/algebra/opt-filter-3.ttl000066400000000000000000000003161415774155300222770ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x1 :p "1"^^xsd:integer . :x2 :p "2"^^xsd:integer . :x3 :q "3"^^xsd:integer . :x3 :q "4"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/two-nested-opt-alt.rq000066400000000000000000000003761415774155300233460ustar00rootroot00000000000000PREFIX : ## The nested optional example, rewritten to a form that is the same ## for the SPARQL algebra and the declarative semantics. SELECT * { :x1 :p ?v . OPTIONAL { :x3 :q ?w } OPTIONAL { :x3 :q ?w . :x2 :p ?v } } rdflib-6.1.1/test/DAWG/data-r2/algebra/two-nested-opt-alt.srx000066400000000000000000000015121415774155300235310ustar00rootroot00000000000000 1 4 1 3 rdflib-6.1.1/test/DAWG/data-r2/algebra/two-nested-opt.rq000066400000000000000000000002051415774155300225570ustar00rootroot00000000000000PREFIX : SELECT * { :x1 :p ?v . OPTIONAL { :x3 :q ?w . OPTIONAL { :x2 :p ?v } } } rdflib-6.1.1/test/DAWG/data-r2/algebra/two-nested-opt.srx000066400000000000000000000006761415774155300227650ustar00rootroot00000000000000 1 rdflib-6.1.1/test/DAWG/data-r2/algebra/two-nested-opt.ttl000066400000000000000000000003131415774155300227400ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x1 :p "1"^^xsd:integer . :x2 :p "2"^^xsd:integer . :x3 :q "3"^^xsd:integer . :x3 :q "4"^^xsd:integer . rdflib-6.1.1/test/DAWG/data-r2/algebra/var-scope-join-1.rq000066400000000000000000000001611415774155300226610ustar00rootroot00000000000000PREFIX : SELECT * { ?X :name "paul" {?Y :name "george" . 
OPTIONAL { ?X :email ?Z } } } rdflib-6.1.1/test/DAWG/data-r2/algebra/var-scope-join-1.srx000066400000000000000000000005011415774155300230510ustar00rootroot00000000000000 rdflib-6.1.1/test/DAWG/data-r2/algebra/var-scope-join-1.ttl000066400000000000000000000005011415774155300230400ustar00rootroot00000000000000@prefix : . _:B1 :name "paul" . _:B1 :phone "777-3426". _:B2 :name "john" . _:B2 :email . _:B3 :name "george". _:B3 :webPage . _:B4 :name "ringo". _:B4 :email . _:B4 :webPage . _:B4 :phone "888-4537". rdflib-6.1.1/test/DAWG/data-r2/ask/000077500000000000000000000000001415774155300165065ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2/ask/ask-1.rq000066400000000000000000000000561415774155300177670ustar00rootroot00000000000000PREFIX : ASK { :x :p 1 } rdflib-6.1.1/test/DAWG/data-r2/ask/ask-1.srx000066400000000000000000000002051415774155300201550ustar00rootroot00000000000000 true rdflib-6.1.1/test/DAWG/data-r2/ask/ask-4.rq000066400000000000000000000000571415774155300177730ustar00rootroot00000000000000PREFIX : ASK { :x :p 99 } rdflib-6.1.1/test/DAWG/data-r2/ask/ask-4.srx000066400000000000000000000002061415774155300201610ustar00rootroot00000000000000 false rdflib-6.1.1/test/DAWG/data-r2/ask/ask-7.rq000066400000000000000000000000571415774155300177760ustar00rootroot00000000000000PREFIX : ASK { :x :p ?x } rdflib-6.1.1/test/DAWG/data-r2/ask/ask-7.srx000066400000000000000000000002051415774155300201630ustar00rootroot00000000000000 true rdflib-6.1.1/test/DAWG/data-r2/ask/ask-8.rq000066400000000000000000000001011415774155300177650ustar00rootroot00000000000000PREFIX : ASK { :x :p ?x . FILTER(?x = 99) } rdflib-6.1.1/test/DAWG/data-r2/ask/ask-8.srx000066400000000000000000000002061415774155300201650ustar00rootroot00000000000000 false rdflib-6.1.1/test/DAWG/data-r2/ask/data.ttl000066400000000000000000000002751415774155300201500ustar00rootroot00000000000000@prefix : . @prefix xsd: . :x :p "1"^^xsd:integer . :x :p "2"^^xsd:integer . :x :p "3"^^xsd:integer . :y :p :a . 
:a :q :r . rdflib-6.1.1/test/DAWG/data-r2/ask/manifest.ttl000066400000000000000000000042111415774155300210370ustar00rootroot00000000000000@prefix rdf: . @prefix : . @prefix rdfs: . @prefix mf: . @prefix qt: . @prefix dawgt: . <> rdf:type mf:Manifest ; rdfs:label "ASK" ; mf:entries ( :ask-1 :ask-4 :ask-7 :ask-8 ) . :ask-1 rdf:type mf:QueryEvaluationTest ; qt:queryForm qt:QueryAsk ; mf:name "ASK-1 (SPARQL XML results)" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :ask-4 rdf:type mf:QueryEvaluationTest ; qt:queryForm qt:QueryAsk ; mf:name "ASK-4 (SPARQL XML results)" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :ask-7 rdf:type mf:QueryEvaluationTest ; qt:queryForm qt:QueryAsk ; mf:name "ASK-7 (SPARQL XML results)" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . :ask-8 rdf:type mf:QueryEvaluationTest ; qt:queryForm qt:QueryAsk ; mf:name "ASK-8 (SPARQL XML results)" ; dawgt:approval dawgt:Approved ; dawgt:approvedBy ; mf:action [ qt:query ; qt:data ] ; mf:result . 
rdflib-6.1.1/test/DAWG/data-r2/basic/000077500000000000000000000000001415774155300170115ustar00rootroot00000000000000rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-1.rq000066400000000000000000000001101415774155300217100ustar00rootroot00000000000000BASE PREFIX : <> SELECT * WHERE { :x ?p ?v } rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-1.srx000066400000000000000000000012421415774155300221110ustar00rootroot00000000000000 d:x ns:p http://example.org/ns#p x:x x:p http://example.org/x/p rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-2.rq000066400000000000000000000001111415774155300217120ustar00rootroot00000000000000BASE PREFIX : <#> SELECT * WHERE { :x ?p ?v } rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-2.srx000066400000000000000000000007451415774155300221210ustar00rootroot00000000000000 z:x z:p http://example.org/x/#p rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-3.rq000066400000000000000000000001501415774155300217160ustar00rootroot00000000000000PREFIX ns: PREFIX x: SELECT * WHERE { x:x ns:p ?v } rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-3.srx000066400000000000000000000005701415774155300221160ustar00rootroot00000000000000 d:x ns:p rdflib-6.1.1/test/DAWG/data-r2/basic/base-prefix-4.rq000066400000000000000000000000751415774155300217250ustar00rootroot00000000000000BASE SELECT * WHERE {